// NOTE(review): the following lines are a dataset/extraction artifact, not part
// of the generated source. Preserved as comments so the file remains parseable:
// context
// stringlengths 2.52k
// 185k
// | gt
// stringclasses 1
// value |
// ---|---|
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/logging/v2/logging_metrics.proto
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace Google.Cloud.Logging.V2 {
/// <summary>Holder for reflection information generated from google/logging/v2/logging_metrics.proto</summary>
public static partial class LoggingMetricsReflection {
#region Descriptor
/// <summary>File descriptor for google/logging/v2/logging_metrics.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
// Type initializer: decodes the base64-encoded serialized FileDescriptorProto
// for logging_metrics.proto and registers the generated CLR types with the
// protobuf reflection runtime. The encoded payload below is emitted by the
// protocol buffer compiler and must never be edited by hand.
static LoggingMetricsReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"Cidnb29nbGUvbG9nZ2luZy92Mi9sb2dnaW5nX21ldHJpY3MucHJvdG8SEWdv",
"b2dsZS5sb2dnaW5nLnYyGhxnb29nbGUvYXBpL2Fubm90YXRpb25zLnByb3Rv",
"Gh1nb29nbGUvYXBpL2Rpc3RyaWJ1dGlvbi5wcm90bxoXZ29vZ2xlL2FwaS9t",
"ZXRyaWMucHJvdG8aG2dvb2dsZS9wcm90b2J1Zi9lbXB0eS5wcm90bxogZ29v",
"Z2xlL3Byb3RvYnVmL2ZpZWxkX21hc2sucHJvdG8irQMKCUxvZ01ldHJpYxIM",
"CgRuYW1lGAEgASgJEhMKC2Rlc2NyaXB0aW9uGAIgASgJEg4KBmZpbHRlchgD",
"IAEoCRI3ChFtZXRyaWNfZGVzY3JpcHRvchgFIAEoCzIcLmdvb2dsZS5hcGku",
"TWV0cmljRGVzY3JpcHRvchIXCg92YWx1ZV9leHRyYWN0b3IYBiABKAkSSwoQ",
"bGFiZWxfZXh0cmFjdG9ycxgHIAMoCzIxLmdvb2dsZS5sb2dnaW5nLnYyLkxv",
"Z01ldHJpYy5MYWJlbEV4dHJhY3RvcnNFbnRyeRI+Cg5idWNrZXRfb3B0aW9u",
"cxgIIAEoCzImLmdvb2dsZS5hcGkuRGlzdHJpYnV0aW9uLkJ1Y2tldE9wdGlv",
"bnMSOAoHdmVyc2lvbhgEIAEoDjInLmdvb2dsZS5sb2dnaW5nLnYyLkxvZ01l",
"dHJpYy5BcGlWZXJzaW9uGjYKFExhYmVsRXh0cmFjdG9yc0VudHJ5EgsKA2tl",
"eRgBIAEoCRINCgV2YWx1ZRgCIAEoCToCOAEiHAoKQXBpVmVyc2lvbhIGCgJW",
"MhAAEgYKAlYxEAEiTgoVTGlzdExvZ01ldHJpY3NSZXF1ZXN0Eg4KBnBhcmVu",
"dBgBIAEoCRISCgpwYWdlX3Rva2VuGAIgASgJEhEKCXBhZ2Vfc2l6ZRgDIAEo",
"BSJgChZMaXN0TG9nTWV0cmljc1Jlc3BvbnNlEi0KB21ldHJpY3MYASADKAsy",
"HC5nb29nbGUubG9nZ2luZy52Mi5Mb2dNZXRyaWMSFwoPbmV4dF9wYWdlX3Rv",
"a2VuGAIgASgJIioKE0dldExvZ01ldHJpY1JlcXVlc3QSEwoLbWV0cmljX25h",
"bWUYASABKAkiVgoWQ3JlYXRlTG9nTWV0cmljUmVxdWVzdBIOCgZwYXJlbnQY",
"ASABKAkSLAoGbWV0cmljGAIgASgLMhwuZ29vZ2xlLmxvZ2dpbmcudjIuTG9n",
"TWV0cmljIlsKFlVwZGF0ZUxvZ01ldHJpY1JlcXVlc3QSEwoLbWV0cmljX25h",
"bWUYASABKAkSLAoGbWV0cmljGAIgASgLMhwuZ29vZ2xlLmxvZ2dpbmcudjIu",
"TG9nTWV0cmljIi0KFkRlbGV0ZUxvZ01ldHJpY1JlcXVlc3QSEwoLbWV0cmlj",
"X25hbWUYASABKAky1AUKEE1ldHJpY3NTZXJ2aWNlVjISjgEKDkxpc3RMb2dN",
"ZXRyaWNzEiguZ29vZ2xlLmxvZ2dpbmcudjIuTGlzdExvZ01ldHJpY3NSZXF1",
"ZXN0GikuZ29vZ2xlLmxvZ2dpbmcudjIuTGlzdExvZ01ldHJpY3NSZXNwb25z",
"ZSIngtPkkwIhEh8vdjIve3BhcmVudD1wcm9qZWN0cy8qfS9tZXRyaWNzEoQB",
"CgxHZXRMb2dNZXRyaWMSJi5nb29nbGUubG9nZ2luZy52Mi5HZXRMb2dNZXRy",
"aWNSZXF1ZXN0GhwuZ29vZ2xlLmxvZ2dpbmcudjIuTG9nTWV0cmljIi6C0+ST",
"AigSJi92Mi97bWV0cmljX25hbWU9cHJvamVjdHMvKi9tZXRyaWNzLyp9EosB",
"Cg9DcmVhdGVMb2dNZXRyaWMSKS5nb29nbGUubG9nZ2luZy52Mi5DcmVhdGVM",
"b2dNZXRyaWNSZXF1ZXN0GhwuZ29vZ2xlLmxvZ2dpbmcudjIuTG9nTWV0cmlj",
"Ii+C0+STAikiHy92Mi97cGFyZW50PXByb2plY3RzLyp9L21ldHJpY3M6Bm1l",
"dHJpYxKSAQoPVXBkYXRlTG9nTWV0cmljEikuZ29vZ2xlLmxvZ2dpbmcudjIu",
"VXBkYXRlTG9nTWV0cmljUmVxdWVzdBocLmdvb2dsZS5sb2dnaW5nLnYyLkxv",
"Z01ldHJpYyI2gtPkkwIwGiYvdjIve21ldHJpY19uYW1lPXByb2plY3RzLyov",
"bWV0cmljcy8qfToGbWV0cmljEoQBCg9EZWxldGVMb2dNZXRyaWMSKS5nb29n",
"bGUubG9nZ2luZy52Mi5EZWxldGVMb2dNZXRyaWNSZXF1ZXN0GhYuZ29vZ2xl",
"LnByb3RvYnVmLkVtcHR5Ii6C0+STAigqJi92Mi97bWV0cmljX25hbWU9cHJv",
"amVjdHMvKi9tZXRyaWNzLyp9Qp8BChVjb20uZ29vZ2xlLmxvZ2dpbmcudjJC",
"E0xvZ2dpbmdNZXRyaWNzUHJvdG9QAVo4Z29vZ2xlLmdvbGFuZy5vcmcvZ2Vu",
"cHJvdG8vZ29vZ2xlYXBpcy9sb2dnaW5nL3YyO2xvZ2dpbmf4AQGqAhdHb29n",
"bGUuQ2xvdWQuTG9nZ2luZy5WMsoCF0dvb2dsZVxDbG91ZFxMb2dnaW5nXFYy",
"YgZwcm90bzM="));
// The dependency array lists the descriptor of every file imported by
// logging_metrics.proto, in the same order as the .proto imports.
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { global::Google.Api.AnnotationsReflection.Descriptor, global::Google.Api.DistributionReflection.Descriptor, global::Google.Api.MetricReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.EmptyReflection.Descriptor, global::Google.Protobuf.WellKnownTypes.FieldMaskReflection.Descriptor, },
// One GeneratedClrTypeInfo per top-level message, in declaration order.
// The LogMetric entry also registers its nested ApiVersion enum and a
// null placeholder for the codegen-internal LabelExtractorsEntry map type.
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Logging.V2.LogMetric), global::Google.Cloud.Logging.V2.LogMetric.Parser, new[]{ "Name", "Description", "Filter", "MetricDescriptor", "ValueExtractor", "LabelExtractors", "BucketOptions", "Version" }, null, new[]{ typeof(global::Google.Cloud.Logging.V2.LogMetric.Types.ApiVersion) }, new pbr::GeneratedClrTypeInfo[] { null, }),
new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Logging.V2.ListLogMetricsRequest), global::Google.Cloud.Logging.V2.ListLogMetricsRequest.Parser, new[]{ "Parent", "PageToken", "PageSize" }, null, null, null),
new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Logging.V2.ListLogMetricsResponse), global::Google.Cloud.Logging.V2.ListLogMetricsResponse.Parser, new[]{ "Metrics", "NextPageToken" }, null, null, null),
new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Logging.V2.GetLogMetricRequest), global::Google.Cloud.Logging.V2.GetLogMetricRequest.Parser, new[]{ "MetricName" }, null, null, null),
new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Logging.V2.CreateLogMetricRequest), global::Google.Cloud.Logging.V2.CreateLogMetricRequest.Parser, new[]{ "Parent", "Metric" }, null, null, null),
new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Logging.V2.UpdateLogMetricRequest), global::Google.Cloud.Logging.V2.UpdateLogMetricRequest.Parser, new[]{ "MetricName", "Metric" }, null, null, null),
new pbr::GeneratedClrTypeInfo(typeof(global::Google.Cloud.Logging.V2.DeleteLogMetricRequest), global::Google.Cloud.Logging.V2.DeleteLogMetricRequest.Parser, new[]{ "MetricName" }, null, null, null)
}));
}
#endregion
}
#region Messages
/// <summary>
/// Describes a logs-based metric. The value of the metric is the
/// number of log entries that match a logs filter in a given time interval.
///
/// Logs-based metric can also be used to extract values from logs and create
/// a distribution of the values. The distribution records the statistics of the
/// extracted values along with an optional histogram of the values as specified
/// by the bucket options.
/// </summary>
public sealed partial class LogMetric : pb::IMessage<LogMetric> {
// Shared parser instance used by the protobuf runtime (and by repeated/map
// field codecs) to materialize LogMetric instances from the wire format.
private static readonly pb::MessageParser<LogMetric> _parser = new pb::MessageParser<LogMetric>(() => new LogMetric());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<LogMetric> Parser { get { return _parser; } }
// Reflection descriptor for this message (index 0 in the file descriptor).
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Cloud.Logging.V2.LoggingMetricsReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public LogMetric() {
OnConstruction();
}
// Partial hook that lets hand-written code extend construction.
partial void OnConstruction();
// Copy constructor: message-valued fields (metric_descriptor, bucket_options)
// and the label_extractors map are deep-cloned; strings and the enum are
// copied by value.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public LogMetric(LogMetric other) : this() {
name_ = other.name_;
description_ = other.description_;
filter_ = other.filter_;
MetricDescriptor = other.metricDescriptor_ != null ? other.MetricDescriptor.Clone() : null;
valueExtractor_ = other.valueExtractor_;
labelExtractors_ = other.labelExtractors_.Clone();
BucketOptions = other.bucketOptions_ != null ? other.BucketOptions.Clone() : null;
version_ = other.version_;
}
// Returns a deep copy of this message.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public LogMetric Clone() {
return new LogMetric(this);
}
/// <summary>Field number for the "name" field.</summary>
public const int NameFieldNumber = 1;
private string name_ = "";
/// <summary>
/// Required. The client-assigned metric identifier.
/// Examples: `"error_count"`, `"nginx/requests"`.
///
/// Metric identifiers are limited to 100 characters and can include
/// only the following characters: `A-Z`, `a-z`, `0-9`, and the
/// special characters `_-.,+!*',()%/`. The forward-slash character
/// (`/`) denotes a hierarchy of name pieces, and it cannot be the
/// first character of the name.
///
/// The metric identifier in this field must not be
/// [URL-encoded](https://en.wikipedia.org/wiki/Percent-encoding).
/// However, when the metric identifier appears as the `[METRIC_ID]`
/// part of a `metric_name` API parameter, then the metric identifier
/// must be URL-encoded. Example:
/// `"projects/my-project/metrics/nginx%2Frequests"`.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string Name {
get { return name_; }
set {
name_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "description" field.</summary>
public const int DescriptionFieldNumber = 2;
private string description_ = "";
/// <summary>
/// Optional. A description of this metric, which is used in documentation.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string Description {
get { return description_; }
set {
description_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "filter" field.</summary>
public const int FilterFieldNumber = 3;
private string filter_ = "";
/// <summary>
/// Required. An [advanced logs filter](/logging/docs/view/advanced_filters)
/// which is used to match log entries.
/// Example:
///
/// "resource.type=gae_app AND severity>=ERROR"
///
/// The maximum length of the filter is 20000 characters.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string Filter {
get { return filter_; }
set {
filter_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "metric_descriptor" field.</summary>
public const int MetricDescriptorFieldNumber = 5;
private global::Google.Api.MetricDescriptor metricDescriptor_;
/// <summary>
/// Optional. The metric descriptor associated with the logs-based metric.
/// If unspecified, it uses a default metric descriptor with a DELTA metric
/// kind, INT64 value type, with no labels and a unit of "1". Such a metric
/// counts the number of log entries matching the `filter` expression.
///
/// The `name`, `type`, and `description` fields in the `metric_descriptor`
/// are output only, and is constructed using the `name` and `description`
/// field in the LogMetric.
///
/// To create a logs-based metric that records a distribution of log values, a
/// DELTA metric kind with a DISTRIBUTION value type must be used along with
/// a `value_extractor` expression in the LogMetric.
///
/// Each label in the metric descriptor must have a matching label
/// name as the key and an extractor expression as the value in the
/// `label_extractors` map.
///
/// The `metric_kind` and `value_type` fields in the `metric_descriptor` cannot
/// be updated once initially configured. New labels can be added in the
/// `metric_descriptor`, but existing labels cannot be modified except for
/// their description.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Google.Api.MetricDescriptor MetricDescriptor {
get { return metricDescriptor_; }
set {
metricDescriptor_ = value;
}
}
/// <summary>Field number for the "value_extractor" field.</summary>
public const int ValueExtractorFieldNumber = 6;
private string valueExtractor_ = "";
/// <summary>
/// Optional. A `value_extractor` is required when using a distribution
/// logs-based metric to extract the values to record from a log entry.
/// Two functions are supported for value extraction: `EXTRACT(field)` or
/// `REGEXP_EXTRACT(field, regex)`. The arguments are:
/// 1. field: The name of the log entry field from which the value is to be
/// extracted.
/// 2. regex: A regular expression using the Google RE2 syntax
/// (https://github.com/google/re2/wiki/Syntax) with a single capture
/// group to extract data from the specified log entry field. The value
/// of the field is converted to a string before applying the regex.
/// It is an error to specify a regex that does not include exactly one
/// capture group.
///
/// The result of the extraction must be convertible to a double type, as the
/// distribution always records double values. If either the extraction or
/// the conversion to double fails, then those values are not recorded in the
/// distribution.
///
/// Example: `REGEXP_EXTRACT(jsonPayload.request, ".*quantity=(\d+).*")`
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string ValueExtractor {
get { return valueExtractor_; }
set {
valueExtractor_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "label_extractors" field.</summary>
public const int LabelExtractorsFieldNumber = 7;
// Map codec: string keys (wire tag 10), string values (wire tag 18),
// entries written under field 7 (tag 58 = 7&lt;&lt;3 | 2).
private static readonly pbc::MapField<string, string>.Codec _map_labelExtractors_codec
= new pbc::MapField<string, string>.Codec(pb::FieldCodec.ForString(10), pb::FieldCodec.ForString(18), 58);
private readonly pbc::MapField<string, string> labelExtractors_ = new pbc::MapField<string, string>();
/// <summary>
/// Optional. A map from a label key string to an extractor expression which is
/// used to extract data from a log entry field and assign as the label value.
/// Each label key specified in the LabelDescriptor must have an associated
/// extractor expression in this map. The syntax of the extractor expression
/// is the same as for the `value_extractor` field.
///
/// The extracted value is converted to the type defined in the label
/// descriptor. If either the extraction or the type conversion fails,
/// the label will have a default value. The default value for a string
/// label is an empty string, for an integer label it is 0, and for a boolean
/// label it is `false`.
///
/// Note that there are upper bounds on the maximum number of labels and the
/// number of active time series that are allowed in a project.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::MapField<string, string> LabelExtractors {
get { return labelExtractors_; }
}
/// <summary>Field number for the "bucket_options" field.</summary>
public const int BucketOptionsFieldNumber = 8;
private global::Google.Api.Distribution.Types.BucketOptions bucketOptions_;
/// <summary>
/// Optional. The `bucket_options` are required when the logs-based metric is
/// using a DISTRIBUTION value type and it describes the bucket boundaries
/// used to create a histogram of the extracted values.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Google.Api.Distribution.Types.BucketOptions BucketOptions {
get { return bucketOptions_; }
set {
bucketOptions_ = value;
}
}
/// <summary>Field number for the "version" field.</summary>
public const int VersionFieldNumber = 4;
// Default 0 corresponds to ApiVersion.V2 (the proto enum's zero value).
private global::Google.Cloud.Logging.V2.LogMetric.Types.ApiVersion version_ = 0;
/// <summary>
/// Deprecated. The API version that created or updated this metric.
/// The v2 format is used by default and cannot be changed.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Google.Cloud.Logging.V2.LogMetric.Types.ApiVersion Version {
get { return version_; }
set {
version_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as LogMetric);
}
// Value equality over every field, including deep equality of the
// message-valued fields and the label_extractors map.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(LogMetric other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (Name != other.Name) return false;
if (Description != other.Description) return false;
if (Filter != other.Filter) return false;
if (!object.Equals(MetricDescriptor, other.MetricDescriptor)) return false;
if (ValueExtractor != other.ValueExtractor) return false;
if (!LabelExtractors.Equals(other.LabelExtractors)) return false;
if (!object.Equals(BucketOptions, other.BucketOptions)) return false;
if (Version != other.Version) return false;
return true;
}
// XOR-combines the hashes of all non-default fields, consistent with
// Equals above (fields at their proto default do not contribute).
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (Name.Length != 0) hash ^= Name.GetHashCode();
if (Description.Length != 0) hash ^= Description.GetHashCode();
if (Filter.Length != 0) hash ^= Filter.GetHashCode();
if (metricDescriptor_ != null) hash ^= MetricDescriptor.GetHashCode();
if (ValueExtractor.Length != 0) hash ^= ValueExtractor.GetHashCode();
hash ^= LabelExtractors.GetHashCode();
if (bucketOptions_ != null) hash ^= BucketOptions.GetHashCode();
if (Version != 0) hash ^= Version.GetHashCode();
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
// Serializes all non-default fields in ascending field-number order.
// Note: version is field 4 (tag 32), so it is written between filter (3)
// and metric_descriptor (5) even though it is declared last in this class.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (Name.Length != 0) {
output.WriteRawTag(10);
output.WriteString(Name);
}
if (Description.Length != 0) {
output.WriteRawTag(18);
output.WriteString(Description);
}
if (Filter.Length != 0) {
output.WriteRawTag(26);
output.WriteString(Filter);
}
if (Version != 0) {
output.WriteRawTag(32);
output.WriteEnum((int) Version);
}
if (metricDescriptor_ != null) {
output.WriteRawTag(42);
output.WriteMessage(MetricDescriptor);
}
if (ValueExtractor.Length != 0) {
output.WriteRawTag(50);
output.WriteString(ValueExtractor);
}
labelExtractors_.WriteTo(output, _map_labelExtractors_codec);
if (bucketOptions_ != null) {
output.WriteRawTag(66);
output.WriteMessage(BucketOptions);
}
}
// Computes the exact number of bytes WriteTo would produce; each "1 +"
// accounts for the single-byte field tag.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (Name.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(Name);
}
if (Description.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(Description);
}
if (Filter.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(Filter);
}
if (metricDescriptor_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(MetricDescriptor);
}
if (ValueExtractor.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(ValueExtractor);
}
size += labelExtractors_.CalculateSize(_map_labelExtractors_codec);
if (bucketOptions_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(BucketOptions);
}
if (Version != 0) {
size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) Version);
}
return size;
}
// Field-wise merge: singular fields overwrite only when set (non-default)
// in `other`; message fields are merged recursively; map entries are added.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(LogMetric other) {
if (other == null) {
return;
}
if (other.Name.Length != 0) {
Name = other.Name;
}
if (other.Description.Length != 0) {
Description = other.Description;
}
if (other.Filter.Length != 0) {
Filter = other.Filter;
}
if (other.metricDescriptor_ != null) {
if (metricDescriptor_ == null) {
metricDescriptor_ = new global::Google.Api.MetricDescriptor();
}
MetricDescriptor.MergeFrom(other.MetricDescriptor);
}
if (other.ValueExtractor.Length != 0) {
ValueExtractor = other.ValueExtractor;
}
labelExtractors_.Add(other.labelExtractors_);
if (other.bucketOptions_ != null) {
if (bucketOptions_ == null) {
bucketOptions_ = new global::Google.Api.Distribution.Types.BucketOptions();
}
BucketOptions.MergeFrom(other.BucketOptions);
}
if (other.Version != 0) {
Version = other.Version;
}
}
// Wire-format parse loop keyed on raw tags; unknown fields are skipped
// (this codegen version does not preserve unknown fields).
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 10: {
Name = input.ReadString();
break;
}
case 18: {
Description = input.ReadString();
break;
}
case 26: {
Filter = input.ReadString();
break;
}
case 32: {
version_ = (global::Google.Cloud.Logging.V2.LogMetric.Types.ApiVersion) input.ReadEnum();
break;
}
case 42: {
if (metricDescriptor_ == null) {
metricDescriptor_ = new global::Google.Api.MetricDescriptor();
}
input.ReadMessage(metricDescriptor_);
break;
}
case 50: {
ValueExtractor = input.ReadString();
break;
}
case 58: {
labelExtractors_.AddEntriesFrom(input, _map_labelExtractors_codec);
break;
}
case 66: {
if (bucketOptions_ == null) {
bucketOptions_ = new global::Google.Api.Distribution.Types.BucketOptions();
}
input.ReadMessage(bucketOptions_);
break;
}
}
}
}
#region Nested types
/// <summary>Container for nested types declared in the LogMetric message type.</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static partial class Types {
/// <summary>
/// Stackdriver Logging API version.
/// </summary>
public enum ApiVersion {
/// <summary>
/// Stackdriver Logging API v2.
/// </summary>
[pbr::OriginalName("V2")] V2 = 0,
/// <summary>
/// Stackdriver Logging API v1.
/// </summary>
[pbr::OriginalName("V1")] V1 = 1,
}
}
#endregion
}
/// <summary>
/// The parameters to ListLogMetrics.
/// </summary>
public sealed partial class ListLogMetricsRequest : pb::IMessage<ListLogMetricsRequest> {
// Shared parser instance used by the protobuf runtime to materialize
// ListLogMetricsRequest instances from the wire format.
private static readonly pb::MessageParser<ListLogMetricsRequest> _parser = new pb::MessageParser<ListLogMetricsRequest>(() => new ListLogMetricsRequest());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<ListLogMetricsRequest> Parser { get { return _parser; } }
// Reflection descriptor for this message (index 1 in the file descriptor).
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Cloud.Logging.V2.LoggingMetricsReflection.Descriptor.MessageTypes[1]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public ListLogMetricsRequest() {
OnConstruction();
}
// Partial hook that lets hand-written code extend construction.
partial void OnConstruction();
// Copy constructor: all fields are value types or strings, so a field-wise
// copy is a full deep copy.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public ListLogMetricsRequest(ListLogMetricsRequest other) : this() {
parent_ = other.parent_;
pageToken_ = other.pageToken_;
pageSize_ = other.pageSize_;
}
// Returns a deep copy of this message.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public ListLogMetricsRequest Clone() {
return new ListLogMetricsRequest(this);
}
/// <summary>Field number for the "parent" field.</summary>
public const int ParentFieldNumber = 1;
private string parent_ = "";
/// <summary>
/// Required. The name of the project containing the metrics:
///
/// "projects/[PROJECT_ID]"
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string Parent {
get { return parent_; }
set {
parent_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "page_token" field.</summary>
public const int PageTokenFieldNumber = 2;
private string pageToken_ = "";
/// <summary>
/// Optional. If present, then retrieve the next batch of results from the
/// preceding call to this method. `pageToken` must be the value of
/// `nextPageToken` from the previous response. The values of other method
/// parameters should be identical to those in the previous call.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string PageToken {
get { return pageToken_; }
set {
pageToken_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "page_size" field.</summary>
public const int PageSizeFieldNumber = 3;
private int pageSize_;
/// <summary>
/// Optional. The maximum number of results to return from this request.
/// Non-positive values are ignored. The presence of `nextPageToken` in the
/// response indicates that more results might be available.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int PageSize {
get { return pageSize_; }
set {
pageSize_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as ListLogMetricsRequest);
}
// Value equality over all three fields.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(ListLogMetricsRequest other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (Parent != other.Parent) return false;
if (PageToken != other.PageToken) return false;
if (PageSize != other.PageSize) return false;
return true;
}
// XOR-combines the hashes of all non-default fields, consistent with Equals.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (Parent.Length != 0) hash ^= Parent.GetHashCode();
if (PageToken.Length != 0) hash ^= PageToken.GetHashCode();
if (PageSize != 0) hash ^= PageSize.GetHashCode();
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
// Serializes non-default fields in field-number order (parent=1,
// page_token=2, page_size=3).
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (Parent.Length != 0) {
output.WriteRawTag(10);
output.WriteString(Parent);
}
if (PageToken.Length != 0) {
output.WriteRawTag(18);
output.WriteString(PageToken);
}
if (PageSize != 0) {
output.WriteRawTag(24);
output.WriteInt32(PageSize);
}
}
// Computes the exact number of bytes WriteTo would produce.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (Parent.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(Parent);
}
if (PageToken.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(PageToken);
}
if (PageSize != 0) {
size += 1 + pb::CodedOutputStream.ComputeInt32Size(PageSize);
}
return size;
}
// Field-wise merge: fields in `other` overwrite only when non-default.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(ListLogMetricsRequest other) {
if (other == null) {
return;
}
if (other.Parent.Length != 0) {
Parent = other.Parent;
}
if (other.PageToken.Length != 0) {
PageToken = other.PageToken;
}
if (other.PageSize != 0) {
PageSize = other.PageSize;
}
}
// Wire-format parse loop keyed on raw tags; unknown fields are skipped.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 10: {
Parent = input.ReadString();
break;
}
case 18: {
PageToken = input.ReadString();
break;
}
case 24: {
PageSize = input.ReadInt32();
break;
}
}
}
}
}
/// <summary>
/// Result returned from ListLogMetrics.
/// </summary>
public sealed partial class ListLogMetricsResponse : pb::IMessage<ListLogMetricsResponse> {
// Shared parser instance used by the protobuf runtime to materialize
// ListLogMetricsResponse instances from the wire format.
private static readonly pb::MessageParser<ListLogMetricsResponse> _parser = new pb::MessageParser<ListLogMetricsResponse>(() => new ListLogMetricsResponse());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<ListLogMetricsResponse> Parser { get { return _parser; } }
// Reflection descriptor for this message (index 2 in the file descriptor).
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Google.Cloud.Logging.V2.LoggingMetricsReflection.Descriptor.MessageTypes[2]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public ListLogMetricsResponse() {
OnConstruction();
}
// Partial hook that lets hand-written code extend construction.
partial void OnConstruction();
// Copy constructor: the repeated metrics field is deep-cloned; the token
// string is copied by value.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public ListLogMetricsResponse(ListLogMetricsResponse other) : this() {
metrics_ = other.metrics_.Clone();
nextPageToken_ = other.nextPageToken_;
}
// Returns a deep copy of this message.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public ListLogMetricsResponse Clone() {
return new ListLogMetricsResponse(this);
}
/// <summary>Field number for the "metrics" field.</summary>
public const int MetricsFieldNumber = 1;
// Codec for the repeated message field: each entry is written under tag 10
// (field 1, length-delimited) and parsed with LogMetric.Parser.
private static readonly pb::FieldCodec<global::Google.Cloud.Logging.V2.LogMetric> _repeated_metrics_codec
= pb::FieldCodec.ForMessage(10, global::Google.Cloud.Logging.V2.LogMetric.Parser);
private readonly pbc::RepeatedField<global::Google.Cloud.Logging.V2.LogMetric> metrics_ = new pbc::RepeatedField<global::Google.Cloud.Logging.V2.LogMetric>();
/// <summary>
/// A list of logs-based metrics.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::RepeatedField<global::Google.Cloud.Logging.V2.LogMetric> Metrics {
get { return metrics_; }
}
/// <summary>Field number for the "next_page_token" field.</summary>
public const int NextPageTokenFieldNumber = 2;
private string nextPageToken_ = "";
/// <summary>
/// If there might be more results than appear in this response, then
/// `nextPageToken` is included. To get the next set of results, call this
/// method again using the value of `nextPageToken` as `pageToken`.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string NextPageToken {
get { return nextPageToken_; }
set {
nextPageToken_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as ListLogMetricsResponse);
}
// Value equality: element-wise comparison of metrics plus the token.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(ListLogMetricsResponse other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if(!metrics_.Equals(other.metrics_)) return false;
if (NextPageToken != other.NextPageToken) return false;
return true;
}
// XOR-combines the repeated-field hash with the token's hash (when set).
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
hash ^= metrics_.GetHashCode();
if (NextPageToken.Length != 0) hash ^= NextPageToken.GetHashCode();
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
// Serializes metrics entries first (field 1), then the token (field 2).
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
metrics_.WriteTo(output, _repeated_metrics_codec);
if (NextPageToken.Length != 0) {
output.WriteRawTag(18);
output.WriteString(NextPageToken);
}
}
// Computes the exact number of bytes WriteTo would produce.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
size += metrics_.CalculateSize(_repeated_metrics_codec);
if (NextPageToken.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(NextPageToken);
}
return size;
}
// Merge: repeated entries from `other` are appended; the token overwrites
// only when non-empty in `other`.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(ListLogMetricsResponse other) {
if (other == null) {
return;
}
metrics_.Add(other.metrics_);
if (other.NextPageToken.Length != 0) {
NextPageToken = other.NextPageToken;
}
}
// Wire-format parse loop keyed on raw tags; unknown fields are skipped.
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 10: {
metrics_.AddEntriesFrom(input, _repeated_metrics_codec);
break;
}
case 18: {
NextPageToken = input.ReadString();
break;
}
}
}
}
}
  /// <summary>
  /// The parameters to GetLogMetric.
  /// </summary>
  public sealed partial class GetLogMetricRequest : pb::IMessage<GetLogMetricRequest> {
    // NOTE(review): protoc-generated message (see header: "DO NOT EDIT").
    // Only comments are added here; any code change would be lost on regeneration.
    private static readonly pb::MessageParser<GetLogMetricRequest> _parser = new pb::MessageParser<GetLogMetricRequest>(() => new GetLogMetricRequest());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<GetLogMetricRequest> Parser { get { return _parser; } }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      // Index 3 in MessageTypes corresponds to this message's position in the .proto file.
      get { return global::Google.Cloud.Logging.V2.LoggingMetricsReflection.Descriptor.MessageTypes[3]; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public GetLogMetricRequest() {
      OnConstruction();
    }
    partial void OnConstruction();
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public GetLogMetricRequest(GetLogMetricRequest other) : this() {
      // Copy constructor; string fields are immutable so a reference copy suffices.
      metricName_ = other.metricName_;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public GetLogMetricRequest Clone() {
      return new GetLogMetricRequest(this);
    }
    /// <summary>Field number for the "metric_name" field.</summary>
    public const int MetricNameFieldNumber = 1;
    private string metricName_ = "";
    /// <summary>
    /// The resource name of the desired metric:
    ///
    ///     "projects/[PROJECT_ID]/metrics/[METRIC_ID]"
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string MetricName {
      get { return metricName_; }
      set {
        // Proto3 string fields are never null; empty string is the default.
        metricName_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as GetLogMetricRequest);
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(GetLogMetricRequest other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      return true;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (MetricName.Length != 0) hash ^= MetricName.GetHashCode();
      return hash;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      if (MetricName.Length != 0) {
        // Raw tag 10 = field 1, wire type 2 (length-delimited string).
        output.WriteRawTag(10);
        output.WriteString(MetricName);
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (MetricName.Length != 0) {
        // 1 byte for the tag plus the varint-prefixed string payload.
        size += 1 + pb::CodedOutputStream.ComputeStringSize(MetricName);
      }
      return size;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(GetLogMetricRequest other) {
      if (other == null) {
        return;
      }
      // Proto3 merge semantics: non-default scalar fields overwrite.
      if (other.MetricName.Length != 0) {
        MetricName = other.MetricName;
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            // Unknown fields are skipped (this generator version does not retain them).
            input.SkipLastField();
            break;
          case 10: {
            MetricName = input.ReadString();
            break;
          }
        }
      }
    }
  }
  /// <summary>
  /// The parameters to CreateLogMetric.
  /// </summary>
  public sealed partial class CreateLogMetricRequest : pb::IMessage<CreateLogMetricRequest> {
    // NOTE(review): protoc-generated message (see header: "DO NOT EDIT").
    // Only comments are added here; any code change would be lost on regeneration.
    private static readonly pb::MessageParser<CreateLogMetricRequest> _parser = new pb::MessageParser<CreateLogMetricRequest>(() => new CreateLogMetricRequest());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<CreateLogMetricRequest> Parser { get { return _parser; } }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Google.Cloud.Logging.V2.LoggingMetricsReflection.Descriptor.MessageTypes[4]; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public CreateLogMetricRequest() {
      OnConstruction();
    }
    partial void OnConstruction();
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public CreateLogMetricRequest(CreateLogMetricRequest other) : this() {
      parent_ = other.parent_;
      // Message fields are deep-cloned so the copies do not share mutable state.
      Metric = other.metric_ != null ? other.Metric.Clone() : null;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public CreateLogMetricRequest Clone() {
      return new CreateLogMetricRequest(this);
    }
    /// <summary>Field number for the "parent" field.</summary>
    public const int ParentFieldNumber = 1;
    private string parent_ = "";
    /// <summary>
    /// The resource name of the project in which to create the metric:
    ///
    ///     "projects/[PROJECT_ID]"
    ///
    /// The new metric must be provided in the request.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string Parent {
      get { return parent_; }
      set {
        parent_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }
    /// <summary>Field number for the "metric" field.</summary>
    public const int MetricFieldNumber = 2;
    private global::Google.Cloud.Logging.V2.LogMetric metric_;
    /// <summary>
    /// The new logs-based metric, which must not have an identifier that
    /// already exists.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::Google.Cloud.Logging.V2.LogMetric Metric {
      get { return metric_; }
      set {
        // Message fields may be null (meaning "not set"), unlike string fields.
        metric_ = value;
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as CreateLogMetricRequest);
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(CreateLogMetricRequest other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (Parent != other.Parent) return false;
      if (!object.Equals(Metric, other.Metric)) return false;
      return true;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (Parent.Length != 0) hash ^= Parent.GetHashCode();
      if (metric_ != null) hash ^= Metric.GetHashCode();
      return hash;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      if (Parent.Length != 0) {
        // Raw tag 10 = field 1, wire type 2 (length-delimited).
        output.WriteRawTag(10);
        output.WriteString(Parent);
      }
      if (metric_ != null) {
        // Raw tag 18 = field 2, wire type 2 (embedded message).
        output.WriteRawTag(18);
        output.WriteMessage(Metric);
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (Parent.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(Parent);
      }
      if (metric_ != null) {
        size += 1 + pb::CodedOutputStream.ComputeMessageSize(Metric);
      }
      return size;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(CreateLogMetricRequest other) {
      if (other == null) {
        return;
      }
      if (other.Parent.Length != 0) {
        Parent = other.Parent;
      }
      // Message fields merge recursively rather than overwriting wholesale.
      if (other.metric_ != null) {
        if (metric_ == null) {
          metric_ = new global::Google.Cloud.Logging.V2.LogMetric();
        }
        Metric.MergeFrom(other.Metric);
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            Parent = input.ReadString();
            break;
          }
          case 18: {
            if (metric_ == null) {
              metric_ = new global::Google.Cloud.Logging.V2.LogMetric();
            }
            input.ReadMessage(metric_);
            break;
          }
        }
      }
    }
  }
  /// <summary>
  /// The parameters to UpdateLogMetric.
  /// </summary>
  public sealed partial class UpdateLogMetricRequest : pb::IMessage<UpdateLogMetricRequest> {
    // NOTE(review): protoc-generated message (see header: "DO NOT EDIT").
    // Only comments are added here; any code change would be lost on regeneration.
    private static readonly pb::MessageParser<UpdateLogMetricRequest> _parser = new pb::MessageParser<UpdateLogMetricRequest>(() => new UpdateLogMetricRequest());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<UpdateLogMetricRequest> Parser { get { return _parser; } }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Google.Cloud.Logging.V2.LoggingMetricsReflection.Descriptor.MessageTypes[5]; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public UpdateLogMetricRequest() {
      OnConstruction();
    }
    partial void OnConstruction();
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public UpdateLogMetricRequest(UpdateLogMetricRequest other) : this() {
      metricName_ = other.metricName_;
      // Message fields are deep-cloned so the copies do not share mutable state.
      Metric = other.metric_ != null ? other.Metric.Clone() : null;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public UpdateLogMetricRequest Clone() {
      return new UpdateLogMetricRequest(this);
    }
    /// <summary>Field number for the "metric_name" field.</summary>
    public const int MetricNameFieldNumber = 1;
    private string metricName_ = "";
    /// <summary>
    /// The resource name of the metric to update:
    ///
    ///     "projects/[PROJECT_ID]/metrics/[METRIC_ID]"
    ///
    /// The updated metric must be provided in the request and it's
    /// `name` field must be the same as `[METRIC_ID]` If the metric
    /// does not exist in `[PROJECT_ID]`, then a new metric is created.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string MetricName {
      get { return metricName_; }
      set {
        metricName_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }
    /// <summary>Field number for the "metric" field.</summary>
    public const int MetricFieldNumber = 2;
    private global::Google.Cloud.Logging.V2.LogMetric metric_;
    /// <summary>
    /// The updated metric.
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public global::Google.Cloud.Logging.V2.LogMetric Metric {
      get { return metric_; }
      set {
        // Message fields may be null (meaning "not set"), unlike string fields.
        metric_ = value;
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as UpdateLogMetricRequest);
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(UpdateLogMetricRequest other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (MetricName != other.MetricName) return false;
      if (!object.Equals(Metric, other.Metric)) return false;
      return true;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (MetricName.Length != 0) hash ^= MetricName.GetHashCode();
      if (metric_ != null) hash ^= Metric.GetHashCode();
      return hash;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      if (MetricName.Length != 0) {
        // Raw tag 10 = field 1, wire type 2 (length-delimited).
        output.WriteRawTag(10);
        output.WriteString(MetricName);
      }
      if (metric_ != null) {
        // Raw tag 18 = field 2, wire type 2 (embedded message).
        output.WriteRawTag(18);
        output.WriteMessage(Metric);
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (MetricName.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(MetricName);
      }
      if (metric_ != null) {
        size += 1 + pb::CodedOutputStream.ComputeMessageSize(Metric);
      }
      return size;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(UpdateLogMetricRequest other) {
      if (other == null) {
        return;
      }
      if (other.MetricName.Length != 0) {
        MetricName = other.MetricName;
      }
      // Message fields merge recursively rather than overwriting wholesale.
      if (other.metric_ != null) {
        if (metric_ == null) {
          metric_ = new global::Google.Cloud.Logging.V2.LogMetric();
        }
        Metric.MergeFrom(other.Metric);
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            input.SkipLastField();
            break;
          case 10: {
            MetricName = input.ReadString();
            break;
          }
          case 18: {
            if (metric_ == null) {
              metric_ = new global::Google.Cloud.Logging.V2.LogMetric();
            }
            input.ReadMessage(metric_);
            break;
          }
        }
      }
    }
  }
  /// <summary>
  /// The parameters to DeleteLogMetric.
  /// </summary>
  public sealed partial class DeleteLogMetricRequest : pb::IMessage<DeleteLogMetricRequest> {
    // NOTE(review): protoc-generated message (see header: "DO NOT EDIT").
    // Only comments are added here; any code change would be lost on regeneration.
    private static readonly pb::MessageParser<DeleteLogMetricRequest> _parser = new pb::MessageParser<DeleteLogMetricRequest>(() => new DeleteLogMetricRequest());
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pb::MessageParser<DeleteLogMetricRequest> Parser { get { return _parser; } }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public static pbr::MessageDescriptor Descriptor {
      get { return global::Google.Cloud.Logging.V2.LoggingMetricsReflection.Descriptor.MessageTypes[6]; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    pbr::MessageDescriptor pb::IMessage.Descriptor {
      get { return Descriptor; }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public DeleteLogMetricRequest() {
      OnConstruction();
    }
    partial void OnConstruction();
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public DeleteLogMetricRequest(DeleteLogMetricRequest other) : this() {
      metricName_ = other.metricName_;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public DeleteLogMetricRequest Clone() {
      return new DeleteLogMetricRequest(this);
    }
    /// <summary>Field number for the "metric_name" field.</summary>
    public const int MetricNameFieldNumber = 1;
    private string metricName_ = "";
    /// <summary>
    /// The resource name of the metric to delete:
    ///
    ///     "projects/[PROJECT_ID]/metrics/[METRIC_ID]"
    /// </summary>
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public string MetricName {
      get { return metricName_; }
      set {
        // Proto3 string fields are never null; empty string is the default.
        metricName_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override bool Equals(object other) {
      return Equals(other as DeleteLogMetricRequest);
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public bool Equals(DeleteLogMetricRequest other) {
      if (ReferenceEquals(other, null)) {
        return false;
      }
      if (ReferenceEquals(other, this)) {
        return true;
      }
      if (MetricName != other.MetricName) return false;
      return true;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override int GetHashCode() {
      int hash = 1;
      if (MetricName.Length != 0) hash ^= MetricName.GetHashCode();
      return hash;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public override string ToString() {
      return pb::JsonFormatter.ToDiagnosticString(this);
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void WriteTo(pb::CodedOutputStream output) {
      if (MetricName.Length != 0) {
        // Raw tag 10 = field 1, wire type 2 (length-delimited string).
        output.WriteRawTag(10);
        output.WriteString(MetricName);
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public int CalculateSize() {
      int size = 0;
      if (MetricName.Length != 0) {
        size += 1 + pb::CodedOutputStream.ComputeStringSize(MetricName);
      }
      return size;
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(DeleteLogMetricRequest other) {
      if (other == null) {
        return;
      }
      if (other.MetricName.Length != 0) {
        MetricName = other.MetricName;
      }
    }
    [global::System.Diagnostics.DebuggerNonUserCodeAttribute]
    public void MergeFrom(pb::CodedInputStream input) {
      uint tag;
      while ((tag = input.ReadTag()) != 0) {
        switch(tag) {
          default:
            // Unknown fields are skipped (this generator version does not retain them).
            input.SkipLastField();
            break;
          case 10: {
            MetricName = input.ReadString();
            break;
          }
        }
      }
    }
  }
#endregion
}
#endregion Designer generated code
| |
// Copyright (c) ppy Pty Ltd <[email protected]>. Licensed under the MIT Licence.
// See the LICENCE file in the repository root for full licence text.
using osu.Framework.Allocation;
using osu.Framework.Bindables;
using osu.Framework.Graphics;
using osu.Framework.Graphics.Containers;
using osu.Framework.Graphics.Shapes;
using osu.Framework.Input.Events;
using osu.Game.Graphics;
using osu.Game.Graphics.UserInterface;
using osu.Game.Tournament.Components;
using osu.Game.Tournament.Models;
using osu.Game.Tournament.Screens.Showcase;
using osuTK;
using osuTK.Graphics;
using osuTK.Input;
namespace osu.Game.Tournament.Screens.Gameplay.Components
{
    /// <summary>
    /// The header bar shown during tournament gameplay: the tournament logo,
    /// the current round name, and a score display for each team.
    /// </summary>
    public class MatchHeader : Container
    {
        [BackgroundDependencyLoader]
        private void load()
        {
            RelativeSizeAxes = Axes.X;
            Height = 95;

            Children = new Drawable[]
            {
                new TournamentLogo(),
                new RoundDisplay
                {
                    Y = 5,
                    Anchor = Anchor.BottomCentre,
                    Origin = Anchor.TopCentre,
                },
                // Red team anchored left, blue team anchored right.
                new TeamScoreDisplay(TeamColour.Red)
                {
                    Anchor = Anchor.TopLeft,
                    Origin = Anchor.TopLeft,
                },
                new TeamScoreDisplay(TeamColour.Blue)
                {
                    Anchor = Anchor.TopRight,
                    Origin = Anchor.TopRight,
                },
            };
        }

        /// <summary>
        /// Displays one team's name/flag and star-based score, and lets the
        /// operator adjust the score with left/right mouse clicks.
        /// </summary>
        private class TeamScoreDisplay : CompositeDrawable
        {
            private readonly TeamColour teamColour;

            private readonly Bindable<TournamentMatch> currentMatch = new Bindable<TournamentMatch>();
            private readonly Bindable<TournamentTeam> currentTeam = new Bindable<TournamentTeam>();
            private readonly Bindable<int?> currentTeamScore = new Bindable<int?>();

            public TeamScoreDisplay(TeamColour teamColour)
            {
                this.teamColour = teamColour;

                RelativeSizeAxes = Axes.Y;
                Width = 300;
            }

            [BackgroundDependencyLoader]
            private void load(LadderInfo ladder)
            {
                // Register the callback before BindTo so the initial value
                // propagated by the binding also triggers matchChanged.
                currentMatch.BindValueChanged(matchChanged);
                currentMatch.BindTo(ladder.CurrentMatch);
            }

            private void matchChanged(ValueChangedEvent<TournamentMatch> match)
            {
                // Rebind score/team to the newly selected match. Unbinding first
                // is required so we stop mirroring the previous match's bindables.
                // NOTE(review): match.NewValue is dereferenced without a null
                // check — confirm CurrentMatch can never be set to null here.
                currentTeamScore.UnbindBindings();
                currentTeamScore.BindTo(teamColour == TeamColour.Red ? match.NewValue.Team1Score : match.NewValue.Team2Score);

                currentTeam.UnbindBindings();
                currentTeam.BindTo(teamColour == TeamColour.Red ? match.NewValue.Team1 : match.NewValue.Team2);

                // team may change to same team, which means score is not in a good state.
                // thus we handle this manually.
                teamChanged(currentTeam.Value);
            }

            protected override bool OnMouseDown(MouseDownEvent e)
            {
                // Left click increments the score (capped at points-to-win),
                // right click decrements (floored at zero).
                // Note: if the score is null, both lifted comparisons evaluate
                // to false, so clicks are no-ops until a score is bound.
                switch (e.Button)
                {
                    case MouseButton.Left:
                        if (currentTeamScore.Value < currentMatch.Value.PointsToWin)
                            currentTeamScore.Value++;
                        return true;

                    case MouseButton.Right:
                        if (currentTeamScore.Value > 0)
                            currentTeamScore.Value--;
                        return true;
                }

                return base.OnMouseDown(e);
            }

            private void teamChanged(TournamentTeam team)
            {
                // Rebuild the display from scratch for the (possibly new) team.
                var colour = teamColour == TeamColour.Red ? TournamentGame.COLOUR_RED : TournamentGame.COLOUR_BLUE;
                var flip = teamColour != TeamColour.Red;

                InternalChildren = new Drawable[]
                {
                    new TeamDisplay(team, colour, flip),
                    new TeamScore(currentTeamScore, flip, currentMatch.Value.PointsToWin)
                    {
                        Colour = colour
                    }
                };
            }
        }

        /// <summary>
        /// Renders a team's score as filled stars out of the points required to win.
        /// </summary>
        private class TeamScore : CompositeDrawable
        {
            private readonly Bindable<int?> currentTeamScore = new Bindable<int?>();
            private readonly StarCounter counter;

            public TeamScore(Bindable<int?> score, bool flip, int count)
            {
                var anchor = flip ? Anchor.CentreRight : Anchor.CentreLeft;

                Anchor = anchor;
                Origin = anchor;

                InternalChild = counter = new StarCounter(count)
                {
                    Anchor = anchor,
                    X = (flip ? -1 : 1) * 90,
                    Y = 5,
                    // Mirror horizontally for the right-hand (blue) side.
                    Scale = flip ? new Vector2(-1, 1) : Vector2.One,
                };

                // Register before BindTo so the initial bound value updates the counter.
                currentTeamScore.BindValueChanged(scoreChanged);
                currentTeamScore.BindTo(score);
            }

            // A null score renders as zero stars.
            private void scoreChanged(ValueChangedEvent<int?> score) => counter.CountStars = score.NewValue ?? 0;
        }

        /// <summary>
        /// Shows a team's flag and name, mirrored for the right-hand side.
        /// </summary>
        private class TeamDisplay : DrawableTournamentTeam
        {
            public TeamDisplay(TournamentTeam team, Color4 colour, bool flip)
                : base(team)
            {
                RelativeSizeAxes = Axes.Both;

                var anchor = flip ? Anchor.CentreRight : Anchor.CentreLeft;

                Anchor = Origin = anchor;

                Flag.Anchor = Flag.Origin = anchor;
                Flag.RelativeSizeAxes = Axes.None;
                Flag.Size = new Vector2(60, 40);
                Flag.Margin = new MarginPadding(20);

                InternalChild = new Container
                {
                    RelativeSizeAxes = Axes.Both,
                    Children = new Drawable[]
                    {
                        Flag,
                        new TournamentSpriteText
                        {
                            // "???" placeholder when no team is assigned yet.
                            Text = team?.FullName.Value.ToUpper() ?? "???",
                            X = (flip ? -1 : 1) * 90,
                            Y = -10,
                            Colour = colour,
                            Font = OsuFont.Torus.With(weight: FontWeight.Regular, size: 20),
                            Origin = anchor,
                            Anchor = anchor,
                        },
                    }
                };
            }
        }

        /// <summary>
        /// A rounded pill showing the name of the round the current match belongs to.
        /// </summary>
        private class RoundDisplay : CompositeDrawable
        {
            private readonly Bindable<TournamentMatch> currentMatch = new Bindable<TournamentMatch>();

            private readonly TournamentSpriteText text;

            public RoundDisplay()
            {
                Width = 200;
                Height = 20;

                Masking = true;
                CornerRadius = 10;

                InternalChildren = new Drawable[]
                {
                    new Box
                    {
                        Colour = OsuColour.Gray(0.18f),
                        RelativeSizeAxes = Axes.Both,
                    },
                    text = new TournamentSpriteText
                    {
                        Anchor = Anchor.Centre,
                        Origin = Anchor.Centre,
                        Colour = Color4.White,
                        Font = OsuFont.Torus.With(weight: FontWeight.Regular, size: 16),
                    },
                };
            }

            [BackgroundDependencyLoader]
            private void load(LadderInfo ladder)
            {
                // Register before BindTo so the initial match populates the text.
                currentMatch.BindValueChanged(matchChanged);
                currentMatch.BindTo(ladder.CurrentMatch);
            }

            // NOTE(review): match.NewValue is dereferenced without a null check
            // here as well — confirm CurrentMatch is never null.
            private void matchChanged(ValueChangedEvent<TournamentMatch> match) =>
                text.Text = match.NewValue.Round.Value?.Name.Value ?? "Unknown Round";
        }
    }
}
| |
using System;
using System.Runtime.InteropServices;
using SQLite.Net.Interop;
namespace SQLite.Net.Platform.OSX
{
public class SQLiteApiOSX : ISQLiteApiExt
{
public Result Open(byte[] filename, out IDbHandle db, int flags, IntPtr zvfs)
{
IntPtr dbPtr;
Result r = SQLiteApiOSXInternal.sqlite3_open_v2(filename, out dbPtr, flags, zvfs);
db = new DbHandle(dbPtr);
return r;
}
public ExtendedResult ExtendedErrCode(IDbHandle db)
{
var internalDbHandle = (DbHandle) db;
return SQLiteApiOSXInternal.sqlite3_extended_errcode(internalDbHandle.DbPtr);
}
public int LibVersionNumber()
{
return SQLiteApiOSXInternal.sqlite3_libversion_number();
}
public string SourceID()
{
return Marshal.PtrToStringAuto(SQLiteApiOSXInternal.sqlite3_sourceid());
}
public Result EnableLoadExtension(IDbHandle db, int onoff)
{
var internalDbHandle = (DbHandle) db;
return SQLiteApiOSXInternal.sqlite3_enable_load_extension(internalDbHandle.DbPtr, onoff);
}
public Result Close(IDbHandle db)
{
var internalDbHandle = (DbHandle) db;
return SQLiteApiOSXInternal.sqlite3_close(internalDbHandle.DbPtr);
}
public Result Initialize()
{
return SQLiteApiOSXInternal.sqlite3_initialize();
}
public Result Shutdown()
{
return SQLiteApiOSXInternal.sqlite3_shutdown();
}
public Result Config(ConfigOption option)
{
return SQLiteApiOSXInternal.sqlite3_config(option);
}
public Result BusyTimeout(IDbHandle db, int milliseconds)
{
var internalDbHandle = (DbHandle) db;
return SQLiteApiOSXInternal.sqlite3_busy_timeout(internalDbHandle.DbPtr, milliseconds);
}
public int Changes(IDbHandle db)
{
var internalDbHandle = (DbHandle) db;
return SQLiteApiOSXInternal.sqlite3_changes(internalDbHandle.DbPtr);
}
public IDbStatement Prepare2(IDbHandle db, string query)
{
var internalDbHandle = (DbHandle) db;
IntPtr stmt;
Result r = SQLiteApiOSXInternal.sqlite3_prepare_v2(internalDbHandle.DbPtr, query, query.Length, out stmt, IntPtr.Zero);
if (r != Result.OK)
{
throw SQLiteException.New(r, Errmsg16(internalDbHandle));
}
return new DbStatement(stmt);
}
public Result Step(IDbStatement stmt)
{
var internalStmt = (DbStatement) stmt;
return SQLiteApiOSXInternal.sqlite3_step(internalStmt.StmtPtr);
}
public Result Reset(IDbStatement stmt)
{
var internalStmt = (DbStatement) stmt;
return SQLiteApiOSXInternal.sqlite3_reset(internalStmt.StmtPtr);
}
public Result Finalize(IDbStatement stmt)
{
var internalStmt = (DbStatement) stmt;
return SQLiteApiOSXInternal.sqlite3_finalize(internalStmt.StmtPtr);
}
public long LastInsertRowid(IDbHandle db)
{
var internalDbHandle = (DbHandle) db;
return SQLiteApiOSXInternal.sqlite3_last_insert_rowid(internalDbHandle.DbPtr);
}
public string Errmsg16(IDbHandle db)
{
var internalDbHandle = (DbHandle) db;
return Marshal.PtrToStringUni(SQLiteApiOSXInternal.sqlite3_errmsg16(internalDbHandle.DbPtr));
}
public int BindParameterIndex(IDbStatement stmt, string name)
{
var internalStmt = (DbStatement) stmt;
return SQLiteApiOSXInternal.sqlite3_bind_parameter_index(internalStmt.StmtPtr, name);
}
public int BindNull(IDbStatement stmt, int index)
{
var internalStmt = (DbStatement) stmt;
return SQLiteApiOSXInternal.sqlite3_bind_null(internalStmt.StmtPtr, index);
}
public int BindInt(IDbStatement stmt, int index, int val)
{
var internalStmt = (DbStatement) stmt;
return SQLiteApiOSXInternal.sqlite3_bind_int(internalStmt.StmtPtr, index, val);
}
public int BindInt64(IDbStatement stmt, int index, long val)
{
var internalStmt = (DbStatement) stmt;
return SQLiteApiOSXInternal.sqlite3_bind_int64(internalStmt.StmtPtr, index, val);
}
public int BindDouble(IDbStatement stmt, int index, double val)
{
var internalStmt = (DbStatement) stmt;
return SQLiteApiOSXInternal.sqlite3_bind_double(internalStmt.StmtPtr, index, val);
}
public int BindText16(IDbStatement stmt, int index, string val, int n, IntPtr free)
{
var internalStmt = (DbStatement) stmt;
return SQLiteApiOSXInternal.sqlite3_bind_text16(internalStmt.StmtPtr, index, val, n, free);
}
public int BindBlob(IDbStatement stmt, int index, byte[] val, int n, IntPtr free)
{
var internalStmt = (DbStatement) stmt;
return SQLiteApiOSXInternal.sqlite3_bind_blob(internalStmt.StmtPtr, index, val, n, free);
}
public int ColumnCount(IDbStatement stmt)
{
var internalStmt = (DbStatement) stmt;
return SQLiteApiOSXInternal.sqlite3_column_count(internalStmt.StmtPtr);
}
public string ColumnName16(IDbStatement stmt, int index)
{
var internalStmt = (DbStatement) stmt;
return SQLiteApiOSXInternal.ColumnName16(internalStmt.StmtPtr, index);
}
public ColType ColumnType(IDbStatement stmt, int index)
{
var internalStmt = (DbStatement) stmt;
return SQLiteApiOSXInternal.sqlite3_column_type(internalStmt.StmtPtr, index);
}
public int ColumnInt(IDbStatement stmt, int index)
{
var internalStmt = (DbStatement) stmt;
return SQLiteApiOSXInternal.sqlite3_column_int(internalStmt.StmtPtr, index);
}
public long ColumnInt64(IDbStatement stmt, int index)
{
var internalStmt = (DbStatement) stmt;
return SQLiteApiOSXInternal.sqlite3_column_int64(internalStmt.StmtPtr, index);
}
public double ColumnDouble(IDbStatement stmt, int index)
{
var internalStmt = (DbStatement) stmt;
return SQLiteApiOSXInternal.sqlite3_column_double(internalStmt.StmtPtr, index);
}
public string ColumnText16(IDbStatement stmt, int index)
{
var internalStmt = (DbStatement) stmt;
return Marshal.PtrToStringUni(SQLiteApiOSXInternal.sqlite3_column_text16(internalStmt.StmtPtr, index));
}
public byte[] ColumnBlob(IDbStatement stmt, int index)
{
var internalStmt = (DbStatement) stmt;
return SQLiteApiOSXInternal.ColumnBlob(internalStmt.StmtPtr, index);
}
public int ColumnBytes(IDbStatement stmt, int index)
{
var internalStmt = (DbStatement) stmt;
return SQLiteApiOSXInternal.sqlite3_column_bytes(internalStmt.StmtPtr, index);
}
public byte[] ColumnByteArray(IDbStatement stmt, int index)
{
var internalStmt = (DbStatement) stmt;
return SQLiteApiOSXInternal.ColumnByteArray(internalStmt.StmtPtr, index);
}
#region Backup
public IDbBackupHandle BackupInit(IDbHandle destHandle, string destName, IDbHandle srcHandle, string srcName) {
var internalDestDb = (DbHandle)destHandle;
var internalSrcDb = (DbHandle)srcHandle;
IntPtr p = SQLiteApiOSXInternal.sqlite3_backup_init(internalDestDb.DbPtr,
destName,
internalSrcDb.DbPtr,
srcName);
if(p == IntPtr.Zero) {
return null;
} else {
return new DbBackupHandle(p);
}
}
public Result BackupStep(IDbBackupHandle handle, int pageCount) {
var internalBackup = (DbBackupHandle)handle;
return SQLiteApiOSXInternal.sqlite3_backup_step(internalBackup.DbBackupPtr, pageCount);
}
public Result BackupFinish(IDbBackupHandle handle) {
var internalBackup = (DbBackupHandle)handle;
return SQLiteApiOSXInternal.sqlite3_backup_finish(internalBackup.DbBackupPtr);
}
public int BackupRemaining(IDbBackupHandle handle) {
var internalBackup = (DbBackupHandle)handle;
return SQLiteApiOSXInternal.sqlite3_backup_remaining(internalBackup.DbBackupPtr);
}
public int BackupPagecount(IDbBackupHandle handle) {
var internalBackup = (DbBackupHandle)handle;
return SQLiteApiOSXInternal.sqlite3_backup_pagecount(internalBackup.DbBackupPtr);
}
public int Sleep(int millis) {
return SQLiteApiOSXInternal.sqlite3_sleep(millis);
}
private struct DbBackupHandle : IDbBackupHandle {
public DbBackupHandle(IntPtr dbBackupPtr) : this() {
DbBackupPtr = dbBackupPtr;
}
internal IntPtr DbBackupPtr { get; set; }
public bool Equals(IDbBackupHandle other) {
return other is DbBackupHandle && DbBackupPtr == ((DbBackupHandle)other).DbBackupPtr;
}
}
#endregion
private struct DbHandle : IDbHandle
{
public DbHandle(IntPtr dbPtr) : this()
{
DbPtr = dbPtr;
}
internal IntPtr DbPtr { get; set; }
public bool Equals(IDbHandle other)
{
return other is DbHandle && DbPtr == ((DbHandle) other).DbPtr;
}
}
private struct DbStatement : IDbStatement
{
public DbStatement(IntPtr stmtPtr) : this()
{
StmtPtr = stmtPtr;
}
internal IntPtr StmtPtr { get; set; }
public bool Equals(IDbStatement other)
{
return other is DbStatement && StmtPtr == ((DbStatement) other).StmtPtr;
}
}
}
}
| |
using Discord;
using Discord.Commands;
using ImageSharp;
using NadekoBot.Attributes;
using NadekoBot.Extensions;
using NadekoBot.Services;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using Image = ImageSharp.Image;
namespace NadekoBot.Modules.Gambling
{
public partial class Gambling
{
[Group]
public class DriceRollCommands : ModuleBase
{
private Regex dndRegex { get; } = new Regex(@"^(?<n1>\d+)d(?<n2>\d+)(?:\+(?<add>\d+))?(?:\-(?<sub>\d+))?$", RegexOptions.Compiled);
private Regex fudgeRegex { get; } = new Regex(@"^(?<n1>\d+)d(?:F|f)$", RegexOptions.Compiled);
private readonly char[] fateRolls = new[] { '-', ' ', '+' };
[NadekoCommand, Usage, Description, Aliases]
public async Task Roll()
{
var rng = new NadekoRandom();
var gen = rng.Next(1, 101);
var num1 = gen / 10;
var num2 = gen % 10;
var imageStream = await Task.Run(() =>
{
var ms = new MemoryStream();
new[] { GetDice(num1), GetDice(num2) }.Merge().SaveAsPng(ms);
ms.Position = 0;
return ms;
}).ConfigureAwait(false);
await Context.Channel.SendFileAsync(imageStream, "dice.png", $"{Context.User.Mention} rolled " + Format.Code(gen.ToString())).ConfigureAwait(false);
}
public enum RollOrderType
{
Ordered,
Unordered
}
[NadekoCommand, Usage, Description, Aliases]
[Priority(0)]
public async Task Roll(int num)
{
await InternalRoll(num, true).ConfigureAwait(false);
}
[NadekoCommand, Usage, Description, Aliases]
[Priority(0)]
public async Task Rolluo(int num)
{
await InternalRoll(num, false).ConfigureAwait(false);
}
[NadekoCommand, Usage, Description, Aliases]
[Priority(1)]
public async Task Roll(string arg)
{
await InternallDndRoll(arg, true).ConfigureAwait(false);
}
[NadekoCommand, Usage, Description, Aliases]
[Priority(1)]
public async Task Rolluo(string arg)
{
await InternallDndRoll(arg, false).ConfigureAwait(false);
}
private async Task InternalRoll(int num, bool ordered)
{
if (num < 1 || num > 30)
{
await Context.Channel.SendErrorAsync("Invalid number specified. You can roll up to 1-30 dice at a time.").ConfigureAwait(false);
return;
}
var rng = new NadekoRandom();
var dice = new List<Image>(num);
var values = new List<int>(num);
for (var i = 0; i < num; i++)
{
var randomNumber = rng.Next(1, 7);
var toInsert = dice.Count;
if (ordered)
{
if (randomNumber == 6 || dice.Count == 0)
toInsert = 0;
else if (randomNumber != 1)
for (var j = 0; j < dice.Count; j++)
{
if (values[j] < randomNumber)
{
toInsert = j;
break;
}
}
}
else
{
toInsert = dice.Count;
}
dice.Insert(toInsert, GetDice(randomNumber));
values.Insert(toInsert, randomNumber);
}
var bitmap = dice.Merge();
var ms = new MemoryStream();
bitmap.SaveAsPng(ms);
ms.Position = 0;
await Context.Channel.SendFileAsync(ms, "dice.png", $"{Context.User.Mention} rolled {values.Count} {(values.Count == 1 ? "die" : "dice")}. Total: **{values.Sum()}** Average: **{(values.Sum() / (1.0f * values.Count)).ToString("N2")}**").ConfigureAwait(false);
}
/// <summary>
/// Parses <paramref name="arg"/> as a dice expression and replies with an embed.
/// Two forms are recognised: fate/fudge dice (1-499 dice, faces drawn from
/// <c>fateRolls</c>) and standard dice ("NdM" with optional +x/-y flat modifiers,
/// at most 50 dice of at most 100000 sides). Any other input is silently ignored.
/// </summary>
/// <param name="arg">Raw dice expression supplied by the user.</param>
/// <param name="ordered">When true, standard rolls are displayed sorted ascending.</param>
private async Task InternallDndRoll(string arg, bool ordered)
{
    Match match;
    int n1;
    int n2;
    // Match.Success is the documented way to test whether a regex matched
    // (the original compared Match.Length to 0, which conflates an empty
    // match with a failed one).
    if ((match = fudgeRegex.Match(arg)).Success &&
        int.TryParse(match.Groups["n1"].ToString(), out n1) &&
        n1 > 0 && n1 < 500)
    {
        var rng = new NadekoRandom();
        var rolls = new List<char>();
        for (int i = 0; i < n1; i++)
        {
            rolls.Add(fateRolls[rng.Next(0, fateRolls.Length)]);
        }
        var embed = new EmbedBuilder().WithOkColor().WithDescription($"{Context.User.Mention} rolled {n1} fate {(n1 == 1 ? "die" : "dice")}.")
            .AddField(efb => efb.WithName(Format.Bold("Result"))
                .WithValue(string.Join(" ", rolls.Select(c => Format.Code($"[{c}]")))));
        await Context.Channel.EmbedAsync(embed).ConfigureAwait(false);
    }
    else if ((match = dndRegex.Match(arg)).Success)
    {
        var rng = new NadekoRandom();
        if (int.TryParse(match.Groups["n1"].ToString(), out n1) &&
            int.TryParse(match.Groups["n2"].ToString(), out n2) &&
            n1 <= 50 && n2 <= 100000 && n1 > 0 && n2 > 0)
        {
            // Optional flat modifiers; TryParse leaves the initialised 0
            // when the named group captured nothing.
            var add = 0;
            var sub = 0;
            int.TryParse(match.Groups["add"].Value, out add);
            int.TryParse(match.Groups["sub"].Value, out sub);
            var arr = new int[n1];
            for (int i = 0; i < n1; i++)
            {
                arr[i] = rng.Next(1, n2 + 1);
            }
            var sum = arr.Sum();
            var embed = new EmbedBuilder().WithOkColor().WithDescription($"{Context.User.Mention} rolled {n1} {(n1 == 1 ? "die" : "dice")} `1 to {n2}`")
                .AddField(efb => efb.WithName(Format.Bold("Rolls"))
                    .WithValue(string.Join(" ", (ordered ? arr.OrderBy(x => x).AsEnumerable() : arr).Select(x => Format.Code(x.ToString())))))
                .AddField(efb => efb.WithName(Format.Bold("Sum"))
                    .WithValue(sum + " + " + add + " - " + sub + " = " + (sum + add - sub)));
            await Context.Channel.EmbedAsync(embed).ConfigureAwait(false);
        }
    }
}
[NadekoCommand, Usage, Description, Aliases]
public async Task NRoll([Remainder] string range)
{
    // Rolls either within an inclusive "min-max" range, or from 0 up to a
    // single number. Bad input is reported back to the user as an error.
    try
    {
        int rolled;
        if (!range.Contains("-"))
        {
            // Single number: roll 0..N inclusive.
            rolled = new NadekoRandom().Next(0, int.Parse(range) + 1);
        }
        else
        {
            // "min-max": only the first two dash-separated parts are used.
            var bounds = range.Split('-')
                .Take(2)
                .Select(int.Parse)
                .ToArray();
            if (bounds[0] > bounds[1])
                throw new ArgumentException("Second argument must be larger than the first one.");
            rolled = new NadekoRandom().Next(bounds[0], bounds[1] + 1);
        }
        await Context.Channel.SendConfirmAsync($"{Context.User.Mention} rolled **{rolled}**.").ConfigureAwait(false);
    }
    catch (Exception ex)
    {
        // Parse failures and range errors are surfaced, not swallowed.
        await Context.Channel.SendErrorAsync($":anger: {ex.Message}").ConfigureAwait(false);
    }
}
// Returns the image for a single die face. Values 0-9 map directly to one
// stored digit image; 10 is rendered by merging the "1" and "0" images.
// Throws ArgumentOutOfRangeException for anything outside 0-10.
private Image GetDice(int num)
{
    if (num < 0 || num > 10)
        throw new ArgumentOutOfRangeException(nameof(num));
    if (num == 10)
    {
        var images = NadekoBot.Images.Dice;
        // Streams are disposed once the Images have been constructed from them.
        using (var imgOneStream = images[1].Value.ToStream())
        using (var imgZeroStream = images[0].Value.ToStream())
        {
            Image imgOne = new Image(imgOneStream);
            Image imgZero = new Image(imgZeroStream);
            // NOTE(review): imgOne/imgZero are never disposed after Merge —
            // presumably Merge copies their pixel data into the returned
            // image; confirm against the Merge extension before changing.
            return new[] { imgOne, imgZero }.Merge();
        }
    }
    using (var die = NadekoBot.Images.Dice[num].Value.ToStream())
    {
        return new Image(die);
    }
}
}
}
}
| |
namespace fyiReporting.RdlDesign
{
// Designer-generated partial class: control layout, text and positions live in
// the matching .resx; edit via the WinForms designer rather than by hand.
partial class StaticSeriesCtl
{
    /// <summary>
    /// Required designer variable.
    /// </summary>
    private System.ComponentModel.IContainer components = null;
    /// <summary>
    /// Clean up any resources being used.
    /// </summary>
    /// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
    protected override void Dispose(bool disposing)
    {
        if (disposing && (components != null))
        {
            components.Dispose();
        }
        base.Dispose(disposing);
    }
    #region Component Designer generated code
    /// <summary>
    /// Required method for Designer support - do not modify
    /// the contents of this method with the code editor.
    /// </summary>
    private void InitializeComponent()
    {
        System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(StaticSeriesCtl));
        this.label1 = new System.Windows.Forms.Label();
        this.lbDataSeries = new System.Windows.Forms.ListBox();
        this.label2 = new System.Windows.Forms.Label();
        this.label3 = new System.Windows.Forms.Label();
        this.chkShowLabels = new System.Windows.Forms.CheckBox();
        this.txtSeriesName = new System.Windows.Forms.TextBox();
        this.txtLabelValue = new System.Windows.Forms.TextBox();
        this.btnAdd = new System.Windows.Forms.Button();
        this.btnDel = new System.Windows.Forms.Button();
        this.btnLabelValue = new System.Windows.Forms.Button();
        this.btnDataValue = new System.Windows.Forms.Button();
        this.btnSeriesName = new System.Windows.Forms.Button();
        this.txtDataValue = new System.Windows.Forms.TextBox();
        this.label4 = new System.Windows.Forms.Label();
        this.cbPlotType = new System.Windows.Forms.ComboBox();
        this.chkLeft = new System.Windows.Forms.RadioButton();
        this.chkRight = new System.Windows.Forms.RadioButton();
        this.label5 = new System.Windows.Forms.Label();
        this.btnUp = new System.Windows.Forms.Button();
        this.btnDown = new System.Windows.Forms.Button();
        this.txtX = new System.Windows.Forms.TextBox();
        this.label6 = new System.Windows.Forms.Label();
        this.btnX = new System.Windows.Forms.Button();
        this.chkMarker = new System.Windows.Forms.CheckBox();
        this.label7 = new System.Windows.Forms.Label();
        this.cbLine = new System.Windows.Forms.ComboBox();
        this.label8 = new System.Windows.Forms.Label();
        this.colorPicker1 = new fyiReporting.RdlDesign.ColorPicker();
        this.SuspendLayout();
        //
        // label1
        //
        resources.ApplyResources(this.label1, "label1");
        this.label1.Name = "label1";
        //
        // lbDataSeries
        //
        this.lbDataSeries.FormattingEnabled = true;
        resources.ApplyResources(this.lbDataSeries, "lbDataSeries");
        this.lbDataSeries.Name = "lbDataSeries";
        this.lbDataSeries.SelectedIndexChanged += new System.EventHandler(this.lbDataSeries_SelectedIndexChanged);
        //
        // label2
        //
        resources.ApplyResources(this.label2, "label2");
        this.label2.Name = "label2";
        //
        // label3
        //
        resources.ApplyResources(this.label3, "label3");
        this.label3.Name = "label3";
        //
        // chkShowLabels
        //
        resources.ApplyResources(this.chkShowLabels, "chkShowLabels");
        this.chkShowLabels.Name = "chkShowLabels";
        this.chkShowLabels.UseVisualStyleBackColor = true;
        this.chkShowLabels.CheckedChanged += new System.EventHandler(this.chkShowLabels_CheckedChanged);
        //
        // txtSeriesName
        //
        resources.ApplyResources(this.txtSeriesName, "txtSeriesName");
        this.txtSeriesName.Name = "txtSeriesName";
        this.txtSeriesName.TextChanged += new System.EventHandler(this.txtSeriesName_TextChanged);
        //
        // txtLabelValue
        //
        resources.ApplyResources(this.txtLabelValue, "txtLabelValue");
        this.txtLabelValue.Name = "txtLabelValue";
        this.txtLabelValue.TextChanged += new System.EventHandler(this.txtLabelValue_TextChanged);
        //
        // btnAdd
        //
        resources.ApplyResources(this.btnAdd, "btnAdd");
        this.btnAdd.Name = "btnAdd";
        this.btnAdd.UseVisualStyleBackColor = true;
        this.btnAdd.Click += new System.EventHandler(this.btnAdd_Click);
        //
        // btnDel
        //
        resources.ApplyResources(this.btnDel, "btnDel");
        this.btnDel.Name = "btnDel";
        this.btnDel.UseVisualStyleBackColor = true;
        this.btnDel.Click += new System.EventHandler(this.btnDel_Click);
        //
        // btnLabelValue
        //
        resources.ApplyResources(this.btnLabelValue, "btnLabelValue");
        this.btnLabelValue.Name = "btnLabelValue";
        this.btnLabelValue.UseVisualStyleBackColor = true;
        this.btnLabelValue.Click += new System.EventHandler(this.FunctionButtonClick);
        //
        // btnDataValue
        //
        resources.ApplyResources(this.btnDataValue, "btnDataValue");
        this.btnDataValue.Name = "btnDataValue";
        this.btnDataValue.UseVisualStyleBackColor = true;
        this.btnDataValue.Click += new System.EventHandler(this.FunctionButtonClick);
        //
        // btnSeriesName
        //
        resources.ApplyResources(this.btnSeriesName, "btnSeriesName");
        this.btnSeriesName.Name = "btnSeriesName";
        this.btnSeriesName.UseVisualStyleBackColor = true;
        this.btnSeriesName.Click += new System.EventHandler(this.FunctionButtonClick);
        //
        // txtDataValue
        //
        resources.ApplyResources(this.txtDataValue, "txtDataValue");
        this.txtDataValue.Name = "txtDataValue";
        this.txtDataValue.TextChanged += new System.EventHandler(this.txtDataValue_TextChanged);
        //
        // label4
        //
        resources.ApplyResources(this.label4, "label4");
        this.label4.Name = "label4";
        //
        // cbPlotType
        //
        this.cbPlotType.FormattingEnabled = true;
        this.cbPlotType.Items.AddRange(new object[] {
        resources.GetString("cbPlotType.Items"),
        resources.GetString("cbPlotType.Items1")});
        resources.ApplyResources(this.cbPlotType, "cbPlotType");
        this.cbPlotType.Name = "cbPlotType";
        this.cbPlotType.SelectedIndexChanged += new System.EventHandler(this.cbPlotType_SelectedIndexChanged);
        //
        // chkLeft
        //
        resources.ApplyResources(this.chkLeft, "chkLeft");
        this.chkLeft.Name = "chkLeft";
        this.chkLeft.TabStop = true;
        this.chkLeft.UseVisualStyleBackColor = true;
        this.chkLeft.CheckedChanged += new System.EventHandler(this.chkLeft_CheckedChanged);
        //
        // chkRight
        //
        resources.ApplyResources(this.chkRight, "chkRight");
        this.chkRight.Name = "chkRight";
        this.chkRight.TabStop = true;
        this.chkRight.UseVisualStyleBackColor = true;
        //
        // label5
        //
        resources.ApplyResources(this.label5, "label5");
        this.label5.Name = "label5";
        //
        // btnUp
        //
        resources.ApplyResources(this.btnUp, "btnUp");
        this.btnUp.Name = "btnUp";
        this.btnUp.UseVisualStyleBackColor = true;
        this.btnUp.Click += new System.EventHandler(this.btnUp_Click);
        //
        // btnDown
        //
        resources.ApplyResources(this.btnDown, "btnDown");
        this.btnDown.Name = "btnDown";
        this.btnDown.UseVisualStyleBackColor = true;
        this.btnDown.Click += new System.EventHandler(this.btnDown_Click);
        //
        // txtX
        //
        resources.ApplyResources(this.txtX, "txtX");
        this.txtX.Name = "txtX";
        this.txtX.TextChanged += new System.EventHandler(this.txtX_TextChanged);
        //
        // label6
        //
        resources.ApplyResources(this.label6, "label6");
        this.label6.Name = "label6";
        //
        // btnX
        //
        resources.ApplyResources(this.btnX, "btnX");
        this.btnX.Name = "btnX";
        this.btnX.UseVisualStyleBackColor = true;
        this.btnX.Click += new System.EventHandler(this.FunctionButtonClick);
        //
        // chkMarker
        //
        resources.ApplyResources(this.chkMarker, "chkMarker");
        this.chkMarker.Name = "chkMarker";
        this.chkMarker.UseVisualStyleBackColor = true;
        this.chkMarker.CheckedChanged += new System.EventHandler(this.chkMarker_CheckedChanged);
        //
        // label7
        //
        resources.ApplyResources(this.label7, "label7");
        this.label7.Name = "label7";
        //
        // cbLine
        //
        this.cbLine.FormattingEnabled = true;
        this.cbLine.Items.AddRange(new object[] {
        resources.GetString("cbLine.Items"),
        resources.GetString("cbLine.Items1"),
        resources.GetString("cbLine.Items2"),
        resources.GetString("cbLine.Items3"),
        resources.GetString("cbLine.Items4")});
        resources.ApplyResources(this.cbLine, "cbLine");
        this.cbLine.Name = "cbLine";
        this.cbLine.SelectedIndexChanged += new System.EventHandler(this.cbLine_SelectedIndexChanged);
        //
        // label8
        //
        resources.ApplyResources(this.label8, "label8");
        this.label8.Name = "label8";
        //
        // colorPicker1
        //
        this.colorPicker1.DrawMode = System.Windows.Forms.DrawMode.OwnerDrawFixed;
        this.colorPicker1.DropDownHeight = 1;
        this.colorPicker1.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
        resources.ApplyResources(this.colorPicker1, "colorPicker1");
        this.colorPicker1.FormattingEnabled = true;
        this.colorPicker1.Name = "colorPicker1";
        this.colorPicker1.SelectedIndexChanged += new System.EventHandler(this.colorPicker1_SelectedIndexChanged);
        //
        // StaticSeriesCtl
        //
        resources.ApplyResources(this, "$this");
        this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
        this.Controls.Add(this.label8);
        this.Controls.Add(this.colorPicker1);
        this.Controls.Add(this.cbLine);
        this.Controls.Add(this.label7);
        this.Controls.Add(this.chkMarker);
        this.Controls.Add(this.btnX);
        this.Controls.Add(this.label6);
        this.Controls.Add(this.txtX);
        this.Controls.Add(this.btnDown);
        this.Controls.Add(this.btnUp);
        this.Controls.Add(this.label5);
        this.Controls.Add(this.chkRight);
        this.Controls.Add(this.chkLeft);
        this.Controls.Add(this.cbPlotType);
        this.Controls.Add(this.label4);
        this.Controls.Add(this.txtDataValue);
        this.Controls.Add(this.btnSeriesName);
        this.Controls.Add(this.btnDataValue);
        this.Controls.Add(this.btnLabelValue);
        this.Controls.Add(this.btnDel);
        this.Controls.Add(this.btnAdd);
        this.Controls.Add(this.txtLabelValue);
        this.Controls.Add(this.txtSeriesName);
        this.Controls.Add(this.chkShowLabels);
        this.Controls.Add(this.label3);
        this.Controls.Add(this.label2);
        this.Controls.Add(this.lbDataSeries);
        this.Controls.Add(this.label1);
        this.Name = "StaticSeriesCtl";
        this.ResumeLayout(false);
        this.PerformLayout();
    }
    #endregion
    // Designer-managed control fields; names are bound to .resx resource keys,
    // so do not rename them without updating the resources.
    private System.Windows.Forms.Label label1;
    private System.Windows.Forms.ListBox lbDataSeries;
    private System.Windows.Forms.Label label2;
    private System.Windows.Forms.Label label3;
    private System.Windows.Forms.CheckBox chkShowLabels;
    private System.Windows.Forms.TextBox txtSeriesName;
    private System.Windows.Forms.TextBox txtLabelValue;
    private System.Windows.Forms.Button btnAdd;
    private System.Windows.Forms.Button btnDel;
    private System.Windows.Forms.Button btnLabelValue;
    private System.Windows.Forms.Button btnDataValue;
    private System.Windows.Forms.Button btnSeriesName;
    private System.Windows.Forms.TextBox txtDataValue;
    private System.Windows.Forms.Label label4;
    private System.Windows.Forms.ComboBox cbPlotType;
    private System.Windows.Forms.RadioButton chkLeft;
    private System.Windows.Forms.RadioButton chkRight;
    private System.Windows.Forms.Label label5;
    private System.Windows.Forms.Button btnUp;
    private System.Windows.Forms.Button btnDown;
    private System.Windows.Forms.TextBox txtX;
    private System.Windows.Forms.Label label6;
    private System.Windows.Forms.Button btnX;
    private System.Windows.Forms.CheckBox chkMarker;
    private System.Windows.Forms.Label label7;
    private System.Windows.Forms.ComboBox cbLine;
    private ColorPicker colorPicker1;
    private System.Windows.Forms.Label label8;
}
}
| |
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
#if NET20
using Newtonsoft.Json.Utilities.LinqBridge;
#else
using System.Linq;
#endif
using System.Runtime.Serialization;
using System.Text;
using System.Xml;
#if NETFX_CORE
using Microsoft.VisualStudio.TestPlatform.UnitTestFramework;
using TestFixture = Microsoft.VisualStudio.TestPlatform.UnitTestFramework.TestClassAttribute;
using Test = Microsoft.VisualStudio.TestPlatform.UnitTestFramework.TestMethodAttribute;
#elif ASPNETCORE50
using Xunit;
using Test = Xunit.FactAttribute;
using Assert = Newtonsoft.Json.Tests.XUnitAssert;
#else
using NUnit.Framework;
#endif
using Newtonsoft.Json;
using System.IO;
using Newtonsoft.Json.Converters;
using Newtonsoft.Json.Utilities;
namespace Newtonsoft.Json.Tests
{
[TestFixture]
public class JsonTextWriterTest : TestFixtureBase
{
[Test]
public void NewLine()
{
    // The writer must honor the underlying TextWriter's NewLine ("\n" here),
    // not the platform default.
    var ms = new MemoryStream();
    using (var streamWriter = new StreamWriter(ms, new UTF8Encoding(false)) { NewLine = "\n" })
    using (var writer = new JsonTextWriter(streamWriter)
    {
        CloseOutput = true,
        Indentation = 2,
        Formatting = Formatting.Indented
    })
    {
        writer.WriteStartObject();
        writer.WritePropertyName("prop");
        writer.WriteValue(true);
        writer.WriteEndObject();
    }
    var bytes = ms.ToArray();
    var json = Encoding.UTF8.GetString(bytes, 0, bytes.Length);
    Assert.AreEqual(@"{" + '\n' + @"  ""prop"": true" + '\n' + "}", json);
}
[Test]
public void QuoteNameAndStrings()
{
    // QuoteName = false leaves property names bare; string values stay quoted.
    var builder = new StringBuilder();
    var writer = new JsonTextWriter(new StringWriter(builder)) { QuoteName = false };
    writer.WriteStartObject();
    writer.WritePropertyName("name");
    writer.WriteValue("value");
    writer.WriteEndObject();
    writer.Flush();
    Assert.AreEqual(@"{name:""value""}", builder.ToString());
}
[Test]
public void CloseOutput()
{
    // By default Close() disposes the underlying stream...
    var ms = new MemoryStream();
    var writer = new JsonTextWriter(new StreamWriter(ms));
    Assert.IsTrue(ms.CanRead);
    writer.Close();
    Assert.IsFalse(ms.CanRead);
    // ...but CloseOutput = false leaves it open.
    ms = new MemoryStream();
    writer = new JsonTextWriter(new StreamWriter(ms)) { CloseOutput = false };
    Assert.IsTrue(ms.CanRead);
    writer.Close();
    Assert.IsTrue(ms.CanRead);
}
#if !(PORTABLE || ASPNETCORE50 || NETFX_CORE)
[Test]
public void WriteIConvertable()
{
    // An IConvertible value is converted and written as its underlying primitive.
    var sw = new StringWriter();
    var writer = new JsonTextWriter(sw);
    writer.WriteValue(new ConvertibleInt(1));
    Assert.AreEqual("1", sw.ToString());
}
#endif
[Test]
public void ValueFormatting()
{
    // Covers string escaping, double-vs-decimal formatting, and null/undefined tokens.
    var builder = new StringBuilder();
    var stringWriter = new StringWriter(builder);
    using (JsonWriter writer = new JsonTextWriter(stringWriter))
    {
        writer.WriteStartArray();
        writer.WriteValue('@');
        writer.WriteValue("\r\n\t\f\b?{\\r\\n\"\'");
        writer.WriteValue(true);
        writer.WriteValue(10);
        writer.WriteValue(10.99);
        writer.WriteValue(0.99);
        writer.WriteValue(0.000000000000000001d);
        writer.WriteValue(0.000000000000000001m);
        writer.WriteValue((string)null);
        writer.WriteValue((object)null);
        writer.WriteValue("This is a string.");
        writer.WriteNull();
        writer.WriteUndefined();
        writer.WriteEndArray();
    }
    string expected = @"[""@"",""\r\n\t\f\b?{\\r\\n\""'"",true,10,10.99,0.99,1E-18,0.000000000000000001,null,null,""This is a string."",null,undefined]";
    string result = builder.ToString();
    Console.WriteLine("ValueFormatting");
    Console.WriteLine(result);
    Assert.AreEqual(expected, result);
}
[Test]
public void NullableValueFormatting()
{
    // Null nullables emit "null"; non-null nullables format like their underlying type.
    var stringWriter = new StringWriter();
    using (var writer = new JsonTextWriter(stringWriter))
    {
        writer.WriteStartArray();
        writer.WriteValue((char?)null);
        writer.WriteValue((char?)'c');
        writer.WriteValue((bool?)null);
        writer.WriteValue((bool?)true);
        writer.WriteValue((byte?)null);
        writer.WriteValue((byte?)1);
        writer.WriteValue((sbyte?)null);
        writer.WriteValue((sbyte?)1);
        writer.WriteValue((short?)null);
        writer.WriteValue((short?)1);
        writer.WriteValue((ushort?)null);
        writer.WriteValue((ushort?)1);
        writer.WriteValue((int?)null);
        writer.WriteValue((int?)1);
        writer.WriteValue((uint?)null);
        writer.WriteValue((uint?)1);
        writer.WriteValue((long?)null);
        writer.WriteValue((long?)1);
        writer.WriteValue((ulong?)null);
        writer.WriteValue((ulong?)1);
        writer.WriteValue((double?)null);
        writer.WriteValue((double?)1.1);
        writer.WriteValue((float?)null);
        writer.WriteValue((float?)1.1);
        writer.WriteValue((decimal?)null);
        writer.WriteValue((decimal?)1.1m);
        writer.WriteValue((DateTime?)null);
        writer.WriteValue((DateTime?)new DateTime(DateTimeUtils.InitialJavaScriptDateTicks, DateTimeKind.Utc));
#if !NET20
        writer.WriteValue((DateTimeOffset?)null);
        writer.WriteValue((DateTimeOffset?)new DateTimeOffset(DateTimeUtils.InitialJavaScriptDateTicks, TimeSpan.Zero));
#endif
        writer.WriteEndArray();
    }
    var json = stringWriter.ToString();
    string expected;
#if !NET20
    expected = @"[null,""c"",null,true,null,1,null,1,null,1,null,1,null,1,null,1,null,1,null,1,null,1.1,null,1.1,null,1.1,null,""1970-01-01T00:00:00Z"",null,""1970-01-01T00:00:00+00:00""]";
#else
    expected = @"[null,""c"",null,true,null,1,null,1,null,1,null,1,null,1,null,1,null,1,null,1,null,1.1,null,1.1,null,1.1,null,""1970-01-01T00:00:00Z""]";
#endif
    Assert.AreEqual(expected, json);
}
[Test]
public void WriteValueObjectWithNullable()
{
    // A boxed nullable is unwrapped and written as its underlying value.
    var stringWriter = new StringWriter();
    using (var writer = new JsonTextWriter(stringWriter))
    {
        char? value = 'c';
        writer.WriteStartArray();
        writer.WriteValue((object)value);
        writer.WriteEndArray();
    }
    Assert.AreEqual(@"[""c""]", stringWriter.ToString());
}
[Test]
public void WriteValueObjectWithUnsupportedValue()
{
    // Writing a type with no JSON primitive mapping must throw with a helpful message.
    ExceptionAssert.Throws<JsonWriterException>(() =>
    {
        var stringWriter = new StringWriter();
        using (var writer = new JsonTextWriter(stringWriter))
        {
            writer.WriteStartArray();
            writer.WriteValue(new Version(1, 1, 1, 1));
            writer.WriteEndArray();
        }
    }, @"Unsupported type: System.Version. Use the JsonSerializer class to get the object's JSON representation. Path ''.");
}
[Test]
public void StringEscaping()
{
    // Control characters are escaped; quotes inside strings are escaped; apostrophes are not.
    var builder = new StringBuilder();
    var stringWriter = new StringWriter(builder);
    using (JsonWriter writer = new JsonTextWriter(stringWriter))
    {
        writer.WriteStartArray();
        writer.WriteValue(@"""These pretzels are making me thirsty!""");
        writer.WriteValue("Jeff's house was burninated.");
        writer.WriteValue("1. You don't talk about fight club.\r\n2. You don't talk about fight club.");
        writer.WriteValue("35% of\t statistics\n are made\r up.");
        writer.WriteEndArray();
    }
    string expected = @"[""\""These pretzels are making me thirsty!\"""",""Jeff's house was burninated."",""1. You don't talk about fight club.\r\n2. You don't talk about fight club."",""35% of\t statistics\n are made\r up.""]";
    string result = builder.ToString();
    Console.WriteLine("StringEscaping");
    Console.WriteLine(result);
    Assert.AreEqual(expected, result);
}
[Test]
public void WriteEnd()
{
    // WriteEndObject closes the open array implicitly before closing the object.
    var builder = new StringBuilder();
    var stringWriter = new StringWriter(builder);
    using (JsonWriter writer = new JsonTextWriter(stringWriter))
    {
        writer.Formatting = Formatting.Indented;
        writer.WriteStartObject();
        writer.WritePropertyName("CPU");
        writer.WriteValue("Intel");
        writer.WritePropertyName("PSU");
        writer.WriteValue("500W");
        writer.WritePropertyName("Drives");
        writer.WriteStartArray();
        writer.WriteValue("DVD read/writer");
        writer.WriteComment("(broken)");
        writer.WriteValue("500 gigabyte hard drive");
        writer.WriteValue("200 gigabype hard drive");
        writer.WriteEndObject();
        Assert.AreEqual(WriteState.Start, writer.WriteState);
    }
    string expected = @"{
  ""CPU"": ""Intel"",
  ""PSU"": ""500W"",
  ""Drives"": [
    ""DVD read/writer""
    /*(broken)*/,
    ""500 gigabyte hard drive"",
    ""200 gigabype hard drive""
  ]
}";
    StringAssert.AreEqual(expected, builder.ToString());
}
[Test]
public void CloseWithRemainingContent()
{
    // Close() auto-completes any open containers before shutting down.
    var builder = new StringBuilder();
    var stringWriter = new StringWriter(builder);
    using (JsonWriter writer = new JsonTextWriter(stringWriter))
    {
        writer.Formatting = Formatting.Indented;
        writer.WriteStartObject();
        writer.WritePropertyName("CPU");
        writer.WriteValue("Intel");
        writer.WritePropertyName("PSU");
        writer.WriteValue("500W");
        writer.WritePropertyName("Drives");
        writer.WriteStartArray();
        writer.WriteValue("DVD read/writer");
        writer.WriteComment("(broken)");
        writer.WriteValue("500 gigabyte hard drive");
        writer.WriteValue("200 gigabype hard drive");
        writer.Close();
    }
    string expected = @"{
  ""CPU"": ""Intel"",
  ""PSU"": ""500W"",
  ""Drives"": [
    ""DVD read/writer""
    /*(broken)*/,
    ""500 gigabyte hard drive"",
    ""200 gigabype hard drive""
  ]
}";
    StringAssert.AreEqual(expected, builder.ToString());
}
[Test]
public void Indenting()
{
    // Indented formatting: two-space indent per level, comment kept inline in the array.
    var builder = new StringBuilder();
    var stringWriter = new StringWriter(builder);
    using (JsonWriter writer = new JsonTextWriter(stringWriter))
    {
        writer.Formatting = Formatting.Indented;
        writer.WriteStartObject();
        writer.WritePropertyName("CPU");
        writer.WriteValue("Intel");
        writer.WritePropertyName("PSU");
        writer.WriteValue("500W");
        writer.WritePropertyName("Drives");
        writer.WriteStartArray();
        writer.WriteValue("DVD read/writer");
        writer.WriteComment("(broken)");
        writer.WriteValue("500 gigabyte hard drive");
        writer.WriteValue("200 gigabype hard drive");
        writer.WriteEnd();
        writer.WriteEndObject();
        Assert.AreEqual(WriteState.Start, writer.WriteState);
    }
    string expected = @"{
  ""CPU"": ""Intel"",
  ""PSU"": ""500W"",
  ""Drives"": [
    ""DVD read/writer""
    /*(broken)*/,
    ""500 gigabyte hard drive"",
    ""200 gigabype hard drive""
  ]
}";
    StringAssert.AreEqual(expected, builder.ToString());
}
[Test]
public void State()
{
    // WriteState and Path are verified after every structural transition.
    var builder = new StringBuilder();
    var stringWriter = new StringWriter(builder);
    using (JsonWriter writer = new JsonTextWriter(stringWriter))
    {
        Assert.AreEqual(WriteState.Start, writer.WriteState);
        writer.WriteStartObject();
        Assert.AreEqual(WriteState.Object, writer.WriteState);
        Assert.AreEqual("", writer.Path);
        writer.WritePropertyName("CPU");
        Assert.AreEqual(WriteState.Property, writer.WriteState);
        Assert.AreEqual("CPU", writer.Path);
        writer.WriteValue("Intel");
        Assert.AreEqual(WriteState.Object, writer.WriteState);
        Assert.AreEqual("CPU", writer.Path);
        writer.WritePropertyName("Drives");
        Assert.AreEqual(WriteState.Property, writer.WriteState);
        Assert.AreEqual("Drives", writer.Path);
        writer.WriteStartArray();
        Assert.AreEqual(WriteState.Array, writer.WriteState);
        writer.WriteValue("DVD read/writer");
        Assert.AreEqual(WriteState.Array, writer.WriteState);
        Assert.AreEqual("Drives[0]", writer.Path);
        writer.WriteEnd();
        Assert.AreEqual(WriteState.Object, writer.WriteState);
        Assert.AreEqual("Drives", writer.Path);
        writer.WriteEndObject();
        Assert.AreEqual(WriteState.Start, writer.WriteState);
        Assert.AreEqual("", writer.Path);
    }
}
[Test]
public void FloatingPointNonFiniteNumbers_Symbol()
{
    // Symbol handling writes NaN/Infinity as bare JavaScript symbols.
    var builder = new StringBuilder();
    var stringWriter = new StringWriter(builder);
    using (JsonWriter writer = new JsonTextWriter(stringWriter))
    {
        writer.Formatting = Formatting.Indented;
        writer.FloatFormatHandling = FloatFormatHandling.Symbol;
        writer.WriteStartArray();
        writer.WriteValue(double.NaN);
        writer.WriteValue(double.PositiveInfinity);
        writer.WriteValue(double.NegativeInfinity);
        writer.WriteValue(float.NaN);
        writer.WriteValue(float.PositiveInfinity);
        writer.WriteValue(float.NegativeInfinity);
        writer.WriteEndArray();
        writer.Flush();
    }
    string expected = @"[
  NaN,
  Infinity,
  -Infinity,
  NaN,
  Infinity,
  -Infinity
]";
    StringAssert.AreEqual(expected, builder.ToString());
}
[Test]
public void FloatingPointNonFiniteNumbers_Zero()
{
    // DefaultValue handling: non-finite values become 0.0; non-finite nullables become null.
    var builder = new StringBuilder();
    var stringWriter = new StringWriter(builder);
    using (JsonWriter writer = new JsonTextWriter(stringWriter))
    {
        writer.Formatting = Formatting.Indented;
        writer.FloatFormatHandling = FloatFormatHandling.DefaultValue;
        writer.WriteStartArray();
        writer.WriteValue(double.NaN);
        writer.WriteValue(double.PositiveInfinity);
        writer.WriteValue(double.NegativeInfinity);
        writer.WriteValue(float.NaN);
        writer.WriteValue(float.PositiveInfinity);
        writer.WriteValue(float.NegativeInfinity);
        writer.WriteValue((double?)double.NaN);
        writer.WriteValue((double?)double.PositiveInfinity);
        writer.WriteValue((double?)double.NegativeInfinity);
        writer.WriteValue((float?)float.NaN);
        writer.WriteValue((float?)float.PositiveInfinity);
        writer.WriteValue((float?)float.NegativeInfinity);
        writer.WriteEndArray();
        writer.Flush();
    }
    string expected = @"[
  0.0,
  0.0,
  0.0,
  0.0,
  0.0,
  0.0,
  null,
  null,
  null,
  null,
  null,
  null
]";
    StringAssert.AreEqual(expected, builder.ToString());
}
[Test]
public void FloatingPointNonFiniteNumbers_String()
{
    // String handling wraps NaN/Infinity in the writer's quote character (default ").
    var builder = new StringBuilder();
    var stringWriter = new StringWriter(builder);
    using (JsonWriter writer = new JsonTextWriter(stringWriter))
    {
        writer.Formatting = Formatting.Indented;
        writer.FloatFormatHandling = FloatFormatHandling.String;
        writer.WriteStartArray();
        writer.WriteValue(double.NaN);
        writer.WriteValue(double.PositiveInfinity);
        writer.WriteValue(double.NegativeInfinity);
        writer.WriteValue(float.NaN);
        writer.WriteValue(float.PositiveInfinity);
        writer.WriteValue(float.NegativeInfinity);
        writer.WriteEndArray();
        writer.Flush();
    }
    string expected = @"[
  ""NaN"",
  ""Infinity"",
  ""-Infinity"",
  ""NaN"",
  ""Infinity"",
  ""-Infinity""
]";
    StringAssert.AreEqual(expected, builder.ToString());
}
[Test]
public void FloatingPointNonFiniteNumbers_QuoteChar()
{
    // String handling respects a custom QuoteChar (single quote here).
    var builder = new StringBuilder();
    var stringWriter = new StringWriter(builder);
    using (var writer = new JsonTextWriter(stringWriter))
    {
        writer.Formatting = Formatting.Indented;
        writer.FloatFormatHandling = FloatFormatHandling.String;
        writer.QuoteChar = '\'';
        writer.WriteStartArray();
        writer.WriteValue(double.NaN);
        writer.WriteValue(double.PositiveInfinity);
        writer.WriteValue(double.NegativeInfinity);
        writer.WriteValue(float.NaN);
        writer.WriteValue(float.PositiveInfinity);
        writer.WriteValue(float.NegativeInfinity);
        writer.WriteEndArray();
        writer.Flush();
    }
    string expected = @"[
  'NaN',
  'Infinity',
  '-Infinity',
  'NaN',
  'Infinity',
  '-Infinity'
]";
    StringAssert.AreEqual(expected, builder.ToString());
}
[Test]
public void WriteRawInStart()
{
    // Raw output and explicit whitespace pass through untouched before structured content.
    var builder = new StringBuilder();
    var stringWriter = new StringWriter(builder);
    using (JsonWriter writer = new JsonTextWriter(stringWriter))
    {
        writer.Formatting = Formatting.Indented;
        writer.FloatFormatHandling = FloatFormatHandling.Symbol;
        writer.WriteRaw("[1,2,3,4,5]");
        writer.WriteWhitespace("  ");
        writer.WriteStartArray();
        writer.WriteValue(double.NaN);
        writer.WriteEndArray();
    }
    string expected = @"[1,2,3,4,5]  [
  NaN
]";
    StringAssert.AreEqual(expected, builder.ToString());
}
[Test]
public void WriteRawInArray()
{
    // WriteRaw inside an array appends verbatim without adding delimiters or indentation.
    var builder = new StringBuilder();
    var stringWriter = new StringWriter(builder);
    using (JsonWriter writer = new JsonTextWriter(stringWriter))
    {
        writer.Formatting = Formatting.Indented;
        writer.FloatFormatHandling = FloatFormatHandling.Symbol;
        writer.WriteStartArray();
        writer.WriteValue(double.NaN);
        writer.WriteRaw(",[1,2,3,4,5]");
        writer.WriteRaw(",[1,2,3,4,5]");
        writer.WriteValue(float.NaN);
        writer.WriteEndArray();
    }
    string expected = @"[
  NaN,[1,2,3,4,5],[1,2,3,4,5],
  NaN
]";
    StringAssert.AreEqual(expected, builder.ToString());
}
[Test]
public void WriteRawInObject()
{
    // A raw property/value pair written inside an object is emitted verbatim.
    var builder = new StringBuilder();
    var stringWriter = new StringWriter(builder);
    using (JsonWriter writer = new JsonTextWriter(stringWriter))
    {
        writer.Formatting = Formatting.Indented;
        writer.WriteStartObject();
        writer.WriteRaw(@"""PropertyName"":[1,2,3,4,5]");
        writer.WriteEnd();
    }
    Assert.AreEqual(@"{""PropertyName"":[1,2,3,4,5]}", builder.ToString());
}
[Test]
public void WriteToken()
{
    // WriteToken copies only the reader's current token (here the first array value).
    var reader = new JsonTextReader(new StringReader("[1,2,3,4,5]"));
    reader.Read(); // StartArray
    reader.Read(); // first integer
    var sw = new StringWriter();
    var writer = new JsonTextWriter(sw);
    writer.WriteToken(reader);
    Assert.AreEqual("1", sw.ToString());
}
[Test]
public void WriteRawValue()
{
    // WriteRawValue participates in object structure (delimiters added), content is verbatim.
    var builder = new StringBuilder();
    var stringWriter = new StringWriter(builder);
    using (JsonWriter writer = new JsonTextWriter(stringWriter))
    {
        const string rawJson = "[1,2]";
        writer.WriteStartObject();
        for (var i = 0; i < 3; i++)
        {
            writer.WritePropertyName("d" + i);
            writer.WriteRawValue(rawJson);
        }
        writer.WriteEndObject();
    }
    Assert.AreEqual(@"{""d0"":[1,2],""d1"":[1,2],""d2"":[1,2]}", builder.ToString());
}
[Test]
public void WriteObjectNestedInConstructor()
{
    // Objects can nest inside a JavaScript constructor call.
    var builder = new StringBuilder();
    var stringWriter = new StringWriter(builder);
    using (JsonWriter writer = new JsonTextWriter(stringWriter))
    {
        writer.WriteStartObject();
        writer.WritePropertyName("con");
        writer.WriteStartConstructor("Ext.data.JsonStore");
        writer.WriteStartObject();
        writer.WritePropertyName("aa");
        writer.WriteValue("aa");
        writer.WriteEndObject();
        writer.WriteEndConstructor();
        writer.WriteEndObject();
    }
    Assert.AreEqual(@"{""con"":new Ext.data.JsonStore({""aa"":""aa""})}", builder.ToString());
}
[Test]
public void WriteFloatingPointNumber()
{
    // Whole doubles keep a trailing ".0"; extremes use exponent notation; non-finite use symbols.
    var builder = new StringBuilder();
    var stringWriter = new StringWriter(builder);
    using (JsonWriter writer = new JsonTextWriter(stringWriter))
    {
        writer.FloatFormatHandling = FloatFormatHandling.Symbol;
        writer.WriteStartArray();
        writer.WriteValue(0.0);
        writer.WriteValue(0f);
        writer.WriteValue(0.1);
        writer.WriteValue(1.0);
        writer.WriteValue(1.000001);
        writer.WriteValue(0.000001);
        writer.WriteValue(double.Epsilon);
        writer.WriteValue(double.PositiveInfinity);
        writer.WriteValue(double.NegativeInfinity);
        writer.WriteValue(double.NaN);
        writer.WriteValue(double.MaxValue);
        writer.WriteValue(double.MinValue);
        writer.WriteValue(float.PositiveInfinity);
        writer.WriteValue(float.NegativeInfinity);
        writer.WriteValue(float.NaN);
        writer.WriteEndArray();
    }
    Assert.AreEqual(@"[0.0,0.0,0.1,1.0,1.000001,1E-06,4.94065645841247E-324,Infinity,-Infinity,NaN,1.7976931348623157E+308,-1.7976931348623157E+308,Infinity,-Infinity,NaN]", builder.ToString());
}
[Test]
public void WriteIntegerNumber()
{
    // Signed/unsigned extremes round-trip exactly as decimal digits.
    var builder = new StringBuilder();
    var stringWriter = new StringWriter(builder);
    using (JsonWriter writer = new JsonTextWriter(stringWriter) { Formatting = Formatting.Indented })
    {
        writer.WriteStartArray();
        writer.WriteValue(int.MaxValue);
        writer.WriteValue(int.MinValue);
        writer.WriteValue(0);
        writer.WriteValue(-0);
        writer.WriteValue(9L);
        writer.WriteValue(9UL);
        writer.WriteValue(long.MaxValue);
        writer.WriteValue(long.MinValue);
        writer.WriteValue(ulong.MaxValue);
        writer.WriteValue(ulong.MinValue);
        writer.WriteEndArray();
    }
    Console.WriteLine(builder.ToString());
    StringAssert.AreEqual(@"[
  2147483647,
  -2147483648,
  0,
  0,
  9,
  9,
  9223372036854775807,
  -9223372036854775808,
  18446744073709551615,
  0
]", builder.ToString());
}
// Verifies that closing an array when no token is open throws a
// JsonWriterException with the expected message/path.
[Test]
public void BadWriteEndArray()
{
ExceptionAssert.Throws<JsonWriterException>(() =>
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.WriteStartArray();
jsonWriter.WriteValue(0.0);
jsonWriter.WriteEndArray();
jsonWriter.WriteEndArray();
}
}, "No token to close. Path ''.");
}
// Verifies that setting QuoteChar to anything other than ' or " throws
// ArgumentException.
[Test]
public void InvalidQuoteChar()
{
ExceptionAssert.Throws<ArgumentException>(() =>
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonTextWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.Formatting = Formatting.Indented;
jsonWriter.QuoteChar = '*';
}
}, @"Invalid JavaScript string quote character. Valid quote characters are ' and "".");
}
// Exercises the formatting knobs (Indentation, IndentChar, QuoteName,
// QuoteChar) and verifies they can be changed mid-write: the first property
// uses 5 x '_', the second 6 x '?'.
[Test]
public void Indentation()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
using (JsonTextWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.Formatting = Formatting.Indented;
jsonWriter.FloatFormatHandling = FloatFormatHandling.Symbol;
Assert.AreEqual(Formatting.Indented, jsonWriter.Formatting);
jsonWriter.Indentation = 5;
Assert.AreEqual(5, jsonWriter.Indentation);
jsonWriter.IndentChar = '_';
Assert.AreEqual('_', jsonWriter.IndentChar);
jsonWriter.QuoteName = true;
Assert.AreEqual(true, jsonWriter.QuoteName);
jsonWriter.QuoteChar = '\'';
Assert.AreEqual('\'', jsonWriter.QuoteChar);
jsonWriter.WriteStartObject();
jsonWriter.WritePropertyName("propertyName");
jsonWriter.WriteValue(double.NaN);
jsonWriter.IndentChar = '?';
Assert.AreEqual('?', jsonWriter.IndentChar);
jsonWriter.Indentation = 6;
Assert.AreEqual(6, jsonWriter.Indentation);
jsonWriter.WritePropertyName("prop2");
jsonWriter.WriteValue(123);
jsonWriter.WriteEndObject();
}
string expected = @"{
_____'propertyName': NaN,
??????'prop2': 123
}";
string result = sb.ToString();
StringAssert.AreEqual(expected, result);
}
// Verifies that a byte[] value is written as a base64 string and that the
// written text round-trips back to the original bytes.
[Test]
public void WriteSingleBytes()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
string text = "Hello world.";
byte[] data = Encoding.UTF8.GetBytes(text);
using (JsonTextWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.Formatting = Formatting.Indented;
Assert.AreEqual(Formatting.Indented, jsonWriter.Formatting);
jsonWriter.WriteValue(data);
}
string expected = @"""SGVsbG8gd29ybGQu""";
string result = sb.ToString();
Assert.AreEqual(expected, result);
byte[] d2 = Convert.FromBase64String(result.Trim('"'));
Assert.AreEqual(text, Encoding.UTF8.GetString(d2, 0, d2.Length));
}
// Verifies base64 encoding of byte[] values inside an array, including the
// boxed-object overload and null byte[]/Uri values (written as JSON null).
[Test]
public void WriteBytesInArray()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
string text = "Hello world.";
byte[] data = Encoding.UTF8.GetBytes(text);
using (JsonTextWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.Formatting = Formatting.Indented;
Assert.AreEqual(Formatting.Indented, jsonWriter.Formatting);
jsonWriter.WriteStartArray();
jsonWriter.WriteValue(data);
jsonWriter.WriteValue(data);
jsonWriter.WriteValue((object)data);
jsonWriter.WriteValue((byte[])null);
jsonWriter.WriteValue((Uri)null);
jsonWriter.WriteEndArray();
}
string expected = @"[
""SGVsbG8gd29ybGQu"",
""SGVsbG8gd29ybGQu"",
""SGVsbG8gd29ybGQu"",
null,
null
]";
string result = sb.ToString();
StringAssert.AreEqual(expected, result);
}
// Verifies JsonWriter.Path tracking through nested arrays, objects, and a
// constructor token, including the auto-closing behavior of WriteEndObject
// (which closes any open child containers first).
[Test]
public void Path()
{
StringBuilder sb = new StringBuilder();
StringWriter sw = new StringWriter(sb);
string text = "Hello world.";
byte[] data = Encoding.UTF8.GetBytes(text);
using (JsonTextWriter writer = new JsonTextWriter(sw))
{
writer.Formatting = Formatting.Indented;
writer.WriteStartArray();
Assert.AreEqual("", writer.Path);
writer.WriteStartObject();
Assert.AreEqual("[0]", writer.Path);
writer.WritePropertyName("Property1");
Assert.AreEqual("[0].Property1", writer.Path);
writer.WriteStartArray();
Assert.AreEqual("[0].Property1", writer.Path);
writer.WriteValue(1);
Assert.AreEqual("[0].Property1[0]", writer.Path);
writer.WriteStartArray();
Assert.AreEqual("[0].Property1[1]", writer.Path);
writer.WriteStartArray();
Assert.AreEqual("[0].Property1[1][0]", writer.Path);
writer.WriteStartArray();
Assert.AreEqual("[0].Property1[1][0][0]", writer.Path);
// WriteEndObject closes the three still-open nested arrays as well.
writer.WriteEndObject();
Assert.AreEqual("[0]", writer.Path);
writer.WriteStartObject();
Assert.AreEqual("[1]", writer.Path);
writer.WritePropertyName("Property2");
Assert.AreEqual("[1].Property2", writer.Path);
writer.WriteStartConstructor("Constructor1");
Assert.AreEqual("[1].Property2", writer.Path);
writer.WriteNull();
Assert.AreEqual("[1].Property2[0]", writer.Path);
writer.WriteStartArray();
Assert.AreEqual("[1].Property2[1]", writer.Path);
writer.WriteValue(1);
Assert.AreEqual("[1].Property2[1][0]", writer.Path);
writer.WriteEnd();
Assert.AreEqual("[1].Property2[1]", writer.Path);
writer.WriteEndObject();
Assert.AreEqual("[1]", writer.Path);
writer.WriteEndArray();
Assert.AreEqual("", writer.Path);
}
StringAssert.AreEqual(@"[
{
""Property1"": [
1,
[
[
[]
]
]
]
},
{
""Property2"": new Constructor1(
null,
[
1
]
)
}
]", sb.ToString());
}
// Verifies that every value-producing JsonToken shares the same state
// transition row (index 7) in the writer's state table.
// NOTE: "StateArrayTempate" mirrors the (historically misspelled) internal
// member name in the product code — do not "correct" the spelling here.
[Test]
public void BuildStateArray()
{
JsonWriter.State[][] stateArray = JsonWriter.BuildStateArray();
var valueStates = JsonWriter.StateArrayTempate[7];
foreach (JsonToken valueToken in EnumUtils.GetValues(typeof(JsonToken)))
{
switch (valueToken)
{
case JsonToken.Integer:
case JsonToken.Float:
case JsonToken.String:
case JsonToken.Boolean:
case JsonToken.Null:
case JsonToken.Undefined:
case JsonToken.Date:
case JsonToken.Bytes:
Assert.AreEqual(valueStates, stateArray[(int)valueToken], "Error for " + valueToken + " states.");
break;
}
}
}
// Verifies that DateTimeZoneHandling.Utc stamps an unspecified-kind DateTime
// with the "Z" UTC suffix when written.
[Test]
public void DateTimeZoneHandling()
{
    StringWriter sw = new StringWriter();
    JsonTextWriter writer = new JsonTextWriter(sw);
    writer.DateTimeZoneHandling = Json.DateTimeZoneHandling.Utc;

    DateTime unspecified = new DateTime(2000, 1, 1, 1, 1, 1, DateTimeKind.Unspecified);
    writer.WriteValue(unspecified);

    Assert.AreEqual(@"""2000-01-01T01:01:01Z""", sw.ToString());
}
// Verifies StringEscapeHandling.EscapeHtml: HTML-sensitive characters
// (<, >, ", ', &) are written as \uXXXX escapes, and the escaped JSON still
// round-trips back to the original string through JsonTextReader.
// Fix: removed commented-out JavaScriptSerializer scratch code.
[Test]
public void HtmlStringEscapeHandling()
{
StringWriter sw = new StringWriter();
JsonTextWriter writer = new JsonTextWriter(sw)
{
StringEscapeHandling = StringEscapeHandling.EscapeHtml
};
string script = @"<script type=""text/javascript"">alert('hi');</script>";
writer.WriteValue(script);
string json = sw.ToString();
Assert.AreEqual(@"""\u003cscript type=\u0022text/javascript\u0022\u003ealert(\u0027hi\u0027);\u003c/script\u003e""", json);
JsonTextReader reader = new JsonTextReader(new StringReader(json));
Assert.AreEqual(script, reader.ReadAsString());
}
// Verifies StringEscapeHandling.EscapeNonAscii (non-ASCII char becomes a
// 6-char \uXXXX escape) versus Default (written verbatim), and that the
// escaped form round-trips through JsonTextReader.
[Test]
public void NonAsciiStringEscapeHandling()
{
StringWriter sw = new StringWriter();
JsonTextWriter writer = new JsonTextWriter(sw)
{
StringEscapeHandling = StringEscapeHandling.EscapeNonAscii
};
string unicode = "\u5f20";
writer.WriteValue(unicode);
string json = sw.ToString();
Assert.AreEqual(8, json.Length);
Assert.AreEqual(@"""\u5f20""", json);
JsonTextReader reader = new JsonTextReader(new StringReader(json));
Assert.AreEqual(unicode, reader.ReadAsString());
sw = new StringWriter();
writer = new JsonTextWriter(sw)
{
StringEscapeHandling = StringEscapeHandling.Default
};
writer.WriteValue(unicode);
json = sw.ToString();
Assert.AreEqual(3, json.Length);
Assert.AreEqual("\"\u5f20\"", json);
}
// Verifies that calling WriteEnd while a property name is pending writes an
// implicit null value for that property before closing the object.
[Test]
public void WriteEndOnProperty()
{
    StringWriter sw = new StringWriter();

    JsonTextWriter writer = new JsonTextWriter(sw)
    {
        QuoteChar = '\''
    };
    writer.WriteStartObject();
    writer.WritePropertyName("Blah");
    writer.WriteEnd();

    Assert.AreEqual("{'Blah':null}", sw.ToString());
}
#if !NET20
// Verifies that a custom QuoteChar (') is applied to every quoted value type
// and that DateFormatHandling/DateFormatString can be switched mid-write.
[Test]
public void QuoteChar()
{
StringWriter sw = new StringWriter();
JsonTextWriter writer = new JsonTextWriter(sw);
writer.Formatting = Formatting.Indented;
writer.QuoteChar = '\'';
writer.WriteStartArray();
writer.WriteValue(new DateTime(2000, 1, 1, 1, 1, 1, DateTimeKind.Utc));
writer.WriteValue(new DateTimeOffset(2000, 1, 1, 1, 1, 1, TimeSpan.Zero));
writer.DateFormatHandling = DateFormatHandling.MicrosoftDateFormat;
writer.WriteValue(new DateTime(2000, 1, 1, 1, 1, 1, DateTimeKind.Utc));
writer.WriteValue(new DateTimeOffset(2000, 1, 1, 1, 1, 1, TimeSpan.Zero));
writer.DateFormatString = "yyyy gg";
writer.WriteValue(new DateTime(2000, 1, 1, 1, 1, 1, DateTimeKind.Utc));
writer.WriteValue(new DateTimeOffset(2000, 1, 1, 1, 1, 1, TimeSpan.Zero));
writer.WriteValue(new byte[] { 1, 2, 3 });
writer.WriteValue(TimeSpan.Zero);
writer.WriteValue(new Uri("http://www.google.com/"));
writer.WriteValue(Guid.Empty);
writer.WriteEnd();
StringAssert.AreEqual(@"[
'2000-01-01T01:01:01Z',
'2000-01-01T01:01:01+00:00',
'\/Date(946688461000)\/',
'\/Date(946688461000+0000)\/',
'2000 A.D.',
'2000 A.D.',
'AQID',
'00:00:00',
'http://www.google.com/',
'00000000-0000-0000-0000-000000000000'
]", sw.ToString());
}
// Verifies that the writer's Culture is honored by a custom DateFormatString
// (en-NZ renders the AM designator as "a.m.").
[Test]
public void Culture()
{
StringWriter sw = new StringWriter();
JsonTextWriter writer = new JsonTextWriter(sw);
writer.Formatting = Formatting.Indented;
writer.DateFormatString = "yyyy tt";
writer.Culture = new CultureInfo("en-NZ");
writer.QuoteChar = '\'';
writer.WriteStartArray();
writer.WriteValue(new DateTime(2000, 1, 1, 1, 1, 1, DateTimeKind.Utc));
writer.WriteValue(new DateTimeOffset(2000, 1, 1, 1, 1, 1, TimeSpan.Zero));
writer.WriteEnd();
StringAssert.AreEqual(@"[
'2000 a.m.',
'2000 a.m.'
]", sw.ToString());
}
#endif
// Exhaustively compares the current JavaScriptUtils string-escaping
// implementation against the retained legacy implementation below, for every
// char value except char.MaxValue itself (the do/while exits when c wraps to
// it). Throws on the first divergence.
[Test]
public void CompareNewStringEscapingWithOld()
{
Console.WriteLine("Started");
char c = (char)0;
do
{
if (c % 1000 == 0)
Console.WriteLine("Position: " + (int)c);
StringWriter swNew = new StringWriter();
char[] buffer = null;
JavaScriptUtils.WriteEscapedJavaScriptString(swNew, c.ToString(), '"', true, JavaScriptUtils.DoubleQuoteCharEscapeFlags, StringEscapeHandling.Default, ref buffer);
StringWriter swOld = new StringWriter();
WriteEscapedJavaScriptStringOld(swOld, c.ToString(), '"', true);
string newText = swNew.ToString();
string oldText = swOld.ToString();
if (newText != oldText)
throw new Exception("Difference for char '{0}' (value {1}). Old text: {2}, New text: {3}".FormatWith(CultureInfo.InvariantCulture, c, (int)c, oldText, newText));
c++;
} while (c != char.MaxValue);
Console.WriteLine("Finished");
}
private const string EscapedUnicodeText = "!";
// Legacy JavaScript string-escaping implementation, kept verbatim so
// CompareNewStringEscapingWithOld can diff it against the current
// JavaScriptUtils implementation. Writes s to writer with JS escapes,
// optionally wrapped in the given delimiter.
private static void WriteEscapedJavaScriptStringOld(TextWriter writer, string s, char delimiter, bool appendDelimiters)
{
// leading delimiter
if (appendDelimiters)
writer.Write(delimiter);
if (s != null)
{
char[] chars = null;
char[] unicodeBuffer = null;
int lastWritePosition = 0;
for (int i = 0; i < s.Length; i++)
{
var c = s[i];
// don't escape standard text/numbers except '\' and the text delimiter
if (c >= ' ' && c < 128 && c != '\\' && c != delimiter)
continue;
string escapedValue;
switch (c)
{
case '\t':
escapedValue = @"\t";
break;
case '\n':
escapedValue = @"\n";
break;
case '\r':
escapedValue = @"\r";
break;
case '\f':
escapedValue = @"\f";
break;
case '\b':
escapedValue = @"\b";
break;
case '\\':
escapedValue = @"\\";
break;
case '\u0085': // Next Line
escapedValue = @"\u0085";
break;
case '\u2028': // Line Separator
escapedValue = @"\u2028";
break;
case '\u2029': // Paragraph Separator
escapedValue = @"\u2029";
break;
case '\'':
// this character is being used as the delimiter
escapedValue = @"\'";
break;
case '"':
// this character is being used as the delimiter
escapedValue = "\\\"";
break;
default:
if (c <= '\u001f')
{
if (unicodeBuffer == null)
unicodeBuffer = new char[6];
StringUtils.ToCharAsUnicode(c, unicodeBuffer);
// slightly hacky but it saves multiple conditions in if test
escapedValue = EscapedUnicodeText;
}
else
{
escapedValue = null;
}
break;
}
if (escapedValue == null)
continue;
if (i > lastWritePosition)
{
if (chars == null)
chars = s.ToCharArray();
// write unchanged chars before writing escaped text
writer.Write(chars, lastWritePosition, i - lastWritePosition);
}
lastWritePosition = i + 1;
// EscapedUnicodeText is a sentinel meaning "use unicodeBuffer instead"
if (!string.Equals(escapedValue, EscapedUnicodeText))
writer.Write(escapedValue);
else
writer.Write(unicodeBuffer);
}
if (lastWritePosition == 0)
{
// no escaped text, write entire string
writer.Write(s);
}
else
{
if (chars == null)
chars = s.ToCharArray();
// write remaining text
writer.Write(chars, lastWritePosition, s.Length - lastWritePosition);
}
}
// trailing delimiter
if (appendDelimiters)
writer.Write(delimiter);
}
// Verifies that the CustomJsonTextWriter subclass (defined below) can
// override token writing: property names are reversed, null renders as
// "NULL!!!", and objects use triple braces, while WriteState tracking still
// behaves like the base writer.
[Test]
public void CustomJsonTextWriterTests()
{
StringWriter sw = new StringWriter();
CustomJsonTextWriter writer = new CustomJsonTextWriter(sw) { Formatting = Formatting.Indented };
writer.WriteStartObject();
Assert.AreEqual(WriteState.Object, writer.WriteState);
writer.WritePropertyName("Property1");
Assert.AreEqual(WriteState.Property, writer.WriteState);
Assert.AreEqual("Property1", writer.Path);
writer.WriteNull();
Assert.AreEqual(WriteState.Object, writer.WriteState);
writer.WriteEndObject();
Assert.AreEqual(WriteState.Start, writer.WriteState);
StringAssert.AreEqual(@"{{{
""1ytreporP"": NULL!!!
}}}", sw.ToString());
}
// Verifies that QuoteName = false causes serialized dictionary keys to be
// written without surrounding quotes.
[Test]
public void QuoteDictionaryNames()
{
var d = new Dictionary<string, int>
{
{ "a", 1 },
};
var jsonSerializerSettings = new JsonSerializerSettings
{
Formatting = Formatting.Indented,
};
var serializer = JsonSerializer.Create(jsonSerializerSettings);
using (var stringWriter = new StringWriter())
{
using (var writer = new JsonTextWriter(stringWriter) { QuoteName = false })
{
serializer.Serialize(writer, d);
writer.Close();
}
StringAssert.AreEqual(@"{
a: 1
}", stringWriter.ToString());
}
}
// Verifies WriteToken(reader, true) round-trips comments: single-line //
// comments in the input are normalized to /* */ block comments in the
// output, in their positions relative to the surrounding tokens.
[Test]
public void WriteComments()
{
string json = @"//comment*//*hi*/
{//comment
Name://comment
true//comment after true" + StringUtils.CarriageReturn + @"
,//comment after comma" + StringUtils.CarriageReturnLineFeed + @"
""ExpiryDate""://comment" + StringUtils.LineFeed + @"
new
" + StringUtils.LineFeed +
@"Constructor
(//comment
null//comment
),
""Price"": 3.99,
""Sizes"": //comment
[//comment
""Small""//comment
]//comment
}//comment
//comment 1 ";
JsonTextReader r = new JsonTextReader(new StringReader(json));
StringWriter sw = new StringWriter();
JsonTextWriter w = new JsonTextWriter(sw);
w.Formatting = Formatting.Indented;
w.WriteToken(r, true);
StringAssert.AreEqual(@"/*comment*//*hi*/*/{/*comment*/
""Name"": /*comment*/ true/*comment after true*//*comment after comma*/,
""ExpiryDate"": /*comment*/ new Constructor(
/*comment*/,
null
/*comment*/
),
""Price"": 3.99,
""Sizes"": /*comment*/ [
/*comment*/
""Small""
/*comment*/
]/*comment*/
}/*comment *//*comment 1 */", sw.ToString());
}
}
// Test double for JsonTextWriter override behavior: reverses property names,
// writes "NULL!!!" for null, and brackets objects with "{{{" / "}}}".
// Used by CustomJsonTextWriterTests above.
public class CustomJsonTextWriter : JsonTextWriter
{
private readonly TextWriter _writer;
public CustomJsonTextWriter(TextWriter textWriter) : base(textWriter)
{
_writer = textWriter;
}
public override void WritePropertyName(string name)
{
WritePropertyName(name, true);
}
// Writes the property name reversed; SetWriteState keeps the base writer's
// state machine (and Path tracking) consistent.
public override void WritePropertyName(string name, bool escape)
{
SetWriteState(JsonToken.PropertyName, name);
if (QuoteName)
_writer.Write(QuoteChar);
_writer.Write(new string(name.ToCharArray().Reverse().ToArray()));
if (QuoteName)
_writer.Write(QuoteChar);
_writer.Write(':');
}
public override void WriteNull()
{
SetWriteState(JsonToken.Null, null);
_writer.Write("NULL!!!");
}
public override void WriteStartObject()
{
SetWriteState(JsonToken.StartObject, null);
_writer.Write("{{{");
}
// Updates state only; the closing text is emitted by WriteEnd below.
public override void WriteEndObject()
{
SetWriteState(JsonToken.EndObject, null);
}
protected override void WriteEnd(JsonToken token)
{
if (token == JsonToken.EndObject)
_writer.Write("}}}");
else
base.WriteEnd(token);
}
}
#if !(PORTABLE || ASPNETCORE50 || NETFX_CORE)
// Minimal IConvertible wrapper around an int used by tests: only
// ToType(typeof(int)) succeeds; every other conversion deliberately throws
// so tests can detect unexpected conversion paths.
public struct ConvertibleInt : IConvertible
{
private readonly int _value;
public ConvertibleInt(int value)
{
_value = value;
}
public TypeCode GetTypeCode()
{
return TypeCode.Int32;
}
public bool ToBoolean(IFormatProvider provider)
{
throw new NotImplementedException();
}
public byte ToByte(IFormatProvider provider)
{
throw new NotImplementedException();
}
public char ToChar(IFormatProvider provider)
{
throw new NotImplementedException();
}
public DateTime ToDateTime(IFormatProvider provider)
{
throw new NotImplementedException();
}
public decimal ToDecimal(IFormatProvider provider)
{
throw new NotImplementedException();
}
public double ToDouble(IFormatProvider provider)
{
throw new NotImplementedException();
}
public short ToInt16(IFormatProvider provider)
{
throw new NotImplementedException();
}
public int ToInt32(IFormatProvider provider)
{
throw new NotImplementedException();
}
public long ToInt64(IFormatProvider provider)
{
throw new NotImplementedException();
}
public sbyte ToSByte(IFormatProvider provider)
{
throw new NotImplementedException();
}
public float ToSingle(IFormatProvider provider)
{
throw new NotImplementedException();
}
public string ToString(IFormatProvider provider)
{
throw new NotImplementedException();
}
// The only supported conversion target is int.
public object ToType(Type conversionType, IFormatProvider provider)
{
if (conversionType == typeof(int))
return _value;
throw new Exception("Type not supported: " + conversionType.FullName);
}
public ushort ToUInt16(IFormatProvider provider)
{
throw new NotImplementedException();
}
public uint ToUInt32(IFormatProvider provider)
{
throw new NotImplementedException();
}
public ulong ToUInt64(IFormatProvider provider)
{
throw new NotImplementedException();
}
}
#endif
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Diagnostics.Contracts;
using System.Runtime.Serialization;
using System.Threading;
namespace System.Globalization
{
// Gregorian Calendars use Era Info
// Describes one era of a Gregorian-based calendar: when it starts (in ticks),
// how era-relative years map to Gregorian years, and optional display names.
// Fix: the short constructor duplicated the long constructor's body; it now
// chains to it so the initialization logic lives in one place.
[Serializable]
internal class EraInfo
{
    internal int era;          // The value of the era.
    internal long ticks;       // The time in ticks when the era starts
    internal int yearOffset;   // The offset to Gregorian year when the era starts.
                               // Gregorian Year = Era Year + yearOffset
                               // Era Year = Gregorian Year - yearOffset
    internal int minEraYear;   // Min year value in this era. Generally 1, but this may
                               // be affected by the DateTime.MinValue;
    internal int maxEraYear;   // Max year value in this era. (== the year length of the era + 1)

    [OptionalField(VersionAdded = 4)]
    internal String eraName;          // The era name
    [OptionalField(VersionAdded = 4)]
    internal String abbrevEraName;    // Abbreviated Era Name
    [OptionalField(VersionAdded = 4)]
    internal String englishEraName;   // English era name

    // Creates an era without display names (they stay null, matching the
    // previous behavior where the name fields were simply never assigned).
    internal EraInfo(int era, int startYear, int startMonth, int startDay, int yearOffset, int minEraYear, int maxEraYear)
        : this(era, startYear, startMonth, startDay, yearOffset, minEraYear, maxEraYear, null, null, null)
    {
    }

    // Creates an era with its localized, abbreviated, and English names.
    internal EraInfo(int era, int startYear, int startMonth, int startDay, int yearOffset, int minEraYear, int maxEraYear,
                     String eraName, String abbrevEraName, String englishEraName)
    {
        this.era = era;
        this.yearOffset = yearOffset;
        this.minEraYear = minEraYear;
        this.maxEraYear = maxEraYear;
        this.ticks = new DateTime(startYear, startMonth, startDay).Ticks;
        this.eraName = eraName;
        this.abbrevEraName = abbrevEraName;
        this.englishEraName = englishEraName;
    }
}
// This calendar recognizes two era values:
// 0 CurrentEra (AD)
// 1 BeforeCurrentEra (BC)
[Serializable]
internal class GregorianCalendarHelper
{
// 1 tick = 100ns = 10E-7 second
// Number of ticks per time unit
internal const long TicksPerMillisecond = 10000;
internal const long TicksPerSecond = TicksPerMillisecond * 1000;
internal const long TicksPerMinute = TicksPerSecond * 60;
internal const long TicksPerHour = TicksPerMinute * 60;
internal const long TicksPerDay = TicksPerHour * 24;
// Number of milliseconds per time unit
internal const int MillisPerSecond = 1000;
internal const int MillisPerMinute = MillisPerSecond * 60;
internal const int MillisPerHour = MillisPerMinute * 60;
internal const int MillisPerDay = MillisPerHour * 24;
// Number of days in a non-leap year
internal const int DaysPerYear = 365;
// Number of days in 4 years
internal const int DaysPer4Years = DaysPerYear * 4 + 1;
// Number of days in 100 years
internal const int DaysPer100Years = DaysPer4Years * 25 - 1;
// Number of days in 400 years
internal const int DaysPer400Years = DaysPer100Years * 4 + 1;
// Number of days from 1/1/0001 to 1/1/10000
internal const int DaysTo10000 = DaysPer400Years * 25 - 366;
internal const long MaxMillis = (long)DaysTo10000 * MillisPerDay;
internal const int DatePartYear = 0;
internal const int DatePartDayOfYear = 1;
internal const int DatePartMonth = 2;
internal const int DatePartDay = 3;
//
// The maximum Gregorian year this helper can represent; the limit is
// inherited from the DateTime type (and recorded in m_maxYear by the ctor).
//
internal int MaxYear
{
    get { return m_maxYear; }
}
internal static readonly int[] DaysToMonth365 =
{
0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365
};
internal static readonly int[] DaysToMonth366 =
{
0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366
};
[OptionalField(VersionAdded = 1)]
internal int m_maxYear = 9999;
[OptionalField(VersionAdded = 1)]
internal int m_minYear;
internal Calendar m_Cal;
[OptionalField(VersionAdded = 1)]
internal EraInfo[] m_EraInfo;
[OptionalField(VersionAdded = 1)]
internal int[] m_eras = null;
// Construct an instance of gregorian calendar helper for the given calendar
// and its era table (listed most-recent era first; entry 0 defines the
// overall year range).
// Fix: removed a stray empty statement (";;") after the m_minYear assignment.
internal GregorianCalendarHelper(Calendar cal, EraInfo[] eraInfo)
{
    m_Cal = cal;
    m_EraInfo = eraInfo;
    m_maxYear = m_EraInfo[0].maxEraYear;
    m_minYear = m_EraInfo[0].minEraYear;
}
/*=================================GetGregorianYear==========================
**Action: Get the Gregorian year value for the specified year in an era.
**Returns: The Gregorian year value.
**Arguments:
**      year    the era-relative year value (e.g. a year in the Japanese calendar)
**      era     the era value, or Calendar.CurrentEra for the calendar's current era.
**Exceptions:
**      ArgumentOutOfRangeException if the year value or the era value is invalid.
============================================================================*/
internal int GetGregorianYear(int year, int era)
{
if (year < 0)
{
throw new ArgumentOutOfRangeException(nameof(year),
SR.ArgumentOutOfRange_NeedNonNegNum);
}
Contract.EndContractBlock();
if (era == Calendar.CurrentEra)
{
era = m_Cal.CurrentEraValue;
}
// Find the matching era and translate the era-relative year by its offset.
for (int i = 0; i < m_EraInfo.Length; i++)
{
if (era == m_EraInfo[i].era)
{
if (year < m_EraInfo[i].minEraYear || year > m_EraInfo[i].maxEraYear)
{
throw new ArgumentOutOfRangeException(
nameof(year),
String.Format(
CultureInfo.CurrentCulture,
SR.ArgumentOutOfRange_Range,
m_EraInfo[i].minEraYear,
m_EraInfo[i].maxEraYear));
}
return (m_EraInfo[i].yearOffset + year);
}
}
throw new ArgumentOutOfRangeException(nameof(era), SR.ArgumentOutOfRange_InvalidEraValue);
}
// Non-throwing companion to GetGregorianYear: returns true when the
// era-relative year falls inside the given era's valid range, false for a
// negative year or an unknown era.
internal bool IsValidYear(int year, int era)
{
    if (year < 0)
    {
        return false;
    }
    if (era == Calendar.CurrentEra)
    {
        era = m_Cal.CurrentEraValue;
    }
    foreach (EraInfo info in m_EraInfo)
    {
        if (info.era != era)
        {
            continue;
        }
        // Era found: valid iff the year lies within the era's bounds.
        return year >= info.minEraYear && year <= info.maxEraYear;
    }
    // No era matched.
    return false;
}
// Returns a given date part of this DateTime. This method is used
// to compute the year, day-of-year, month, or day part.
// The algorithm peels off whole 400/100/4/1-year periods from the day count
// (mirroring DateTime's own implementation); kept byte-identical because the
// clamping steps (y100 == 4, y1 == 4) are order-sensitive.
internal virtual int GetDatePart(long ticks, int part)
{
CheckTicksRange(ticks);
// n = number of days since 1/1/0001
int n = (int)(ticks / TicksPerDay);
// y400 = number of whole 400-year periods since 1/1/0001
int y400 = n / DaysPer400Years;
// n = day number within 400-year period
n -= y400 * DaysPer400Years;
// y100 = number of whole 100-year periods within 400-year period
int y100 = n / DaysPer100Years;
// Last 100-year period has an extra day, so decrement result if 4
if (y100 == 4) y100 = 3;
// n = day number within 100-year period
n -= y100 * DaysPer100Years;
// y4 = number of whole 4-year periods within 100-year period
int y4 = n / DaysPer4Years;
// n = day number within 4-year period
n -= y4 * DaysPer4Years;
// y1 = number of whole years within 4-year period
int y1 = n / DaysPerYear;
// Last year has an extra day, so decrement result if 4
if (y1 == 4) y1 = 3;
// If year was requested, compute and return it
if (part == DatePartYear)
{
return (y400 * 400 + y100 * 100 + y4 * 4 + y1 + 1);
}
// n = day number within year
n -= y1 * DaysPerYear;
// If day-of-year was requested, return it
if (part == DatePartDayOfYear)
{
return (n + 1);
}
// Leap year calculation looks different from IsLeapYear since y1, y4,
// and y100 are relative to year 1, not year 0
bool leapYear = (y1 == 3 && (y4 != 24 || y100 == 3));
int[] days = leapYear ? DaysToMonth366 : DaysToMonth365;
// All months have less than 32 days, so n >> 5 is a good conservative
// estimate for the month
int m = (n >> 5) + 1;
// m = 1-based month number
while (n >= days[m]) m++;
// If month was requested, return it
if (part == DatePartMonth) return (m);
// Return 1-based day-of-month
return (n - days[m - 1] + 1);
}
/*=================================GetAbsoluteDate==========================
**Action: Gets the absolute date for the given Gregorian date. The absolute date means
**        the number of days from January 1st, 1 A.D.
**Returns: the absolute date
**Arguments:
**      year    the Gregorian year
**      month   the Gregorian month
**      day     the day
**Exceptions:
**      ArgumentOutOfRangeException if the year, month, or day value is invalid.
**Note:
**      This is an internal method used by DateToTicks() and the calculations of Hijri and Hebrew calendars.
**      Number of Days in Prior Years (both common and leap years) +
**      Number of Days in Prior Months of Current Year +
**      Number of Days in Current Month
**
============================================================================*/
internal static long GetAbsoluteDate(int year, int month, int day)
{
if (year >= 1 && year <= 9999 && month >= 1 && month <= 12)
{
// Pick the cumulative-day table for leap vs. common years.
int[] days = ((year % 4 == 0 && (year % 100 != 0 || year % 400 == 0))) ? DaysToMonth366 : DaysToMonth365;
if (day >= 1 && (day <= days[month] - days[month - 1]))
{
int y = year - 1;
// Days in prior years (with Gregorian leap-year corrections) +
// days in prior months + zero-based day of month.
int absoluteDate = y * 365 + y / 4 - y / 100 + y / 400 + days[month - 1] + day - 1;
return (absoluteDate);
}
}
throw new ArgumentOutOfRangeException(null, SR.ArgumentOutOfRange_BadYearMonthDay);
}
// Returns the tick count corresponding to the given Gregorian year, month,
// and day; argument validation is performed by GetAbsoluteDate.
internal static long DateToTicks(int year, int month, int day)
{
    long absoluteDate = GetAbsoluteDate(year, month, day);
    return absoluteDate * TicksPerDay;
}
// Return the tick count corresponding to the given hour, minute, second, and
// millisecond, validating each component's range first.
// Fix: removed a stray empty statement (";;") after the return, and corrected
// the comment grammar.
internal static long TimeToTicks(int hour, int minute, int second, int millisecond)
{
    // TimeSpan.TimeToTicks is a family access function which does no error
    // checking, so we need to validate the arguments here.
    if (hour >= 0 && hour < 24 && minute >= 0 && minute < 60 && second >= 0 && second < 60)
    {
        if (millisecond < 0 || millisecond >= MillisPerSecond)
        {
            throw new ArgumentOutOfRangeException(
                nameof(millisecond),
                String.Format(
                    CultureInfo.CurrentCulture,
                    SR.ArgumentOutOfRange_Range,
                    0,
                    MillisPerSecond - 1));
        }
        return (InternalGloablizationHelper.TimeToTicks(hour, minute, second) + millisecond * TicksPerMillisecond);
    }
    throw new ArgumentOutOfRangeException(null, SR.ArgumentOutOfRange_BadHourMinuteSecond);
}
// Throws ArgumentOutOfRangeException if ticks falls outside the wrapped
// calendar's supported DateTime range.
// NOTE(review): the parameter name string is "time" (not nameof(ticks)) —
// presumably because callers pass a DateTime parameter named time; confirm
// before changing.
internal void CheckTicksRange(long ticks)
{
if (ticks < m_Cal.MinSupportedDateTime.Ticks || ticks > m_Cal.MaxSupportedDateTime.Ticks)
{
throw new ArgumentOutOfRangeException(
"time",
String.Format(
CultureInfo.InvariantCulture,
SR.ArgumentOutOfRange_CalendarRange,
m_Cal.MinSupportedDateTime,
m_Cal.MaxSupportedDateTime));
}
Contract.EndContractBlock();
}
// Returns the DateTime resulting from adding the given number of
// months to the specified DateTime. The result is computed by incrementing
// (or decrementing) the year and month parts of the specified DateTime by
// value months, and, if required, adjusting the day part of the
// resulting date downwards to the last day of the resulting month in the
// resulting year. The time-of-day part of the result is the same as the
// time-of-day part of the specified DateTime.
//
// In more precise terms, considering the specified DateTime to be of the
// form y / m / d + t, where y is the
// year, m is the month, d is the day, and t is the
// time-of-day, the result is y1 / m1 / d1 + t,
// where y1 and m1 are computed by adding value months
// to y and m, and d1 is the largest value less than
// or equal to d that denotes a valid day in month m1 of year
// y1.
//
public DateTime AddMonths(DateTime time, int months)
{
if (months < -120000 || months > 120000)
{
throw new ArgumentOutOfRangeException(
nameof(months),
String.Format(
CultureInfo.CurrentCulture,
SR.ArgumentOutOfRange_Range,
-120000,
120000));
}
Contract.EndContractBlock();
CheckTicksRange(time.Ticks);
int y = GetDatePart(time.Ticks, DatePartYear);
int m = GetDatePart(time.Ticks, DatePartMonth);
int d = GetDatePart(time.Ticks, DatePartDay);
// i is the zero-based month index after the shift; the two branches keep
// m in 1..12 while carrying whole years into y (handles negative i).
int i = m - 1 + months;
if (i >= 0)
{
m = i % 12 + 1;
y = y + i / 12;
}
else
{
m = 12 + (i + 1) % 12;
y = y + (i - 11) / 12;
}
int[] daysArray = (y % 4 == 0 && (y % 100 != 0 || y % 400 == 0)) ? DaysToMonth366 : DaysToMonth365;
int days = (daysArray[m] - daysArray[m - 1]);
// Clamp the day to the last valid day of the target month.
if (d > days)
{
d = days;
}
// Preserve the original time-of-day portion of the ticks.
long ticks = DateToTicks(y, m, d) + (time.Ticks % TicksPerDay);
Calendar.CheckAddResult(ticks, m_Cal.MinSupportedDateTime, m_Cal.MaxSupportedDateTime);
return (new DateTime(ticks));
}
// Returns the DateTime resulting from adding the given number of years.
// Delegates to AddMonths, which also handles the 2/29 -> 2/28 clamp when the
// target year is not a leap year; the time-of-day part is preserved.
public DateTime AddYears(DateTime time, int years)
{
    return AddMonths(time, years * 12);
}
// Returns the day-of-month part (1..31) of the specified DateTime.
public int GetDayOfMonth(DateTime time)
{
    return GetDatePart(time.Ticks, DatePartDay);
}
// Returns the day-of-week part of the specified DateTime
// (0 = Sunday .. 6 = Saturday).
public DayOfWeek GetDayOfWeek(DateTime time)
{
    CheckTicksRange(time.Ticks);
    // Day 0 (1/1/0001) was a Monday, hence the +1 before the modulo.
    long dayNumber = time.Ticks / TicksPerDay;
    return (DayOfWeek)((dayNumber + 1) % 7);
}
// Returns the day-of-year part (1..366) of the specified DateTime.
public int GetDayOfYear(DateTime time)
{
    return GetDatePart(time.Ticks, DatePartDayOfYear);
}
// Returns the number of days in the given month of the given era-relative
// year.
[Pure]
public int GetDaysInMonth(int year, int month, int era)
{
    // Translate the year/era pair to a Gregorian year (validates both).
    year = GetGregorianYear(year, era);
    if (month < 1 || month > 12)
    {
        throw new ArgumentOutOfRangeException(nameof(month), SR.ArgumentOutOfRange_Month);
    }
    bool isLeap = year % 4 == 0 && (year % 100 != 0 || year % 400 == 0);
    int[] days = isLeap ? DaysToMonth366 : DaysToMonth365;
    return days[month] - days[month - 1];
}
// Returns the number of days in the year given by the year argument for the current era.
//
public int GetDaysInYear(int year, int era)
{
//
// Convert year/era value to Gregorain year value.
//
year = GetGregorianYear(year, era);
return ((year % 4 == 0 && (year % 100 != 0 || year % 400 == 0)) ? 366 : 365);
}
// Returns the era for the specified DateTime value.
public int GetEra(DateTime time)
{
    long ticks = time.Ticks;
    // m_EraInfo is ordered from the most recent era backwards, so the first
    // entry whose start is at or before the ticks value is the right era.
    foreach (var eraInfo in m_EraInfo)
    {
        if (ticks >= eraInfo.ticks)
        {
            return eraInfo.era;
        }
    }
    throw new ArgumentOutOfRangeException(nameof(time), SR.ArgumentOutOfRange_Era);
}
// Gets the era numbers known to this calendar, most recent first.
public int[] Eras
{
    get
    {
        if (m_eras == null)
        {
            // Lazily project the era numbers out of m_EraInfo and cache them.
            int[] eras = new int[m_EraInfo.Length];
            for (int i = 0; i < eras.Length; i++)
            {
                eras[i] = m_EraInfo[i].era;
            }
            m_eras = eras;
        }
        // Clone so callers cannot mutate the cached array.
        return (int[])m_eras.Clone();
    }
}
// Returns the month part of the specified DateTime (1..12).
public int GetMonth(DateTime time)
{
    return GetDatePart(time.Ticks, DatePartMonth);
}
// Returns the number of months in the specified year and era; always 12 for
// Gregorian-based calendars.
public int GetMonthsInYear(int year, int era)
{
    // Called purely to validate the year/era pair; the result is unused.
    GetGregorianYear(year, era);
    return 12;
}
// Returns the year part of the specified DateTime, expressed in the era the
// ticks value falls into. The returned value is an integer between 1 and 9999.
public int GetYear(DateTime time)
{
    long ticks = time.Ticks;
    int gregorianYear = GetDatePart(ticks, DatePartYear);
    // m_EraInfo is ordered from the most recent era backwards; subtract the
    // matching era's year offset to express the Gregorian year in that era.
    foreach (var eraInfo in m_EraInfo)
    {
        if (ticks >= eraInfo.ticks)
        {
            return gregorianYear - eraInfo.yearOffset;
        }
    }
    throw new ArgumentException(SR.Argument_NoEra);
}
// Returns the era-relative year that matches the specified Gregorian year for
// the era in effect at the given DateTime. The returned value is an
// integer between 1 and 9999.
//
public int GetYear(int year, DateTime time)
{
long ticks = time.Ticks;
for (int i = 0; i < m_EraInfo.Length; i++)
{
// while calculating dates with JapaneseLuniSolarCalendar, we can run into cases right after the start of the era
// and still belong to the month which is started in previous era. Calculating equivalent calendar date will cause
// using the new era info which will have the year offset equal to the year we are calculating year = m_EraInfo[i].yearOffset
// which will end up with zero as calendar year.
// We should use the previous era info instead to get the right year number. Example of such date is Feb 2nd 1989
// (hence the extra "year > yearOffset" condition below, unlike GetYear(DateTime)).
if (ticks >= m_EraInfo[i].ticks && year > m_EraInfo[i].yearOffset)
{
return (year - m_EraInfo[i].yearOffset);
}
}
throw new ArgumentException(SR.Argument_NoEra);
}
// Checks whether a given day in the specified era is a leap day, i.e.
// February 29 of a leap year.
public bool IsLeapDay(int year, int month, int day, int era)
{
    // year/month/era validation is performed inside GetDaysInMonth().
    int daysInMonth = GetDaysInMonth(year, month, era);
    if (day < 1 || day > daysInMonth)
    {
        throw new ArgumentOutOfRangeException(
            nameof(day),
            String.Format(
                CultureInfo.CurrentCulture,
                SR.ArgumentOutOfRange_Range,
                1,
                daysInMonth));
    }
    Contract.EndContractBlock();
    // Only Feb 29 of a leap year qualifies as a leap day.
    return IsLeapYear(year, era) && month == 2 && day == 29;
}
// Returns the leap month in a calendar year of the specified era.
// Gregorian-based calendars have no leap month, so this always returns 0.
public int GetLeapMonth(int year, int era)
{
    // Called purely to validate the year/era pair; the result is unused.
    GetGregorianYear(year, era);
    return 0;
}
// Checks whether a given month in the specified era is a leap month.
// Gregorian-based calendars have no leap months, so after validating the
// arguments this always returns false.
public bool IsLeapMonth(int year, int month, int era)
{
    // Validates the year/era pair; the converted year itself is unused.
    GetGregorianYear(year, era);
    if (month < 1 || month > 12)
    {
        throw new ArgumentOutOfRangeException(
            nameof(month),
            String.Format(
                CultureInfo.CurrentCulture,
                SR.ArgumentOutOfRange_Range,
                1,
                12));
    }
    return false;
}
// Checks whether a given year in the specified era is a leap year.
public bool IsLeapYear(int year, int era)
{
    int gregorianYear = GetGregorianYear(year, era);
    // Standard Gregorian rule: divisible by 4, except centuries not
    // divisible by 400.
    return gregorianYear % 4 == 0 && (gregorianYear % 100 != 0 || gregorianYear % 400 == 0);
}
// Returns the date and time converted to a DateTime value. Throws an exception if the n-tuple is invalid.
//
public DateTime ToDateTime(int year, int month, int day, int hour, int minute, int second, int millisecond, int era)
{
// Convert the year/era pair to a Gregorian year (also validates the era).
year = GetGregorianYear(year, era);
long ticks = DateToTicks(year, month, day) + TimeToTicks(hour, minute, second, millisecond);
CheckTicksRange(ticks);
return (new DateTime(ticks));
}
// Returns the week of year for the given DateTime under the given rule and
// first day of week.
public virtual int GetWeekOfYear(DateTime time, CalendarWeekRule rule, DayOfWeek firstDayOfWeek)
{
CheckTicksRange(time.Ticks);
// Use GregorianCalendar to get around the problem that the implementation in Calendar.GetWeekOfYear()
// can call GetYear() that exceeds the supported range of the Gregorian-based calendars.
return (GregorianCalendar.GetDefaultInstance().GetWeekOfYear(time, rule, firstDayOfWeek));
}
// Converts a two-digit year (0..99) to a four-digit year using
// twoDigitYearMax as the inclusive upper bound of the 100-year window;
// years of 100 or more are validated against the supported range and
// returned unchanged.
public int ToFourDigitYear(int year, int twoDigitYearMax)
{
if (year < 0)
{
throw new ArgumentOutOfRangeException(nameof(year),
SR.ArgumentOutOfRange_NeedPosNum);
}
Contract.EndContractBlock();
if (year < 100)
{
// NOTE(review): year is already < 100 at this point, so y == year and the
// modulo is redundant but harmless.
int y = year % 100;
// Pick the century so the result lands at or below twoDigitYearMax.
return ((twoDigitYearMax / 100 - (y > twoDigitYearMax % 100 ? 1 : 0)) * 100 + y);
}
if (year < m_minYear || year > m_maxYear)
{
throw new ArgumentOutOfRangeException(
nameof(year),
String.Format(
CultureInfo.CurrentCulture,
SR.ArgumentOutOfRange_Range, m_minYear, m_maxYear));
}
// If the year value is above 100, just return the year value. Don't have to do
// the TwoDigitYearMax comparison.
return (year);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Data.Common;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Globalization;
using System.Xml;
using System.Xml.Schema;
using System.Xml.Serialization;
namespace System.Data.SqlTypes
{
/// <summary>
/// Represents the date and time data ranging in value
/// from January 1, 1753 to December 31, 9999 to an accuracy of 3.33 milliseconds
/// to be stored in or retrieved from a database.
/// </summary>
[Serializable]
[StructLayout(LayoutKind.Sequential)]
[XmlSchemaProvider("GetXsdType")]
[System.Runtime.CompilerServices.TypeForwardedFrom("System.Data, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")]
public struct SqlDateTime : INullable, IComparable, IXmlSerializable
{
private bool m_fNotNull; // false if null. Do not rename (binary serialization)
private int m_day; // Day from 1900/1/1, could be negative. Range: Jan 1 1753 - Dec 31 9999. Do not rename (binary serialization)
private int m_time; // Time in the day in term of ticks. Do not rename (binary serialization)
// Constants
// A SQL "tick" is 1/300 of a second, so there are 0.3 SQL ticks per
// millisecond (not CLR 100ns ticks).
private static readonly double s_SQLTicksPerMillisecond = 0.3;
public static readonly int SQLTicksPerSecond = 300;
public static readonly int SQLTicksPerMinute = SQLTicksPerSecond * 60;
public static readonly int SQLTicksPerHour = SQLTicksPerMinute * 60;
private static readonly int s_SQLTicksPerDay = SQLTicksPerHour * 24;
// CLR 100ns ticks per second.
private static readonly long s_ticksPerSecond = TimeSpan.TicksPerMillisecond * 1000;
// SQL datetime values are stored as offsets from this base date.
private static readonly DateTime s_SQLBaseDate = new DateTime(1900, 1, 1);
private static readonly long s_SQLBaseDateTicks = s_SQLBaseDate.Ticks;
private static readonly int s_minYear = 1753; // Jan 1 1753
private static readonly int s_maxYear = 9999; // Dec 31 9999
private static readonly int s_minDay = -53690; // Jan 1 1753
private static readonly int s_maxDay = 2958463; // Dec 31 9999 is this many days from Jan 1 1900
private static readonly int s_minTime = 0; // 00:00:0:000PM
private static readonly int s_maxTime = s_SQLTicksPerDay - 1; // = 25919999, 11:59:59:997PM
private static readonly int s_dayBase = 693595; // Jan 1 1900 is this many days from Jan 1 0001
// Cumulative days at the start of each month for non-leap / leap years.
private static readonly int[] s_daysToMonth365 = new int[] {
0, 31, 59, 90, 120, 151, 181, 212, 243, 273, 304, 334, 365};
private static readonly int[] s_daysToMonth366 = new int[] {
0, 31, 60, 91, 121, 152, 182, 213, 244, 274, 305, 335, 366};
private static readonly DateTime s_minDateTime = new DateTime(1753, 1, 1);
private static readonly DateTime s_maxDateTime = DateTime.MaxValue;
private static readonly TimeSpan s_minTimeSpan = s_minDateTime.Subtract(s_SQLBaseDate);
private static readonly TimeSpan s_maxTimeSpan = s_maxDateTime.Subtract(s_SQLBaseDate);
private static readonly string s_ISO8601_DateTimeFormat = "yyyy-MM-ddTHH:mm:ss.fff";
// These formats are valid styles in SQL Server (style 9, 12, 13, 14)
// but couldn't be recognized by the default parse. Needs to call
// ParseExact in addition to recognize them.
private static readonly string[] s_dateTimeFormats = {
"MMM d yyyy hh:mm:ss:ffftt",
"MMM d yyyy hh:mm:ss:fff",
"d MMM yyyy hh:mm:ss:ffftt",
"d MMM yyyy hh:mm:ss:fff",
"hh:mm:ss:ffftt",
"hh:mm:ss:fff",
"yyMMdd",
"yyyyMMdd"
};
private const DateTimeStyles x_DateTimeStyle = DateTimeStyles.AllowWhiteSpaces;
// construct a Null instance; the fNull argument only disambiguates the overload.
private SqlDateTime(bool fNull)
{
m_fNotNull = false;
m_day = 0;
m_time = 0;
}
// Constructs from a CLR DateTime, rounding to SQL datetime precision (1/300 s).
public SqlDateTime(DateTime value)
{
this = FromDateTime(value);
}
// Constructs a date-only value (time of day is midnight).
public SqlDateTime(int year, int month, int day)
: this(year, month, day, 0, 0, 0, 0.0)
{
}
// Constructs a value with whole-second precision.
public SqlDateTime(int year, int month, int day, int hour, int minute, int second)
: this(year, month, day, hour, minute, second, 0.0)
{
}
// Core constructor: validates every component, converts to day/time ticks,
// and throws SqlTypeException for any out-of-range n-tuple.
public SqlDateTime(int year, int month, int day, int hour, int minute, int second, double millisecond)
{
if (year >= s_minYear && year <= s_maxYear && month >= 1 && month <= 12)
{
int[] days = IsLeapYear(year) ? s_daysToMonth366 : s_daysToMonth365;
if (day >= 1 && day <= days[month] - days[month - 1])
{
// Days since 0001-01-01, then rebased to days since 1900-01-01.
int y = year - 1;
int dayticks = y * 365 + y / 4 - y / 100 + y / 400 + days[month - 1] + day - 1;
dayticks -= s_dayBase;
if (dayticks >= s_minDay && dayticks <= s_maxDay &&
hour >= 0 && hour < 24 && minute >= 0 && minute < 60 &&
second >= 0 && second < 60 && millisecond >= 0 && millisecond < 1000.0)
{
// +0.5 rounds to the nearest 1/300-second SQL tick.
double ticksForMilisecond = millisecond * s_SQLTicksPerMillisecond + 0.5;
int timeticks = hour * SQLTicksPerHour + minute * SQLTicksPerMinute + second * SQLTicksPerSecond +
(int)ticksForMilisecond;
if (timeticks > s_maxTime)
{
// Only rounding up could cause time to become greater than MaxTime.
Debug.Assert(timeticks == s_maxTime + 1);
// Make time to be zero, and increment day.
timeticks = 0;
dayticks++;
}
// Success. Call ctor here which will again check dayticks and timeticks are within range.
// All other cases will throw exception below.
this = new SqlDateTime(dayticks, timeticks);
return;
}
}
}
throw new SqlTypeException(SQLResource.InvalidDateTimeMessage);
}
// constructor that take DBTIMESTAMP data members
// Note: bilisecond is same as 'fraction' in DBTIMESTAMP (billionths of a
// second here scaled by /1000.0 into milliseconds). The parameter name is
// part of the public signature and must not be renamed.
public SqlDateTime(int year, int month, int day, int hour, int minute, int second, int bilisecond)
: this(year, month, day, hour, minute, second, bilisecond / 1000.0)
{
}
// Constructs directly from day ticks (days since 1900-01-01) and time ticks
// (1/300 s units); throws OverflowException when either is out of range.
public SqlDateTime(int dayTicks, int timeTicks)
{
if (dayTicks < s_minDay || dayTicks > s_maxDay || timeTicks < s_minTime || timeTicks > s_maxTime)
{
m_fNotNull = false;
throw new OverflowException(SQLResource.DateTimeOverflowMessage);
}
m_day = dayTicks;
m_time = timeTicks;
m_fNotNull = true;
}
// Constructs from a floating-point day count where the fraction is the time
// of day (SQL Server's numeric representation of datetime).
internal SqlDateTime(double dblVal)
{
if ((dblVal < s_minDay) || (dblVal >= s_maxDay + 1))
throw new OverflowException(SQLResource.DateTimeOverflowMessage);
int day = (int)dblVal;
int time = (int)((dblVal - day) * s_SQLTicksPerDay);
// Check if we need to borrow a day from the day portion.
if (time < 0)
{
day--;
time += s_SQLTicksPerDay;
}
else if (time >= s_SQLTicksPerDay)
{
// Deal with case where time portion = 24 hrs.
//
// ISSUE: Is this code reachable? For this code to be reached there
// must be a value for dblVal such that:
// dblVal - (long)dblVal = 1.0
// This seems odd, but there was a bug that resulted because
// there was a negative value for dblVal such that dblVal + 1.0 = 1.0
//
day++;
time -= s_SQLTicksPerDay;
}
this = new SqlDateTime(day, time);
}
// INullable: true when this instance represents SQL NULL.
public bool IsNull => !m_fNotNull;
// Converts the day/time tick pair to a TimeSpan offset from the 1900-01-01
// base date, rounding SQL ticks (1/300 s) to the nearest millisecond.
private static TimeSpan ToTimeSpan(SqlDateTime value)
{
long millisecond = (long)(value.m_time / s_SQLTicksPerMillisecond + 0.5);
return new TimeSpan(value.m_day * TimeSpan.TicksPerDay +
millisecond * TimeSpan.TicksPerMillisecond);
}
// Converts to a CLR DateTime. NOTE: no null check — a Null value converts to
// the base date (m_day and m_time are both zero).
private static DateTime ToDateTime(SqlDateTime value)
{
return s_SQLBaseDate.Add(ToTimeSpan(value));
}
// Used by SqlBuffer in SqlClient.
internal static DateTime ToDateTime(int daypart, int timepart)
{
if (daypart < s_minDay || daypart > s_maxDay || timepart < s_minTime || timepart > s_maxTime)
{
throw new OverflowException(SQLResource.DateTimeOverflowMessage);
}
long dayticks = daypart * TimeSpan.TicksPerDay;
// Round SQL time ticks to the nearest millisecond before converting to
// CLR 100ns ticks.
long timeticks = ((long)(timepart / s_SQLTicksPerMillisecond + 0.5)) * TimeSpan.TicksPerMillisecond;
DateTime result = new DateTime(s_SQLBaseDateTicks + dayticks + timeticks);
return result;
}
// Convert from TimeSpan, rounded to one three-hundredth second, due to loss of precision
private static SqlDateTime FromTimeSpan(TimeSpan value)
{
if (value < s_minTimeSpan || value > s_maxTimeSpan)
throw new SqlTypeException(SQLResource.DateTimeOverflowMessage);
int day = value.Days;
long ticks = value.Ticks - day * TimeSpan.TicksPerDay;
// Negative remainders (dates before 1900) borrow a day so the time part
// stays non-negative.
if (ticks < 0L)
{
day--;
ticks += TimeSpan.TicksPerDay;
}
int time = (int)((double)ticks / TimeSpan.TicksPerMillisecond * s_SQLTicksPerMillisecond + 0.5);
if (time > s_maxTime)
{
// Only rounding up could cause time to become greater than MaxTime.
Debug.Assert(time == s_maxTime + 1);
// Make time to be zero, and increment day.
time = 0;
day++;
}
return new SqlDateTime(day, time);
}
private static SqlDateTime FromDateTime(DateTime value)
{
// SqlDateTime has smaller precision and range than DateTime.
// Usually we round the DateTime value to the nearest SqlDateTime value.
// but for DateTime.MaxValue, if we round it up, it will overflow.
// Although the overflow would be the correct behavior, we simply
// returned SqlDateTime.MaxValue in v1. In order not to break existing
// code, we'll keep this logic.
//
if (value == DateTime.MaxValue)
return SqlDateTime.MaxValue;
return FromTimeSpan(value.Subtract(s_SQLBaseDate));
}
/*
internal static SqlDateTime FromDouble(double dblVal) {
return new SqlDateTime(dblVal);
}
internal static double ToDouble(SqlDateTime x) {
AssertValidSqlDateTime(x);
return(double)x.m_day + ((double)x.m_time / (double)SQLTicksPerDay);
}
internal static int ToInt(SqlDateTime x) {
AssertValidSqlDateTime(x);
return x.m_time >= MaxTime / 2 ? x.m_day + 1 : x.m_day;
}
*/
// Gets this value as a CLR DateTime. Note: the conversion is recomputed on
// every access, so frequent use is comparatively expensive.
// Throws SqlNullValueException when the instance is Null.
public DateTime Value
{
    get
    {
        if (!m_fNotNull)
            throw new SqlNullValueException();
        return ToDateTime(this);
    }
}
// Gets the number of days since 1900-01-01 (may be negative).
// Throws SqlNullValueException when the instance is Null.
public int DayTicks
{
    get
    {
        if (!m_fNotNull)
            throw new SqlNullValueException();
        return m_day;
    }
}
// Gets the time of day in units of 1/300 second.
// Throws SqlNullValueException when the instance is Null.
public int TimeTicks
{
    get
    {
        if (!m_fNotNull)
            throw new SqlNullValueException();
        return m_time;
    }
}
// Implicit conversion from DateTime to SqlDateTime.
public static implicit operator SqlDateTime(DateTime value) => new SqlDateTime(value);
// Explicit conversion from SqlDateTime to DateTime. No null check is
// performed, so converting Null yields the base date (1900-01-01).
public static explicit operator DateTime(SqlDateTime x) => ToDateTime(x);
// Returns the string representation of this SqlDateTime, or the null string
// when the instance is Null.
public override string ToString()
{
    if (IsNull)
        return SQLResource.NullString;
    return ToDateTime(this).ToString((IFormatProvider)null);
}
// Parses the string representation of a date/time into a SqlDateTime.
// The literal null string yields SqlDateTime.Null. Falls back to the SQL
// Server style formats (s_dateTimeFormats) when the invariant default parse
// fails.
public static SqlDateTime Parse(string s)
{
DateTime dt;
if (s == SQLResource.NullString)
return SqlDateTime.Null;
try
{
dt = DateTime.Parse(s, CultureInfo.InvariantCulture);
}
catch (FormatException)
{
// Note the fallback intentionally uses the *current* culture's format info.
DateTimeFormatInfo dtfi = (DateTimeFormatInfo)(CultureInfo.CurrentCulture.GetFormat(typeof(DateTimeFormatInfo)));
dt = DateTime.ParseExact(s, s_dateTimeFormats, dtfi, x_DateTimeStyle);
}
return new SqlDateTime(dt);
}
// Binary arithmetic operators.
// Adds a TimeSpan to a SqlDateTime; Null propagates.
// Alternative method: SqlDateTime.Add
public static SqlDateTime operator +(SqlDateTime x, TimeSpan t)
{
    if (x.IsNull)
        return Null;
    return FromDateTime(ToDateTime(x) + t);
}
// Subtracts a TimeSpan from a SqlDateTime; Null propagates.
// Alternative method: SqlDateTime.Subtract
public static SqlDateTime operator -(SqlDateTime x, TimeSpan t)
{
    if (x.IsNull)
        return Null;
    return FromDateTime(ToDateTime(x) - t);
}
//--------------------------------------------------
// Alternative methods for overloaded operators
//--------------------------------------------------
// Alternative method for operator +
public static SqlDateTime Add(SqlDateTime x, TimeSpan t) => x + t;
// Alternative method for operator -
public static SqlDateTime Subtract(SqlDateTime x, TimeSpan t) => x - t;
/*
// Implicit conversions
// Implicit conversion from SqlBoolean to SqlDateTime
public static implicit operator SqlDateTime(SqlBoolean x)
{
return x.IsNull ? Null : new SqlDateTime(x.Value, 0);
}
// Implicit conversion from SqlInt32 to SqlDateTime
public static implicit operator SqlDateTime(SqlInt32 x)
{
return x.IsNull ? Null : new SqlDateTime(x.Value, 0);
}
// Implicit conversion from SqlMoney to SqlDateTime
public static implicit operator SqlDateTime(SqlMoney x)
{
return x.IsNull ? Null : SqlDateTime.FromDouble(x.ToDouble());
}
// Explicit conversions
// Explicit conversion from SqlDateTime to SqlInt32
public static explicit operator SqlInt32(SqlDateTime x)
{
if (x.IsNull)
return SqlInt32.Null;
return new SqlInt32(SqlDateTime.ToInt(x));
}
// Explicit conversion from SqlDateTime to SqlBoolean
public static explicit operator SqlBoolean(SqlDateTime x)
{
if (x.IsNull)
return SqlBoolean.Null;
return new SqlBoolean(x.m_day != 0 || x.m_time != 0, false);
}
// Explicit conversion from SqlDateTime to SqlMoney
public static explicit operator SqlMoney(SqlDateTime x)
{
return x.IsNull ? SqlMoney.Null : new SqlMoney(SqlDateTime.ToDouble(x));
}
// Implicit conversion from SqlDouble to SqlDateTime
public static implicit operator SqlDateTime(SqlDouble x)
{
return x.IsNull ? Null : new SqlDateTime(x.Value);
}
// Explicit conversion from SqlDateTime to SqlDouble
public static explicit operator SqlDouble(SqlDateTime x)
{
return x.IsNull ? SqlDouble.Null : new SqlDouble(SqlDateTime.ToDouble(x));
}
// Implicit conversion from SqlDecimal to SqlDateTime
public static implicit operator SqlDateTime(SqlDecimal x)
{
return x.IsNull ? SqlDateTime.Null : new SqlDateTime(SqlDecimal.ToDouble(x));
}
// Explicit conversion from SqlDateTime to SqlDecimal
public static explicit operator SqlDecimal(SqlDateTime x)
{
return x.IsNull ? SqlDecimal.Null : new SqlDecimal(SqlDateTime.ToDouble(x));
}
*/
// Explicit conversion from SqlString to SqlDateTime; Null propagates.
// Throws FormatException or OverflowException if necessary.
public static explicit operator SqlDateTime(SqlString x)
{
return x.IsNull ? SqlDateTime.Null : SqlDateTime.Parse(x.Value);
}
// Builtin functions
// utility functions
/*
private static void AssertValidSqlDateTime(SqlDateTime x) {
Debug.Assert(!x.IsNull, "!x.IsNull", "Datetime: Null");
Debug.Assert(x.m_day >= MinDay && x.m_day <= MaxDay, "day >= MinDay && day <= MaxDay",
"DateTime: Day out of range");
Debug.Assert(x.m_time >= MinTime && x.m_time <= MaxTime, "time >= MinTime && time <= MaxTime",
"DateTime: Time out of range");
}
*/
// Determines whether the given Gregorian year is a leap year: divisible by
// four, except century years, which must also be divisible by 400.
//
// @param year The year to check.
// @return true if "year" is a leap year, false otherwise.
private static bool IsLeapYear(int year)
{
    if (year % 4 != 0)
        return false;
    return year % 100 != 0 || year % 400 == 0;
}
// Overloading comparison operators.
// All comparisons use SQL three-valued logic: if either operand is Null the
// result is SqlBoolean.Null rather than true/false. Otherwise the (day, time)
// tick pairs are compared lexicographically.
public static SqlBoolean operator ==(SqlDateTime x, SqlDateTime y)
{
return (x.IsNull || y.IsNull) ? SqlBoolean.Null : new SqlBoolean(x.m_day == y.m_day && x.m_time == y.m_time);
}
public static SqlBoolean operator !=(SqlDateTime x, SqlDateTime y)
{
return !(x == y);
}
public static SqlBoolean operator <(SqlDateTime x, SqlDateTime y)
{
return (x.IsNull || y.IsNull) ? SqlBoolean.Null :
new SqlBoolean(x.m_day < y.m_day || (x.m_day == y.m_day && x.m_time < y.m_time));
}
public static SqlBoolean operator >(SqlDateTime x, SqlDateTime y)
{
return (x.IsNull || y.IsNull) ? SqlBoolean.Null :
new SqlBoolean(x.m_day > y.m_day || (x.m_day == y.m_day && x.m_time > y.m_time));
}
public static SqlBoolean operator <=(SqlDateTime x, SqlDateTime y)
{
return (x.IsNull || y.IsNull) ? SqlBoolean.Null :
new SqlBoolean(x.m_day < y.m_day || (x.m_day == y.m_day && x.m_time <= y.m_time));
}
public static SqlBoolean operator >=(SqlDateTime x, SqlDateTime y)
{
return (x.IsNull || y.IsNull) ? SqlBoolean.Null :
new SqlBoolean(x.m_day > y.m_day || (x.m_day == y.m_day && x.m_time >= y.m_time));
}
//--------------------------------------------------
// Alternative methods for the overloaded comparison
// operators; each simply delegates to the operator.
//--------------------------------------------------
// Alternative method for operator ==
public static SqlBoolean Equals(SqlDateTime x, SqlDateTime y) => x == y;
// Alternative method for operator !=
public static SqlBoolean NotEquals(SqlDateTime x, SqlDateTime y) => x != y;
// Alternative method for operator <
public static SqlBoolean LessThan(SqlDateTime x, SqlDateTime y) => x < y;
// Alternative method for operator >
public static SqlBoolean GreaterThan(SqlDateTime x, SqlDateTime y) => x > y;
// Alternative method for operator <=
public static SqlBoolean LessThanOrEqual(SqlDateTime x, SqlDateTime y) => x <= y;
// Alternative method for operator >=
public static SqlBoolean GreaterThanOrEqual(SqlDateTime x, SqlDateTime y) => x >= y;
// Alternative method for the explicit conversion to SqlString.
public SqlString ToSqlString() => (SqlString)this;
// IComparable
// Compares this object to another object, returning an integer that
// indicates the relationship: negative if this < value, zero if equal,
// positive if this > value. Null is considered less than any non-null
// instance. Throws (via ADP.WrongType) when value is not a SqlDateTime.
public int CompareTo(object value)
{
    if (value is SqlDateTime other)
    {
        return CompareTo(other);
    }
    throw ADP.WrongType(value.GetType(), typeof(SqlDateTime));
}
public int CompareTo(SqlDateTime value)
{
    // Two Nulls compare equal; a single Null sorts before anything else.
    if (IsNull)
        return value.IsNull ? 0 : -1;
    if (value.IsNull)
        return 1;
    if (this < value) return -1;
    return this > value ? 1 : 0;
}
// Compares this instance with a specified object for equality.
// Unlike operator ==, this follows CLR semantics: two Nulls are equal,
// and a non-SqlDateTime argument yields false.
public override bool Equals(object value)
{
    if (value is SqlDateTime other)
    {
        if (other.IsNull || IsNull)
            return other.IsNull && IsNull;
        return (this == other).Value;
    }
    return false;
}
// For hashing purposes; Null hashes to 0.
public override int GetHashCode() => IsNull ? 0 : Value.GetHashCode();
// IXmlSerializable: schema is supplied via the XmlSchemaProvider attribute.
XmlSchema IXmlSerializable.GetSchema() { return null; }
// Deserializes either xsi:nil (Null) or an ISO 8601 dateTime element.
void IXmlSerializable.ReadXml(XmlReader reader)
{
string isNull = reader.GetAttribute("nil", XmlSchema.InstanceNamespace);
if (isNull != null && XmlConvert.ToBoolean(isNull))
{
// Read the next value.
reader.ReadElementString();
m_fNotNull = false;
}
else
{
DateTime dt = XmlConvert.ToDateTime(reader.ReadElementString(), XmlDateTimeSerializationMode.RoundtripKind);
// We do not support any kind of timezone information that is
// possibly included in the CLR DateTime, since SQL Server
// does not support TZ info. If any was specified, error out.
//
if (dt.Kind != System.DateTimeKind.Unspecified)
{
throw new SqlTypeException(SQLResource.TimeZoneSpecifiedMessage);
}
SqlDateTime st = FromDateTime(dt);
m_day = st.DayTicks;
m_time = st.TimeTicks;
m_fNotNull = true;
}
}
// Serializes as xsi:nil when Null, otherwise as an ISO 8601 string.
void IXmlSerializable.WriteXml(XmlWriter writer)
{
if (IsNull)
{
writer.WriteAttributeString("xsi", "nil", XmlSchema.InstanceNamespace, "true");
}
else
{
writer.WriteString(XmlConvert.ToString(Value, s_ISO8601_DateTimeFormat));
}
}
// Returns the XSD type for this struct (xs:dateTime).
public static XmlQualifiedName GetXsdType(XmlSchemaSet schemaSet)
{
return new XmlQualifiedName("dateTime", XmlSchema.Namespace);
}
// 1753-01-01 00:00:00, 9999-12-31 23:59:59.997, and the Null sentinel.
public static readonly SqlDateTime MinValue = new SqlDateTime(s_minDay, 0);
public static readonly SqlDateTime MaxValue = new SqlDateTime(s_maxDay, s_maxTime);
public static readonly SqlDateTime Null = new SqlDateTime(true);
} // SqlDateTime
} // namespace System.Data.SqlTypes
| |
// Copyright (c) Microsoft Corporation. All rights reserved. See License.txt in the project root for license information.
using System;
using System.ComponentModel.Composition;
using System.ComponentModel.Composition.Hosting;
using System.IO;
using Microsoft.Practices.Prism.Logging;
using Microsoft.Practices.Prism.MefExtensions.Modularity;
using Microsoft.Practices.Prism.Modularity;
using Microsoft.VisualStudio.TestTools.UnitTesting;
using Moq;
namespace Microsoft.Practices.Prism.MefExtensions.Tests
{
public partial class MefModuleManagerFixture
{
// Verifies that a null module initializer is rejected with the expected
// parameter name.
[TestMethod]
public void ConstructorThrowsWithNullModuleInitializer()
{
try
{
new MefModuleManager(null, new Mock<IModuleCatalog>().Object, new Mock<ILoggerFacade>().Object);
Assert.Fail("No exception thrown when expected");
}
catch (ArgumentNullException ex)
{
Assert.AreEqual("moduleInitializer", ex.ParamName);
}
}
// Verifies that a null module catalog is rejected with the expected
// parameter name.
[TestMethod]
public void ConstructorThrowsWithNullModuleCatalog()
{
try
{
new MefModuleManager(new Mock<IModuleInitializer>().Object, null, new Mock<ILoggerFacade>().Object);
Assert.Fail("No exception thrown when expected");
}
catch (ArgumentNullException ex)
{
Assert.AreEqual("moduleCatalog", ex.ParamName);
}
}
// Verifies that a null logger facade is rejected with the expected
// parameter name.
[TestMethod]
public void ConstructorThrowsWithNullLogger()
{
try
{
new MefModuleManager(new Mock<IModuleInitializer>().Object, new Mock<IModuleCatalog>().Object, null);
Assert.Fail("No exception thrown when expected");
}
catch (ArgumentNullException ex)
{
Assert.AreEqual("loggerFacade", ex.ParamName);
}
}
#if DEBUG
[DeploymentItem(@"..\..\..\MefModulesForTesting\bin\debug\MefModulesForTesting.dll")]
#else
[DeploymentItem(@"..\..\..\MefModulesForTesting\bin\release\MefModulesForTesting.dll")]
#endif
[TestMethod]
public void ModuleInUnreferencedAssemblyInitializedByModuleInitializer()
{
// Compose a container directly over the test-module assembly so MEF
// discovers the exported module without a project reference.
AssemblyCatalog assemblyCatalog = new AssemblyCatalog(GetPathToModuleDll());
CompositionContainer compositionContainer = new CompositionContainer(assemblyCatalog);
ModuleCatalog moduleCatalog = new ModuleCatalog();
Mock<MefFileModuleTypeLoader> mockFileTypeLoader = new Mock<MefFileModuleTypeLoader>();
compositionContainer.ComposeExportedValue<IModuleCatalog>(moduleCatalog);
compositionContainer.ComposeExportedValue<MefFileModuleTypeLoader>(mockFileTypeLoader.Object);
// Track whether the initializer is invoked for the discovered module.
bool wasInit = false;
var mockModuleInitializer = new Mock<IModuleInitializer>();
mockModuleInitializer.Setup(x => x.Initialize(It.IsAny<ModuleInfo>())).Callback(() => wasInit = true);
var mockLoggerFacade = new Mock<ILoggerFacade>();
MefModuleManager moduleManager = new MefModuleManager(
mockModuleInitializer.Object,
moduleCatalog,
mockLoggerFacade.Object);
compositionContainer.SatisfyImportsOnce(moduleManager);
moduleManager.Run();
Assert.IsTrue(wasInit);
}
#if DEBUG
[DeploymentItem(@"..\..\..\MefModulesForTesting\bin\debug\MefModulesForTesting.dll")]
#else
[DeploymentItem(@"..\..\..\MefModulesForTesting\bin\release\MefModulesForTesting.dll")]
#endif
[TestMethod]
public void DeclaredModuleWithoutTypeInUnreferencedAssemblyIsUpdatedWithTypeNameFromExportAttribute()
{
// Start with an empty aggregate catalog; the module assembly is added later
// to simulate a module discovered after the initial Run().
AggregateCatalog aggregateCatalog = new AggregateCatalog();
CompositionContainer compositionContainer = new CompositionContainer(aggregateCatalog);
var mockFileTypeLoader = new Mock<MefFileModuleTypeLoader>();
mockFileTypeLoader.Setup(tl => tl.CanLoadModuleType(It.IsAny<ModuleInfo>())).Returns(true);
ModuleCatalog moduleCatalog = new ModuleCatalog();
// Declared by name only — no ModuleType yet.
ModuleInfo moduleInfo = new ModuleInfo { ModuleName = "MefModuleOne" };
moduleCatalog.AddModule(moduleInfo);
compositionContainer.ComposeExportedValue<IModuleCatalog>(moduleCatalog);
compositionContainer.ComposeExportedValue<MefFileModuleTypeLoader>(mockFileTypeLoader.Object);
bool wasInit = false;
var mockModuleInitializer = new Mock<IModuleInitializer>();
mockModuleInitializer.Setup(x => x.Initialize(It.IsAny<ModuleInfo>())).Callback(() => wasInit = true);
var mockLoggerFacade = new Mock<ILoggerFacade>();
MefModuleManager moduleManager = new MefModuleManager(
mockModuleInitializer.Object,
moduleCatalog,
mockLoggerFacade.Object);
compositionContainer.SatisfyImportsOnce(moduleManager);
moduleManager.Run();
// Not initialized yet: the module's assembly is not in the catalog.
Assert.IsFalse(wasInit);
AssemblyCatalog assemblyCatalog = new AssemblyCatalog(GetPathToModuleDll());
aggregateCatalog.Catalogs.Add(assemblyCatalog);
compositionContainer.SatisfyImportsOnce(moduleManager);
// Simulate the type loader reporting the module load as complete.
mockFileTypeLoader.Raise(tl => tl.LoadModuleCompleted += null, new LoadModuleCompletedEventArgs(moduleInfo, null));
Assert.AreEqual("MefModulesForTesting.MefModuleOne, MefModulesForTesting, Version=1.0.0.0, Culture=neutral, PublicKeyToken=null", moduleInfo.ModuleType);
Assert.IsTrue(wasInit);
}
#if DEBUG
[DeploymentItem(@"..\..\..\MefModulesForTesting\bin\debug\MefModulesForTesting.dll")]
#else
[DeploymentItem(@"..\..\..\MefModulesForTesting\bin\release\MefModulesForTesting.dll")]
#endif
[TestMethod]
public void DeclaredModuleWithTypeInUnreferencedAssemblyIsUpdatedWithTypeNameFromExportAttribute()
{
// Same scenario as above, except the declared module carries a placeholder
// ModuleType that must be overwritten by the export attribute's type name.
AggregateCatalog aggregateCatalog = new AggregateCatalog();
CompositionContainer compositionContainer = new CompositionContainer(aggregateCatalog);
var mockFileTypeLoader = new Mock<MefFileModuleTypeLoader>();
mockFileTypeLoader.Setup(tl => tl.CanLoadModuleType(It.IsAny<ModuleInfo>())).Returns(true);
ModuleCatalog moduleCatalog = new ModuleCatalog();
ModuleInfo moduleInfo = new ModuleInfo { ModuleName = "MefModuleOne", ModuleType = "some type" };
moduleCatalog.AddModule(moduleInfo);
compositionContainer.ComposeExportedValue<IModuleCatalog>(moduleCatalog);
compositionContainer.ComposeExportedValue<MefFileModuleTypeLoader>(mockFileTypeLoader.Object);
bool wasInit = false;
var mockModuleInitializer = new Mock<IModuleInitializer>();
mockModuleInitializer.Setup(x => x.Initialize(It.IsAny<ModuleInfo>())).Callback(() => wasInit = true);
var mockLoggerFacade = new Mock<ILoggerFacade>();
MefModuleManager moduleManager = new MefModuleManager(
mockModuleInitializer.Object,
moduleCatalog,
mockLoggerFacade.Object);
compositionContainer.SatisfyImportsOnce(moduleManager);
moduleManager.Run();
Assert.IsFalse(wasInit);
AssemblyCatalog assemblyCatalog = new AssemblyCatalog(GetPathToModuleDll());
aggregateCatalog.Catalogs.Add(assemblyCatalog);
compositionContainer.SatisfyImportsOnce(moduleManager);
mockFileTypeLoader.Raise(tl => tl.LoadModuleCompleted += null, new LoadModuleCompletedEventArgs(moduleInfo, null));
Assert.AreEqual("MefModulesForTesting.MefModuleOne, MefModulesForTesting, Version=1.0.0.0, Culture=neutral, PublicKeyToken=null", moduleInfo.ModuleType);
Assert.IsTrue(wasInit);
}
// Due to different test runners and file locations, this helper locates the
// MefModulesForTesting assembly the tests need, probing the working
// directory first and then the debug/release build outputs.
private static string GetPathToModuleDll()
{
    string[] candidates =
    {
        "MefModulesForTesting.dll",
        @"..\..\..\MefModulesForTesting\bin\debug\MefModulesForTesting.dll",
        @"..\..\..\MefModulesForTesting\bin\release\MefModulesForTesting.dll",
    };
    foreach (string candidate in candidates)
    {
        if (File.Exists(candidate))
        {
            return candidate;
        }
    }
    // Assert.Fail throws, so this return is never reached in practice.
    Assert.Fail("Cannot find module for testing");
    return null;
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Collections.Generic;
using System.Collections.Immutable;
using System.Linq;
using Analyzer.Utilities;
using Analyzer.Utilities.Extensions;
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Diagnostics;
using Microsoft.CodeAnalysis.Operations;
namespace Microsoft.NetCore.Analyzers.Runtime
{
/// <summary>
/// CA1307: Specify StringComparison
/// </summary>
[DiagnosticAnalyzer(LanguageNames.CSharp, LanguageNames.VisualBasic)]
public sealed class SpecifyStringComparisonAnalyzer : DiagnosticAnalyzer
{
    private const string RuleId_CA1307 = "CA1307";
    private const string RuleId_CA1310 = "CA1310";

    // String members whose string-first overloads default to culture-sensitive
    // comparison; calls to them are candidates for the CA1310 correctness rule.
    private static readonly ImmutableArray<string> s_CA1310MethodNamesWithFirstStringParameter =
        ImmutableArray.Create("Compare", "StartsWith", "EndsWith", "IndexOf", "LastIndexOf");

    private static readonly LocalizableString s_localizableCA1307Title = new LocalizableResourceString(nameof(MicrosoftNetCoreAnalyzersResources.SpecifyStringComparisonCA1307Title), MicrosoftNetCoreAnalyzersResources.ResourceManager, typeof(MicrosoftNetCoreAnalyzersResources));
    private static readonly LocalizableString s_localizableCA1307Message = new LocalizableResourceString(nameof(MicrosoftNetCoreAnalyzersResources.SpecifyStringComparisonCA1307Message), MicrosoftNetCoreAnalyzersResources.ResourceManager, typeof(MicrosoftNetCoreAnalyzersResources));
    private static readonly LocalizableString s_localizableCA1307Description = new LocalizableResourceString(nameof(MicrosoftNetCoreAnalyzersResources.SpecifyStringComparisonCA1307Description), MicrosoftNetCoreAnalyzersResources.ResourceManager, typeof(MicrosoftNetCoreAnalyzersResources));

    // CA1307 (maintainability): an overload taking StringComparison exists; prefer it for clarity.
    internal static DiagnosticDescriptor Rule_CA1307 = DiagnosticDescriptorHelper.Create(RuleId_CA1307,
        s_localizableCA1307Title,
        s_localizableCA1307Message,
        DiagnosticCategory.Globalization,
        RuleLevel.Disabled,
        description: s_localizableCA1307Description,
        isPortedFxCopRule: true,
        isDataflowRule: false);

    private static readonly LocalizableString s_localizableCA1310Title = new LocalizableResourceString(nameof(MicrosoftNetCoreAnalyzersResources.SpecifyStringComparisonCA1310Title), MicrosoftNetCoreAnalyzersResources.ResourceManager, typeof(MicrosoftNetCoreAnalyzersResources));
    private static readonly LocalizableString s_localizableCA1310Message = new LocalizableResourceString(nameof(MicrosoftNetCoreAnalyzersResources.SpecifyStringComparisonCA1310Message), MicrosoftNetCoreAnalyzersResources.ResourceManager, typeof(MicrosoftNetCoreAnalyzersResources));
    private static readonly LocalizableString s_localizableCA1310Description = new LocalizableResourceString(nameof(MicrosoftNetCoreAnalyzersResources.SpecifyStringComparisonCA1310Description), MicrosoftNetCoreAnalyzersResources.ResourceManager, typeof(MicrosoftNetCoreAnalyzersResources));

    // CA1310 (correctness): the invoked overload defaults to culture-sensitive comparison.
    internal static DiagnosticDescriptor Rule_CA1310 = DiagnosticDescriptorHelper.Create(RuleId_CA1310,
        s_localizableCA1310Title,
        s_localizableCA1310Message,
        DiagnosticCategory.Globalization,
        RuleLevel.IdeHidden_BulkConfigurable,
        description: s_localizableCA1310Description,
        isPortedFxCopRule: false,
        isDataflowRule: false);

    public override ImmutableArray<DiagnosticDescriptor> SupportedDiagnostics => ImmutableArray.Create(Rule_CA1307, Rule_CA1310);

    public override void Initialize(AnalysisContext context)
    {
        context.EnableConcurrentExecution();
        context.ConfigureGeneratedCodeAnalysis(GeneratedCodeAnalysisFlags.None);

        context.RegisterCompilationStartAction(csaContext =>
        {
            var stringComparisonType = csaContext.Compilation.GetOrCreateTypeByMetadataName(WellKnownTypeNames.SystemStringComparison);
            var stringType = csaContext.Compilation.GetSpecialType(SpecialType.System_String);

            // Without these symbols the rule cannot run
            if (stringComparisonType == null || stringType == null)
            {
                return;
            }

            // Map of culture-sensitive string methods to the StringComparison
            // overload that should be recommended instead (computed once per compilation).
            var overloadMap = GetWellKnownStringOverloads(csaContext.Compilation, stringType, stringComparisonType);

            var linqExpressionType = csaContext.Compilation.GetOrCreateTypeByMetadataName(WellKnownTypeNames.SystemLinqExpressionsExpression1);

            csaContext.RegisterOperationAction(oaContext =>
            {
                var invocationExpression = (IInvocationOperation)oaContext.Operation;
                var targetMethod = invocationExpression.TargetMethod;

                if (targetMethod.IsGenericMethod ||
                    targetMethod.ContainingType == null ||
                    targetMethod.ContainingType.IsErrorType())
                {
                    return;
                }

                // Check if we are in a Expression<Func<T...>> context, in which case it is possible
                // that the underlying call doesn't have the comparison option so we want to bail-out.
                if (invocationExpression.IsWithinExpressionTree(linqExpressionType))
                {
                    return;
                }

                // Report correctness issue CA1310 for known string comparison methods that default to culture specific string comparison:
                // https://docs.microsoft.com/dotnet/standard/base-types/best-practices-strings#string-comparisons-that-use-the-current-culture
                // Single TryGetValue replaces the previous IsEmpty + ContainsKey + indexer
                // combination, avoiding a redundant second dictionary lookup.
                if (targetMethod.ContainingType.SpecialType == SpecialType.System_String &&
                    overloadMap.TryGetValue(targetMethod, out var ca1310Overload))
                {
                    ReportDiagnostic(
                        Rule_CA1310,
                        oaContext,
                        invocationExpression,
                        targetMethod,
                        ca1310Overload);

                    return;
                }

                // Report maintainability issue CA1307 for any method that has an additional overload with the exact same parameter list,
                // plus as additional StringComparison parameter. Default StringComparison may or may not match user's intent,
                // but it is recommended to explicitly specify it for clarity and readability:
                // https://docs.microsoft.com/dotnet/standard/base-types/best-practices-strings#recommendations-for-string-usage
                IEnumerable<IMethodSymbol> methodsWithSameNameAsTargetMethod = targetMethod.ContainingType.GetMembers(targetMethod.Name).OfType<IMethodSymbol>();
                if (methodsWithSameNameAsTargetMethod.HasMoreThan(1))
                {
                    var correctOverload = methodsWithSameNameAsTargetMethod
                        .GetMethodOverloadsWithDesiredParameterAtTrailing(targetMethod, stringComparisonType)
                        .FirstOrDefault();

                    if (correctOverload != null)
                    {
                        ReportDiagnostic(
                            Rule_CA1307,
                            oaContext,
                            invocationExpression,
                            targetMethod,
                            correctOverload);
                    }
                }
            }, OperationKind.Invocation);
        });

        // Builds the CA1310 lookup table: each culture-sensitive string method is
        // paired with the overload (ending in StringComparison) to recommend.
        static ImmutableDictionary<IMethodSymbol, IMethodSymbol> GetWellKnownStringOverloads(
            Compilation compilation,
            INamedTypeSymbol stringType,
            INamedTypeSymbol stringComparisonType)
        {
            var objectType = compilation.GetSpecialType(SpecialType.System_Object);
            var booleanType = compilation.GetSpecialType(SpecialType.System_Boolean);
            var integerType = compilation.GetSpecialType(SpecialType.System_Int32);

            // string.CompareTo(string) and string.CompareTo(object) both map to
            // the static string.Compare(string, string, StringComparison).
            var stringCompareToNamedMethods = stringType.GetMembers("CompareTo").OfType<IMethodSymbol>();
            var stringCompareToParameterString = stringCompareToNamedMethods.GetFirstOrDefaultMemberWithParameterInfos(
                GetParameterInfo(stringType));
            var stringCompareToParameterObject = stringCompareToNamedMethods.GetFirstOrDefaultMemberWithParameterInfos(
                GetParameterInfo(objectType));

            var stringCompareNamedMethods = stringType.GetMembers("Compare").OfType<IMethodSymbol>();
            var stringCompareParameterStringStringBool = stringCompareNamedMethods.GetFirstOrDefaultMemberWithParameterInfos(
                GetParameterInfo(stringType),
                GetParameterInfo(stringType),
                GetParameterInfo(booleanType));
            var stringCompareParameterStringStringStringComparison = stringCompareNamedMethods.GetFirstOrDefaultMemberWithParameterInfos(
                GetParameterInfo(stringType),
                GetParameterInfo(stringType),
                GetParameterInfo(stringComparisonType));
            var stringCompareParameterStringIntStringIntIntBool = stringCompareNamedMethods.GetFirstOrDefaultMemberWithParameterInfos(
                GetParameterInfo(stringType),
                GetParameterInfo(integerType),
                GetParameterInfo(stringType),
                GetParameterInfo(integerType),
                GetParameterInfo(integerType),
                GetParameterInfo(booleanType));
            var stringCompareParameterStringIntStringIntIntComparison = stringCompareNamedMethods.GetFirstOrDefaultMemberWithParameterInfos(
                GetParameterInfo(stringType),
                GetParameterInfo(integerType),
                GetParameterInfo(stringType),
                GetParameterInfo(integerType),
                GetParameterInfo(integerType),
                GetParameterInfo(stringComparisonType));

            var overloadMapBuilder = ImmutableDictionary.CreateBuilder<IMethodSymbol, IMethodSymbol>();
            overloadMapBuilder.AddKeyValueIfNotNull(stringCompareToParameterString, stringCompareParameterStringStringStringComparison);
            overloadMapBuilder.AddKeyValueIfNotNull(stringCompareToParameterObject, stringCompareParameterStringStringStringComparison);
            overloadMapBuilder.AddKeyValueIfNotNull(stringCompareParameterStringStringBool, stringCompareParameterStringStringStringComparison);
            overloadMapBuilder.AddKeyValueIfNotNull(stringCompareParameterStringIntStringIntIntBool, stringCompareParameterStringIntStringIntIntComparison);

            // For Compare/StartsWith/EndsWith/IndexOf/LastIndexOf: any overload whose
            // first parameter is string and that does not already end in a
            // StringComparison is mapped to the overload that does.
            foreach (var methodName in s_CA1310MethodNamesWithFirstStringParameter)
            {
                var methodsWithMethodName = stringType.GetMembers(methodName).OfType<IMethodSymbol>();
                foreach (var method in methodsWithMethodName)
                {
                    if (!method.Parameters.IsEmpty &&
                        method.Parameters[0].Type.SpecialType == SpecialType.System_String &&
                        !method.Parameters[^1].Type.Equals(stringComparisonType))
                    {
                        var recommendedMethod = methodsWithMethodName
                            .GetMethodOverloadsWithDesiredParameterAtTrailing(method, stringComparisonType)
                            .FirstOrDefault();

                        if (recommendedMethod != null)
                        {
                            overloadMapBuilder.AddKeyValueIfNotNull(method, recommendedMethod);
                        }
                    }
                }
            }

            return overloadMapBuilder.ToImmutable();
        }
    }

    // Reports <paramref name="rule"/> on the invocation, formatting the target
    // method, the containing symbol, and the recommended overload into the message.
    private static void ReportDiagnostic(
        DiagnosticDescriptor rule,
        OperationAnalysisContext oaContext,
        IInvocationOperation invocationExpression,
        IMethodSymbol targetMethod,
        IMethodSymbol correctOverload)
    {
        oaContext.ReportDiagnostic(
            invocationExpression.CreateDiagnostic(
                rule,
                targetMethod.ToDisplayString(SymbolDisplayFormat.CSharpErrorMessageFormat),
                oaContext.ContainingSymbol.ToDisplayString(SymbolDisplayFormat.CSharpErrorMessageFormat),
                correctOverload.ToDisplayString(SymbolDisplayFormat.CSharpErrorMessageFormat)));
    }

    // Thin convenience wrapper over ParameterInfo.GetParameterInfo.
    private static ParameterInfo GetParameterInfo(INamedTypeSymbol type, bool isArray = false, int arrayRank = 0, bool isParams = false)
    {
        return ParameterInfo.GetParameterInfo(type, isArray, arrayRank, isParams);
    }
}
}
| |
//
// RadioButton.cs
//
// Author:
// Lluis Sanchez Gual <[email protected]>
//
// Copyright (c) 2011 Xamarin Inc
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using System.ComponentModel;
using Xwt.Backends;
using System.Windows.Markup;
using System.Linq;
using System.Collections.Generic;
namespace Xwt
{
[BackendType (typeof(IRadioButtonBackend))]
[ContentProperty("Content")]
public class RadioButton: Widget
{
	Widget content;              // custom content widget; null when only a text label is used
	string label = "";
	RadioButtonGroup radioGroup; // lazily created on first access of the Group property
	EventHandler clicked;
	EventHandler activeChanged;
	EventHandler activeSet;      // backing delegate for the Activated event

	// Routes backend notifications to the owning RadioButton instance.
	protected new class WidgetBackendHost: Widget.WidgetBackendHost, IRadioButtonEventSink
	{
		public void OnClicked ()
		{
			((RadioButton)Parent).OnClicked (EventArgs.Empty);
		}
		public void OnToggled ()
		{
			((RadioButton)Parent).OnActiveChanged (EventArgs.Empty);
		}
	}

	static RadioButton ()
	{
		MapEvent (RadioButtonEvent.Clicked, typeof(RadioButton), "OnClicked");
		// NOTE(review): ActiveChanged is mapped to both OnActiveChanged and
		// OnActivated — presumably so Activated can piggyback on the same
		// backend notification; confirm this is intended.
		MapEvent (RadioButtonEvent.ActiveChanged, typeof(RadioButton), "OnActiveChanged");
		MapEvent (RadioButtonEvent.ActiveChanged, typeof(RadioButton), "OnActivated");
	}

	public RadioButton ()
	{
	}

	// Creates a radio button showing the given text label.
	// NOTE(review): only these two constructors call VerifyConstructorCall;
	// the parameterless one does not — confirm intended.
	public RadioButton (string label)
	{
		VerifyConstructorCall (this);
		Label = label;
	}

	// Creates a radio button whose content is an arbitrary widget.
	public RadioButton (Widget content)
	{
		VerifyConstructorCall (this);
		Content = content;
	}

	protected override BackendHost CreateBackendHost ()
	{
		return new WidgetBackendHost ();
	}

	IRadioButtonBackend Backend {
		get { return (IRadioButtonBackend) BackendHost.Backend; }
	}

	// Text label shown by the radio button.
	// NOTE(review): setting Label pushes the string to the backend but does not
	// clear a previously assigned Content widget — confirm intended.
	[DefaultValue ("")]
	public string Label {
		get { return label; }
		set {
			label = value;
			Backend.SetContent (label);
			OnPreferredSizeChanged ();
		}
	}

	// Arbitrary widget used as the button's content (instead of a plain label).
	[DefaultValue (null)]
	public new Widget Content {
		get { return content; }
		set {
			// Keep the child registration in sync with the current content.
			if (content != null)
				UnregisterChild (content);
			content = value;
			if (content != null)
				RegisterChild (content);
			Backend.SetContent ((IWidgetBackend)GetBackend (content));
			OnPreferredSizeChanged ();
		}
	}

	// The group this button belongs to; at most one button per group is active.
	public RadioButtonGroup Group {
		get {
			if (radioGroup == null)
				// Assigning through the setter registers this button with the new group.
				Group = new RadioButtonGroup () { GroupBackend = Backend.Group };
			return radioGroup;
		}
		set {
			if (radioGroup != null)
				radioGroup.Remove (this);
			radioGroup = value;
			if (radioGroup == null)
				radioGroup = new RadioButtonGroup ();
			// Synchronize the widget-level group with the backend group:
			// whichever side already has a backend group wins.
			if (radioGroup.GroupBackend == null)
				radioGroup.GroupBackend = Backend.Group;
			else
				Backend.Group = radioGroup.GroupBackend;
			radioGroup.Add (this);
		}
	}

	// Whether this radio button is currently selected.
	[DefaultValue (false)]
	public bool Active {
		get { return Backend.Active; }
		set {
			// Deactivating the currently active button clears the group's
			// tracked selection before the backend state changes.
			if (!value && Active && radioGroup != null)
				radioGroup.ResetSelection ();
			Backend.Active = value;
		}
	}

	protected virtual void OnClicked (EventArgs e)
	{
		if (clicked != null)
			clicked (this, e);
	}

	protected virtual void OnActiveChanged (EventArgs e)
	{
		if (activeChanged != null)
			activeChanged (this, e);
	}

	protected virtual void OnActivated (EventArgs e)
	{
		if (activeSet != null)
			activeSet (this, e);
	}

	// Raised when the button is clicked. Backend event subscription is managed
	// through the BackendHost before-add / after-remove notifications.
	public event EventHandler Clicked {
		add {
			BackendHost.OnBeforeEventAdd (RadioButtonEvent.Clicked, clicked);
			clicked += value;
		}
		remove {
			clicked -= value;
			BackendHost.OnAfterEventRemove (RadioButtonEvent.Clicked, clicked);
		}
	}

	// Raised whenever the Active state toggles (both on activation and deactivation).
	public event EventHandler ActiveChanged {
		add {
			BackendHost.OnBeforeEventAdd (RadioButtonEvent.ActiveChanged, activeChanged);
			activeChanged += value;
		}
		remove {
			activeChanged -= value;
			BackendHost.OnAfterEventRemove (RadioButtonEvent.ActiveChanged, activeChanged);
		}
	}

	// Raised only when the button becomes active. Implemented on top of
	// ActiveChanged: the bridge handler is attached while there are listeners.
	public event EventHandler Activated {
		add {
			if (activeSet == null)
				ActiveChanged += HandleActiveChanged;
			activeSet += value;
		}
		remove {
			activeSet -= value;
			if (activeSet == null)
				ActiveChanged -= HandleActiveChanged;
		}
	}

	// Bridge: forwards ActiveChanged to OnActivated only on activation.
	void HandleActiveChanged (object sender, EventArgs e)
	{
		if (Active)
			OnActivated (e);
	}
}
/// <summary>
/// Groups radio buttons so that at most one of them is active at a time.
/// </summary>
public class RadioButtonGroup
{
	internal object GroupBackend;

	List<RadioButton> members = new List<RadioButton> ();
	EventHandler activeChangedHandlers;
	RadioButton current;
	bool trackingActive; // true once we listen to each member's ActiveChanged

	/// <summary>The radio button currently active in this group, or null.</summary>
	public RadioButton ActiveRadioButton {
		get {
			if (trackingActive)
				return current;
			// Not tracking yet: start listening and compute the answer once.
			EnableActiveEvent ();
			current = members.FirstOrDefault (r => r.Active);
			return current;
		}
	}

	internal void Add (RadioButton r)
	{
		members.Add (r);
		if (trackingActive)
			r.ActiveChanged += HandleActiveChanged;
	}

	internal void Remove (RadioButton r)
	{
		members.Remove (r);
		if (trackingActive)
			r.ActiveChanged -= HandleActiveChanged;
	}

	/// <summary>Raised when the active button of the group changes.</summary>
	public event EventHandler ActiveRadioButtonChanged {
		add {
			if (!trackingActive)
				EnableActiveEvent ();
			activeChangedHandlers += value;
		}
		remove {
			activeChangedHandlers -= value;
		}
	}

	/// <summary>Deselects the active button, leaving the group with no selection.</summary>
	public void ClearActive ()
	{
		var active = ActiveRadioButton;
		if (active != null)
			active.Active = false;
	}

	void EnableActiveEvent ()
	{
		if (trackingActive)
			return;
		trackingActive = true;
		foreach (var member in members)
			member.ActiveChanged += HandleActiveChanged;
	}

	void HandleActiveChanged (object sender, EventArgs e)
	{
		var button = (RadioButton) sender;
		if (button.Active)
			SetActive (button);
	}

	void SetActive (RadioButton r)
	{
		var previous = current;
		current = r;
		if (previous != r && activeChangedHandlers != null)
			activeChangedHandlers (this, EventArgs.Empty);
	}

	internal void ResetSelection ()
	{
		SetActive (null);
	}
}
}
| |
using Microsoft.Azure.Management.NetApp;
using Microsoft.Azure.Management.NetApp.Models;
using System;
using System.Collections.Generic;
using System.Threading;
using Xunit;
namespace NetApp.Tests.Helpers
{
public class ResourceUtils
{
// Size units used for pool/volume quotas.
public const long tebibyte = 1024L * 1024L * 1024L * 1024L;
public const long gibibyte = 1024L * 1024L * 1024L;
// Suffix appended to names of resources living in the remote (replication target) region.
private const string remoteSuffix = "-R";
// Network / subscription / resource-group fixtures (commented-out values are
// alternative test environments kept for reference).
//public const string vnet = "sdknettestqa2vnet464";
public const string vnet = "sdknettestqa2vnet464";
public const string repVnet = "sdktestqa2vnet464";
//public const string remoteVnet = repVnet + remoteSuffix;
public const string remoteVnet = "sdktestqa2vnet464east-R";
//public const string subsId = "8f38cfec-0ecd-413a-892e-2494f77a3b56";
//public const string subsId = "0661B131-4A11-479B-96BF-2F95ACCA2F73";
public const string subsId = "69a75bda-882e-44d5-8431-63421204132a";
public const string location = "westus2";
//public const string location = "eastus2euap";
//public const string remoteLocation = "southcentralus";
public const string remoteLocation = "eastus";
public const string resourceGroup = "sdk-net-test-qa2";
//public const string resourceGroup = "ab_sdk_test_rg";
public const string repResourceGroup = "sdk-test-qa2";
public const string remoteResourceGroup = repResourceGroup + remoteSuffix;
// Account / pool / volume fixture names ("Repl" variants are used by replication tests).
public const string accountName1 = "sdk-net-tests-acc-212";
public const string accountName1Repl = "sdk-net-tests-acc-21b";
public const string remoteAccountName1 = accountName1Repl + remoteSuffix;
public const string accountName2 = "sdk-net-tests-acc-23";
public const string poolName1 = "sdk-net-tests-pool-205";
public const string poolName1Repl = "sdk-net-tests-pool-11b";
public const string remotePoolName1 = poolName1Repl + remoteSuffix;
public const string poolName2 = "sdk-net-tests-pool-211";
public const string volumeName1 = "sdk-net-tests-vol-2105";
//Backup
//public const string backupLocation = "eastus2euap"; //"westusstage";
//public const string backupVnet = "sdknettestqa2vnet464euap";
public const string backupLocation = "southcentralusstage";
public const string backupVnet = "sdknettestqa2vnet464southcentralus";
public const string volumeBackupAccountName1 = "sdk-net-tests-acc-214v";
public const string backupPoolName1 = "sdk-net-tests-pool-206";
public const string backupVolumeName1 = "sdk-net-tests-vol-2112";
public const string backupName1 = backupVolumeName1 + "-b1";
public const string backupName2 = backupVolumeName1 + "-b2";
// Replication source/destination volume names.
public const string volumeName1ReplSource = "sdk-net-tests-vol-1001b-source";
public const string volumeName1ReplDest = volumeName1ReplSource + remoteSuffix+"dest";
public const string volumeName2 = "sdk-net-tests-vol-1001";
public const string snapshotName1 = "sdk-net-tests-snap-11";
public const string snapshotName2 = "sdk-net-tests-snap-12";
public const string snapshotPolicyName1 = "sdk-net-tests-snapshotPolicy-1";
public const string snapshotPolicyName2 = "sdk-net-tests-snapshotPolicy-2";
public const string backupPolicyName1 = "sdk-net-tests-backupPolicy-105a";
public const string backupPolicyName2 = "sdk-net-tests-backupPolicy-105b";
//West us Volume group
//public const string volumeGroupName1 = "sdk-net-tests-volGroup-1";
//public const string vgVnet = "sdknettestqa2vnet464";
//public const string vgLocation = "westus2";
//public const string proximityPlacementGroup = "/subscriptions/69a75bda-882e-44d5-8431-63421204132a/resourceGroups/sdk-net-test-qa2/providers/Microsoft.Compute/proximityPlacementGroups/sdk_test_standard_ppg";
//public const string GENPOPDeploymentSpecID = "30542149-bfca-5618-1879-9863dc6767f1";
//public const string SAPHANAOnGENPOPDeploymentSpecID = "20542149-bfca-5618-1879-9863dc6767f1";
//northeurope
public const string volumeGroupName1 = "sdk-net-tests-volGroup-1";
public const string vgVnet = "vnetnortheurope-anf";
public const string vgLocation = "northeurope";
public const string proximityPlacementGroup = "/subscriptions/69a75bda-882e-44d5-8431-63421204132a/resourceGroups/sdk-net-test-qa2/providers/Microsoft.Compute/proximityPlacementGroups/sdk_test_northeurope_ppg";
public const string GENPOPDeploymentSpecID = "30542149-bfca-5618-1879-9863dc6767f1";
public const string SAPHANAOnGENPOPDeploymentSpecID = "20542149-bfca-5618-1879-9863dc6767f1";
// Default subnet used by volumes created in the primary region.
public const string subnetId = "/subscriptions/" + subsId + "/resourceGroups/" + resourceGroup + "/providers/Microsoft.Network/virtualNetworks/" + vnet + "/subnets/default";
// Active Directory fixtures used by SMB-related tests.
public static ActiveDirectory activeDirectory = new ActiveDirectory()
{
Username = "sdkuser",
Password = "sdkpass",
Domain = "sdkdomain",
Dns = "192.0.2.2",
SmbServerName = "SDKSMBSeNa",
};
public static ActiveDirectory activeDirectory2 = new ActiveDirectory()
{
Username = "sdkuser1",
Password = "sdkpass1",
Domain = "sdkdomain",
Dns = "192.0.2.1",
SmbServerName = "SDKSMBSeNa",
};
// Default NFSv3 read-write export policy applied to test volumes.
public static ExportPolicyRule defaultExportPolicyRule = new ExportPolicyRule()
{
RuleIndex = 1,
UnixReadOnly = false,
UnixReadWrite = true,
Cifs = false,
Nfsv3 = true,
Nfsv41 = false,
AllowedClients = "0.0.0.0/0"
};
public static IList<ExportPolicyRule> defaultExportPolicyRuleList = new List<ExportPolicyRule>()
{
defaultExportPolicyRule
};
public static VolumePropertiesExportPolicy defaultExportPolicy = new VolumePropertiesExportPolicy()
{
Rules = defaultExportPolicyRuleList
};
// Milliseconds slept after mutating calls when recording, to ride out ARM caching.
private const int delay = 5000;
// NOTE(review): retryAttempts appears unused in the visible portion of this file.
private const int retryAttempts = 4;
/// <summary>
/// Creates (or updates) a NetApp account, optionally attaching an Active Directory,
/// and verifies the returned resource name.
/// </summary>
public static NetAppAccount CreateAccount(AzureNetAppFilesManagementClient netAppMgmtClient, string accountName = accountName1, string resourceGroup = resourceGroup, string location = location, IDictionary<string, string> tags = default(IDictionary<string, string>), ActiveDirectory activeDirectory = null)
{
    // request reference example
    // az netappfiles account update -g --account-name cli-lf-acc2 --active-directories '[{"username": "aduser", "password": "aduser", "smbservername": "SMBSERVER", "dns": "1.2.3.4", "domain": "westcentralus"}]' -l westus2
    var activeDirectories = new List<ActiveDirectory>();
    if (activeDirectory != null)
    {
        activeDirectories.Add(activeDirectory);
    }

    var netAppAccount = new NetAppAccount
    {
        Location = location,
        Tags = tags,
        ActiveDirectories = activeDirectories
    };

    var resource = netAppMgmtClient.Accounts.CreateOrUpdate(netAppAccount, resourceGroup, accountName);
    Assert.Equal(resource.Name, accountName);

    if (Environment.GetEnvironmentVariable("AZURE_TEST_MODE") == "Record")
    {
        Thread.Sleep(delay); // some robustness against ARM caching
    }

    return resource;
}
/// <summary>
/// Creates (or updates) a capacity pool, first creating its parent account
/// unless <paramref name="poolOnly"/> is set.
/// </summary>
public static CapacityPool CreatePool(AzureNetAppFilesManagementClient netAppMgmtClient, string poolName = poolName1, string accountName = accountName1, string resourceGroup = resourceGroup, string location = location, IDictionary<string, string> tags = default(IDictionary<string, string>), bool poolOnly = false, string serviceLevel = "Premium", long poolSize = 4398046511104, string qosType = QosType.Auto)
{
    if (!poolOnly)
    {
        CreateAccount(netAppMgmtClient, accountName, resourceGroup: resourceGroup, location: location, tags: tags);
    }

    var pool = new CapacityPool
    {
        Location = location,
        Size = poolSize,
        ServiceLevel = serviceLevel,
        Tags = tags,
        QosType = qosType
    };

    CapacityPool resource;
    try
    {
        resource = netAppMgmtClient.Pools.CreateOrUpdate(pool, resourceGroup, accountName, poolName);
    }
    catch
    {
        // First attempt failed; retry exactly once and let a second failure propagate.
        resource = netAppMgmtClient.Pools.CreateOrUpdate(pool, resourceGroup, accountName, poolName);
    }

    Assert.Equal(resource.Name, accountName + '/' + poolName);

    if (Environment.GetEnvironmentVariable("AZURE_TEST_MODE") == "Record")
    {
        Thread.Sleep(delay); // some robustness against ARM caching
    }

    return resource;
}
/// <summary>
/// Creates (or updates) a volume, first creating its parent pool and account
/// unless <paramref name="volumeOnly"/> is set. Defaults to an NFSv3 volume in
/// the default subnet of <paramref name="vnet"/>.
/// </summary>
public static Volume CreateVolume(AzureNetAppFilesManagementClient netAppMgmtClient, string volumeName = volumeName1, string poolName = poolName1, string accountName = accountName1, string resourceGroup = resourceGroup, string location = location, List<string> protocolTypes = null, IDictionary<string, string> tags = default(IDictionary<string, string>), VolumePropertiesExportPolicy exportPolicy = null, string vnet = vnet, bool volumeOnly = false, string snapshotId = null, string snapshotPolicyId = null, string backupVnetLocation = "", long poolSize = 4398046511104, string enableSubvolumes = EnableSubvolumes.Disabled)
{
    if (!volumeOnly)
    {
        CreatePool(netAppMgmtClient, poolName, accountName, resourceGroup: resourceGroup, location: location, poolSize: poolSize);
    }

    // NFSv3 is the protocol used when the caller does not specify any.
    var volumeProtocolTypes = protocolTypes ?? new List<string>() { "NFSv3" };

    var volume = new Volume
    {
        Location = location,
        UsageThreshold = 100 * gibibyte,
        ProtocolTypes = volumeProtocolTypes,
        CreationToken = volumeName,
        SubnetId = "/subscriptions/" + netAppMgmtClient.SubscriptionId + "/resourceGroups/" + resourceGroup + "/providers/Microsoft.Network/virtualNetworks/" + vnet + "/subnets/default",
        Tags = tags,
        ExportPolicy = exportPolicy,
        SnapshotId = snapshotId,
        SecurityStyle = "unix",
        EnableSubvolumes = enableSubvolumes
    };

    if (snapshotPolicyId != null)
    {
        // Attach a snapshot policy when one was supplied.
        volume.DataProtection = new VolumePropertiesDataProtection
        {
            Snapshot = new VolumeSnapshotProperties(snapshotPolicyId)
        };
    }

    var resource = netAppMgmtClient.Volumes.CreateOrUpdate(volume, resourceGroup, accountName, poolName, volumeName);
    Assert.Equal(resource.Name, accountName + '/' + poolName + '/' + volumeName);

    if (Environment.GetEnvironmentVariable("AZURE_TEST_MODE") == "Record")
    {
        Thread.Sleep(delay); // some robustness against ARM caching
    }

    return resource;
}
/// <summary>
/// Creates a data-protection (replication destination) volume in the remote
/// region, replicating from <paramref name="sourceVolume"/> on a 10-minute schedule.
/// </summary>
public static Volume CreateDpVolume(AzureNetAppFilesManagementClient netAppMgmtClient, Volume sourceVolume, string volumeName = volumeName1ReplDest, string poolName = remotePoolName1, string accountName = remoteAccountName1, string resourceGroup = remoteResourceGroup, string location = location, List<string> protocolTypes = null, IDictionary<string, string> tags = default(IDictionary<string, string>), VolumePropertiesExportPolicy exportPolicy = null, bool volumeOnly = false, string snapshotId = null)
{
    if (!volumeOnly)
    {
        CreatePool(netAppMgmtClient, poolName, accountName, resourceGroup: resourceGroup, location: remoteLocation);
    }

    // NFSv3 is the protocol used when the caller does not specify any.
    var volumeProtocolTypes = protocolTypes ?? new List<string>() { "NFSv3" };

    // This volume is the destination ("dst") endpoint of the replication pair.
    var dataProtection = new VolumePropertiesDataProtection
    {
        Replication = new ReplicationObject
        {
            EndpointType = "dst",
            RemoteVolumeResourceId = sourceVolume.Id,
            ReplicationSchedule = "_10minutely"
        }
    };

    var volume = new Volume
    {
        Location = remoteLocation,
        UsageThreshold = 100 * gibibyte,
        ProtocolTypes = volumeProtocolTypes,
        CreationToken = volumeName,
        SubnetId = "/subscriptions/" + netAppMgmtClient.SubscriptionId + "/resourceGroups/" + resourceGroup + "/providers/Microsoft.Network/virtualNetworks/" + remoteVnet + "/subnets/default",
        Tags = tags,
        ExportPolicy = exportPolicy,
        SnapshotId = snapshotId,
        VolumeType = "DataProtection",
        DataProtection = dataProtection
    };

    var resource = netAppMgmtClient.Volumes.CreateOrUpdate(volume, resourceGroup, accountName, poolName, volumeName);
    Assert.Equal(resource.Name, accountName + '/' + poolName + '/' + volumeName);

    if (Environment.GetEnvironmentVariable("AZURE_TEST_MODE") == "Record")
    {
        Thread.Sleep(delay); // some robustness against ARM caching
    }

    return resource;
}
/// <summary>
/// Creates a backup-enabled volume in the backup region, first creating its
/// parent pool and account unless <paramref name="volumeOnly"/> is set.
/// </summary>
public static Volume CreateBackedupVolume(AzureNetAppFilesManagementClient netAppMgmtClient, string volumeName = volumeName1, string poolName = poolName1, string accountName = volumeBackupAccountName1, string resourceGroup = resourceGroup, string location = backupLocation, List<string> protocolTypes = null, IDictionary<string, string> tags = default(IDictionary<string, string>), VolumePropertiesExportPolicy exportPolicy = null, bool volumeOnly = false, string vnet = backupVnet, string backupPolicyId = null, string backupVaultId = null)
{
    if (!volumeOnly)
    {
        CreatePool(netAppMgmtClient, poolName, accountName, resourceGroup: resourceGroup, location: location);
    }

    // NFSv3 is the protocol used when the caller does not specify any.
    var defaultProtocolType = new List<string>() { "NFSv3" };
    var volumeProtocolTypes = protocolTypes == null ? defaultProtocolType : protocolTypes;

    var dataProtection = new VolumePropertiesDataProtection
    {
        Backup = new VolumeBackupProperties { BackupEnabled = true, BackupPolicyId = backupPolicyId, VaultId = backupVaultId }
    };

    var volume = new Volume
    {
        Location = location,
        UsageThreshold = 100 * gibibyte,
        ProtocolTypes = volumeProtocolTypes,
        CreationToken = volumeName,
        // BUGFIX: previously this hard-coded the backupVnet constant, silently
        // ignoring the 'vnet' parameter. Using the parameter keeps the default
        // behavior identical (its default value is backupVnet) while honoring
        // callers that pass a different virtual network.
        SubnetId = "/subscriptions/" + netAppMgmtClient.SubscriptionId + "/resourceGroups/" + resourceGroup + "/providers/Microsoft.Network/virtualNetworks/" + vnet + "/subnets/default",
        Tags = tags,
        ExportPolicy = exportPolicy,
        VolumeType = "DataProtection",
        DataProtection = dataProtection
    };

    var resource = netAppMgmtClient.Volumes.CreateOrUpdate(volume, resourceGroup, accountName, poolName, volumeName);
    Assert.Equal(resource.Name, accountName + '/' + poolName + '/' + volumeName);

    if (Environment.GetEnvironmentVariable("AZURE_TEST_MODE") == "Record")
    {
        Thread.Sleep(delay); // some robustness against ARM caching
    }

    return resource;
}
/// <summary>
/// Creates an SAP HANA volume group (log, data-backup and data volumes) in a
/// manual-QoS pool. When <paramref name="volumeGroupOnly"/> is set, the pool
/// named <paramref name="poolName"/> must already exist.
/// </summary>
public static VolumeGroupDetails CreateVolumeGroup(AzureNetAppFilesManagementClient netAppMgmtClient, string volumeGroupName = volumeGroupName1, string poolName = poolName1, string accountName = accountName1, string resourceGroup = resourceGroup, string location = vgLocation, List<string> protocolTypes = null, IDictionary<string, string> tags = default(IDictionary<string, string>), VolumePropertiesExportPolicy exportPolicy = null, string vnet = vgVnet, bool volumeGroupOnly = false, string snapshotId = null, string snapshotPolicyId = null, string backupVnetLocation = "", long poolSize = 4398046511104)
{
    CapacityPool pool;
    if (!volumeGroupOnly)
    {
        pool = CreatePool(netAppMgmtClient, poolName, accountName, resourceGroup: resourceGroup, location: location, poolSize: poolSize, qosType: QosType.Manual);
    }
    else
    {
        // BUGFIX: previously 'pool' stayed null on this path and the
        // 'pool.Id' references below threw NullReferenceException.
        // Look up the existing pool instead.
        pool = netAppMgmtClient.Pools.Get(resourceGroup, accountName, poolName);
    }

    // NFSv4.1 is the protocol used when the caller does not specify any.
    var defaultProtocolType = new List<string>() { "NFSv4.1" };
    var volumeProtocolTypes = protocolTypes == null ? defaultProtocolType : protocolTypes;

    if (exportPolicy == null)
    {
        exportPolicy = new VolumePropertiesExportPolicy
        {
            Rules = new List<ExportPolicyRule>()
            {
                new ExportPolicyRule { Nfsv3 = false, Nfsv41 = true, RuleIndex = 1, AllowedClients = "0.0.0.0/0" },
                new ExportPolicyRule { Nfsv3 = false, Nfsv41 = true, RuleIndex = 2, AllowedClients = "0.0.0.0/0"}
            }
        };
    }

    // One volume per SAP HANA volume spec: log, data-backup and data.
    var volumeGroupVolumeProperties = new List<VolumeGroupVolumeProperties>
    {
        new VolumeGroupVolumeProperties {
            Name = $"{volumeGroupName}-log-1",
            VolumeSpecName = "log",
            CapacityPoolResourceId = pool.Id,
            ProximityPlacementGroup = proximityPlacementGroup,
            UsageThreshold = 100 * gibibyte,
            ThroughputMibps = 6,
            ProtocolTypes = volumeProtocolTypes,
            CreationToken = $"{volumeGroupName}-log-1",
            SubnetId = "/subscriptions/" + netAppMgmtClient.SubscriptionId + "/resourceGroups/" + resourceGroup + "/providers/Microsoft.Network/virtualNetworks/" + vnet + "/subnets/default",
            Tags = tags,
            ExportPolicy = exportPolicy
        },
        new VolumeGroupVolumeProperties {
            Name = $"{volumeGroupName}-DataBackup-2",
            VolumeSpecName = "data-backup",
            CapacityPoolResourceId = pool.Id,
            ProximityPlacementGroup = proximityPlacementGroup,
            ThroughputMibps = 6,
            UsageThreshold = 100 * gibibyte,
            ProtocolTypes = volumeProtocolTypes,
            CreationToken = $"{volumeGroupName}-DataBackup-2",
            SubnetId = "/subscriptions/" + netAppMgmtClient.SubscriptionId + "/resourceGroups/" + resourceGroup + "/providers/Microsoft.Network/virtualNetworks/" + vnet + "/subnets/default",
            Tags = tags,
            ExportPolicy = exportPolicy
        },
        new VolumeGroupVolumeProperties {
            Name = $"{volumeGroupName}-DataVol-3",
            VolumeSpecName = "data",
            ProximityPlacementGroup = proximityPlacementGroup,
            CapacityPoolResourceId = pool.Id,
            ThroughputMibps = 6,
            UsageThreshold = 100 * gibibyte,
            ProtocolTypes = volumeProtocolTypes,
            CreationToken = $"{volumeGroupName}-DataVol-3",
            SubnetId = "/subscriptions/" + netAppMgmtClient.SubscriptionId + "/resourceGroups/" + resourceGroup + "/providers/Microsoft.Network/virtualNetworks/" + vnet + "/subnets/default",
            Tags = tags,
            ExportPolicy = exportPolicy
        }
    };

    var volumeGroup = new VolumeGroupDetails()
    {
        Location = location,
        Tags = tags,
        GroupMetaData = new VolumeGroupMetaData()
        {
            ApplicationType = ApplicationType.SAPHANA,
            ApplicationIdentifier = "SH1",
            GlobalPlacementRules = new List<PlacementKeyValuePairs> { new PlacementKeyValuePairs { Key = "Key1", Value = "value1" } },
            DeploymentSpecId = SAPHANAOnGENPOPDeploymentSpecID,
            GroupDescription = "group description"
        },
        Volumes = volumeGroupVolumeProperties
    };

    var resource = netAppMgmtClient.VolumeGroups.Create(volumeGroup, resourceGroup, accountName, volumeGroupName);
    Assert.Equal(accountName + '/' + volumeGroupName, resource.Name);

    if (Environment.GetEnvironmentVariable("AZURE_TEST_MODE") == "Record")
    {
        Thread.Sleep(delay); // some robustness against ARM caching
    }

    return resource;
}
/// <summary>
/// Creates a snapshot (optionally creating the backing volume first) and
/// asserts the returned resource name.
/// </summary>
/// <param name="snapshotOnly">
/// When true, no volume is created here and no filesystem id is available;
/// this exercises snapshot creation where the name from the resource id is used.
/// </param>
public static Snapshot CreateSnapshot(AzureNetAppFilesManagementClient netAppMgmtClient, string snapshotName = snapshotName1, string volumeName = volumeName1, string poolName = poolName1, string accountName = accountName1, string resourceGroup = resourceGroup, string location = location, bool snapshotOnly = false)
{
    if (!snapshotOnly)
    {
        // Ensure the volume exists before snapshotting it. The returned
        // Volume was never used by callers, so it is not kept.
        CreateVolume(netAppMgmtClient, volumeName, poolName, accountName);
    }
    // The snapshot body is identical in both cases: only the location is set.
    // (Previously the object was constructed twice, once per branch, plus a
    // dead initial allocation.)
    var snapshot = new Snapshot
    {
        Location = location,
    };
    if (Environment.GetEnvironmentVariable("AZURE_TEST_MODE") == "Record")
    {
        Thread.Sleep(delay); // some robustness against ARM caching
    }
    var resource = netAppMgmtClient.Snapshots.Create(snapshot, resourceGroup, accountName, poolName, volumeName, snapshotName);
    // xUnit convention: expected value first (consistent with the other helpers).
    Assert.Equal(accountName + '/' + poolName + '/' + volumeName + '/' + snapshotName, resource.Name);
    return resource;
}
/// <summary>
/// Deletes a NetApp account. The <paramref name="deep"/> flag is reserved for
/// deleting nested resources first, but that path is not implemented.
/// </summary>
public static void DeleteAccount(AzureNetAppFilesManagementClient netAppMgmtClient, string accountName = accountName1, string resourceGroup = resourceGroup, bool deep = false)
{
    if (deep)
    {
        // find and delete all nested resources - not implemented
    }
    // now delete the account
    netAppMgmtClient.Accounts.Delete(resourceGroup, accountName);
}
/// <summary>
/// Deletes a capacity pool, retrying for test robustness: ARM keeps tidying
/// up after the awaited async op returns, and other async actions in
/// RP/SDE/NRP (e.g. snapshot deletion) may briefly leave child resources.
/// </summary>
public static void DeletePool(AzureNetAppFilesManagementClient netAppMgmtClient, string poolName = poolName1, string accountName = accountName1, string resourceGroup = resourceGroup, bool deep = false)
{
    if (deep)
    {
        // find and delete all nested resources - not implemented
    }
    var attempts = 0;
    while (true)
    {
        if (Environment.GetEnvironmentVariable("AZURE_TEST_MODE") == "Record")
        {
            Thread.Sleep(delay);
        }
        try
        {
            netAppMgmtClient.Pools.Delete(resourceGroup, accountName, poolName);
            break; // deleted successfully
        }
        catch
        {
            attempts++;
            if (attempts > retryAttempts)
            {
                break; // give up after the configured number of retries
            }
        }
    }
}
/// <summary>
/// Deletes a volume, retrying for test robustness: ARM keeps tidying up after
/// the awaited async op returns, and other async actions in RP/SDE/NRP
/// (e.g. snapshot deletion) may briefly leave child resources on the volume.
/// </summary>
public static void DeleteVolume(AzureNetAppFilesManagementClient netAppMgmtClient, string volumeName = volumeName1, string poolName = poolName1, string accountName = accountName1, string resourceGroup = resourceGroup, string location = location, bool deep = false)
{
    if (deep)
    {
        // find and delete all nested resources - not implemented
    }
    var attempts = 0;
    while (true)
    {
        if (Environment.GetEnvironmentVariable("AZURE_TEST_MODE") == "Record")
        {
            Thread.Sleep(delay);
        }
        try
        {
            netAppMgmtClient.Volumes.Delete(resourceGroup, accountName, poolName, volumeName);
            break; // deleted successfully
        }
        catch
        {
            attempts++;
            if (attempts > retryAttempts)
            {
                break; // give up after the configured number of retries
            }
        }
    }
}
/// <summary>
/// Deletes a snapshot, retrying for test robustness: ARM keeps tidying up
/// after the awaited async op returns, and deletion can fail while the
/// snapshot's creation has not yet completed at all levels.
/// </summary>
public static void DeleteSnapshot(AzureNetAppFilesManagementClient netAppMgmtClient, string snapshotName = snapshotName1, string volumeName = volumeName1, string poolName = poolName1, string accountName = accountName1, string resourceGroup = resourceGroup, string location = location, bool deep = false)
{
    if (deep)
    {
        // find and delete all nested resources - not implemented
    }
    var attempts = 0;
    while (true)
    {
        if (Environment.GetEnvironmentVariable("AZURE_TEST_MODE") == "Record")
        {
            Thread.Sleep(delay);
        }
        try
        {
            netAppMgmtClient.Snapshots.Delete(resourceGroup, accountName, poolName, volumeName, snapshotName);
            break; // deleted successfully
        }
        catch
        {
            attempts++;
            if (attempts > retryAttempts)
            {
                break; // give up after the configured number of retries
            }
        }
    }
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using IndexOutput = Lucene.Net.Store.IndexOutput;
using RAMOutputStream = Lucene.Net.Store.RAMOutputStream;
using ArrayUtil = Lucene.Net.Util.ArrayUtil;
namespace Lucene.Net.Index
{
/// <summary>
/// Indexing-chain consumer that writes per-document term vectors to the doc
/// store files: tvx (index), tvd (documents) and tvf (fields). Each document's
/// vectors are buffered in a PerDoc and appended to the real outputs in
/// FinishDocument; PerDoc instances are recycled through docFreeList.
/// </summary>
sealed class TermVectorsTermsWriter:TermsHashConsumer
{
    private void InitBlock()
    {
        // Start the recycle pool with room for one PerDoc; GetPerDoc() grows it.
        docFreeList = new PerDoc[1];
    }
    internal DocumentsWriter docWriter;
    internal TermVectorsWriter termVectorsWriter;
    internal PerDoc[] docFreeList; // pool of recycled PerDoc instances
    internal int freeCount;        // number of live entries in docFreeList
    internal IndexOutput tvx;      // term vectors index output
    internal IndexOutput tvd;      // term vectors documents output
    internal IndexOutput tvf;      // term vectors fields output
    internal int lastDocID;        // next docID the doc store expects
    public TermVectorsTermsWriter(DocumentsWriter docWriter)
    {
        InitBlock();
        this.docWriter = docWriter;
    }
    /// <summary>Creates the per-thread consumer bound to this writer.</summary>
    public override TermsHashConsumerPerThread AddThread(TermsHashPerThread termsHashPerThread)
    {
        return new TermVectorsTermsWriterPerThread(termsHashPerThread, this);
    }
    /// <summary>Fills postings[start, start+count) with fresh PostingList instances.</summary>
    internal override void CreatePostings(RawPostingList[] postings, int start, int count)
    {
        int end = start + count;
        for (int i = start; i < end; i++)
            postings[i] = new PostingList();
    }
    /// <summary>
    /// Flushes buffered term vector data and resets the per-thread and
    /// per-field state for the next segment.
    /// </summary>
    public override void Flush(IDictionary<TermsHashConsumerPerThread, ICollection<TermsHashConsumerPerField>> threadsAndFields, SegmentWriteState state)
    {
        lock (this)
        {
            // NOTE: it's possible that all documents seen in this segment
            // hit non-aborting exceptions, in which case we will
            // not have yet init'd the TermVectorsWriter. This is
            // actually OK (unlike in the stored fields case)
            // because, although FieldInfos.hasVectors() will return
            // true, the TermVectorsReader gracefully handles
            // non-existence of the term vectors files.
            if (tvx != null)
            {
                if (state.numDocsInStore > 0)
                    // In case there are some final documents that we
                    // didn't see (because they hit a non-aborting exception):
                    Fill(state.numDocsInStore - docWriter.DocStoreOffset);
                tvx.Flush();
                tvd.Flush();
                tvf.Flush();
            }
            foreach (var entry in threadsAndFields)
            {
                foreach (var field in entry.Value)
                {
                    TermVectorsTermsWriterPerField perField = (TermVectorsTermsWriterPerField)field;
                    perField.termsHashPerField.Reset();
                    perField.ShrinkHash();
                }
                TermVectorsTermsWriterPerThread perThread = (TermVectorsTermsWriterPerThread) entry.Key;
                perThread.termsHashPerThread.Reset(true);
            }
        }
    }
    /// <summary>
    /// Closes the term vector files for the current doc store segment,
    /// verifying the tvx size matches the number of docs written, then
    /// records the flushed file names and releases them as open files.
    /// </summary>
    internal override void CloseDocStore(SegmentWriteState state)
    {
        lock (this)
        {
            if (tvx != null)
            {
                // At least one doc in this run had term vectors
                // enabled
                Fill(state.numDocsInStore - docWriter.DocStoreOffset);
                tvx.Close();
                tvf.Close();
                tvd.Close();
                tvx = null;
                System.Diagnostics.Debug.Assert(state.docStoreSegmentName != null);
                System.String fileName = state.docStoreSegmentName + "." + IndexFileNames.VECTORS_INDEX_EXTENSION;
                // tvx layout: 4-byte header plus two longs (16 bytes) per doc.
                if (4 + ((long) state.numDocsInStore) * 16 != state.directory.FileLength(fileName))
                    throw new System.SystemException("after flush: tvx size mismatch: " + state.numDocsInStore + " docs vs " + state.directory.FileLength(fileName) + " length in bytes of " + fileName + " file exists?=" + state.directory.FileExists(fileName));
                state.flushedFiles.Add(state.docStoreSegmentName + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
                state.flushedFiles.Add(state.docStoreSegmentName + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION);
                state.flushedFiles.Add(state.docStoreSegmentName + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
                docWriter.RemoveOpenFile(state.docStoreSegmentName + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
                docWriter.RemoveOpenFile(state.docStoreSegmentName + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION);
                docWriter.RemoveOpenFile(state.docStoreSegmentName + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
                lastDocID = 0;
            }
        }
    }
    internal int allocCount; // total PerDoc instances ever handed out
    /// <summary>
    /// Returns a pooled PerDoc, or allocates a new one, growing the free list
    /// up front so every outstanding instance can later be recycled.
    /// </summary>
    internal PerDoc GetPerDoc()
    {
        lock (this)
        {
            if (freeCount == 0)
            {
                allocCount++;
                if (allocCount > docFreeList.Length)
                {
                    // Grow our free list up front to make sure we have
                    // enough space to recycle all outstanding PerDoc
                    // instances
                    System.Diagnostics.Debug.Assert(allocCount == 1 + docFreeList.Length);
                    docFreeList = new PerDoc[ArrayUtil.GetNextSize(allocCount)];
                }
                return new PerDoc(this);
            }
            else
                return docFreeList[--freeCount];
        }
    }
    /// <summary>Fills in no-term-vectors for all docs we haven't seen
    /// since the last doc that had term vectors.
    /// </summary>
    internal void Fill(int docID)
    {
        int docStoreOffset = docWriter.DocStoreOffset;
        int end = docID + docStoreOffset;
        if (lastDocID < end)
        {
            long tvfPosition = tvf.FilePointer;
            while (lastDocID < end)
            {
                // Each skipped doc gets a tvd entry declaring zero fields.
                tvx.WriteLong(tvd.FilePointer);
                tvd.WriteVInt(0);
                tvx.WriteLong(tvfPosition);
                lastDocID++;
            }
        }
    }
    /// <summary>
    /// Lazily creates the tvx/tvd/tvf outputs for the current doc store
    /// segment and writes their format headers. No-op when no doc store
    /// segment is available yet.
    /// </summary>
    internal void InitTermVectorsWriter()
    {
        lock (this)
        {
            if (tvx == null)
            {
                System.String docStoreSegment = docWriter.DocStoreSegment;
                if (docStoreSegment == null)
                    return ;
                System.Diagnostics.Debug.Assert(docStoreSegment != null);
                // If we hit an exception while init'ing the term
                // vector output files, we must abort this segment
                // because those files will be in an unknown
                // state:
                tvx = docWriter.directory.CreateOutput(docStoreSegment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
                tvd = docWriter.directory.CreateOutput(docStoreSegment + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
                tvf = docWriter.directory.CreateOutput(docStoreSegment + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION);
                tvx.WriteInt(TermVectorsReader.FORMAT_CURRENT);
                tvd.WriteInt(TermVectorsReader.FORMAT_CURRENT);
                tvf.WriteInt(TermVectorsReader.FORMAT_CURRENT);
                docWriter.AddOpenFile(docStoreSegment + "." + IndexFileNames.VECTORS_INDEX_EXTENSION);
                docWriter.AddOpenFile(docStoreSegment + "." + IndexFileNames.VECTORS_FIELDS_EXTENSION);
                docWriter.AddOpenFile(docStoreSegment + "." + IndexFileNames.VECTORS_DOCUMENTS_EXTENSION);
                lastDocID = 0;
            }
        }
    }
    /// <summary>
    /// Appends one document's buffered term vectors to the real outputs,
    /// then recycles the PerDoc.
    /// </summary>
    internal void FinishDocument(PerDoc perDoc)
    {
        lock (this)
        {
            System.Diagnostics.Debug.Assert(docWriter.writer.TestPoint("TermVectorsTermsWriter.finishDocument start"));
            InitTermVectorsWriter();
            // Catch up on any preceding docs that had no term vectors.
            Fill(perDoc.docID);
            // Append term vectors to the real outputs:
            tvx.WriteLong(tvd.FilePointer);
            tvx.WriteLong(tvf.FilePointer);
            tvd.WriteVInt(perDoc.numVectorFields);
            if (perDoc.numVectorFields > 0)
            {
                for (int i = 0; i < perDoc.numVectorFields; i++)
                    tvd.WriteVInt(perDoc.fieldNumbers[i]);
                System.Diagnostics.Debug.Assert(0 == perDoc.fieldPointers[0]);
                long lastPos = perDoc.fieldPointers[0];
                // Per-field tvf pointers are delta-encoded.
                for (int i = 1; i < perDoc.numVectorFields; i++)
                {
                    long pos = perDoc.fieldPointers[i];
                    tvd.WriteVLong(pos - lastPos);
                    lastPos = pos;
                }
                perDoc.perDocTvf.WriteTo(tvf);
                perDoc.numVectorFields = 0;
            }
            System.Diagnostics.Debug.Assert(lastDocID == perDoc.docID + docWriter.DocStoreOffset);
            lastDocID++;
            perDoc.Reset();
            Free(perDoc);
            System.Diagnostics.Debug.Assert(docWriter.writer.TestPoint("TermVectorsTermsWriter.finishDocument end"));
        }
    }
    /// <summary>Always false: no state is held beyond the current document.</summary>
    public bool FreeRAM()
    {
        // We don't hold any state beyond one doc, so we don't
        // free persistent RAM here
        return false;
    }
    /// <summary>Closes and discards the three outputs, ignoring close failures.</summary>
    public override void Abort()
    {
        if (tvx != null)
        {
            try
            {
                tvx.Close();
            }
            catch (System.Exception)
            {
            }
            tvx = null;
        }
        if (tvd != null)
        {
            try
            {
                tvd.Close();
            }
            catch (System.Exception)
            {
            }
            tvd = null;
        }
        if (tvf != null)
        {
            try
            {
                tvf.Close();
            }
            catch (System.Exception)
            {
            }
            tvf = null;
        }
        lastDocID = 0;
    }
    /// <summary>Returns a PerDoc to the recycle pool.</summary>
    internal void Free(PerDoc doc)
    {
        lock (this)
        {
            System.Diagnostics.Debug.Assert(freeCount < docFreeList.Length);
            docFreeList[freeCount++] = doc;
        }
    }
    /// <summary>
    /// Per-document buffer of term vector data: field numbers, per-field tvf
    /// start pointers, and the serialized tvf bytes.
    /// </summary>
    internal class PerDoc:DocumentsWriter.DocWriter
    {
        public PerDoc(TermVectorsTermsWriter enclosingInstance)
        {
            InitBlock(enclosingInstance);
        }
        private void InitBlock(TermVectorsTermsWriter enclosingInstance)
        {
            this.enclosingInstance = enclosingInstance;
            buffer = enclosingInstance.docWriter.NewPerDocBuffer();
            perDocTvf = new RAMOutputStream(buffer);
        }
        private TermVectorsTermsWriter enclosingInstance;
        public TermVectorsTermsWriter Enclosing_Instance
        {
            get
            {
                return enclosingInstance;
            }
        }
        internal DocumentsWriter.PerDocBuffer buffer; // backing RAM buffer
        internal RAMOutputStream perDocTvf;           // buffered tvf bytes for this doc
        internal int numVectorFields;                 // fields with vectors in this doc
        internal int[] fieldNumbers = new int[1];
        internal long[] fieldPointers = new long[1];
        /// <summary>Clears the buffered data so the instance can be reused.</summary>
        internal void Reset()
        {
            perDocTvf.Reset();
            buffer.Recycle();
            numVectorFields = 0;
        }
        public override void Abort()
        {
            Reset();
            Enclosing_Instance.Free(this);
        }
        /// <summary>Records a field's number and its start offset within perDocTvf.</summary>
        internal void AddField(int fieldNumber)
        {
            if (numVectorFields == fieldNumbers.Length)
            {
                fieldNumbers = ArrayUtil.Grow(fieldNumbers);
                fieldPointers = ArrayUtil.Grow(fieldPointers);
            }
            fieldNumbers[numVectorFields] = fieldNumber;
            fieldPointers[numVectorFields] = perDocTvf.FilePointer;
            numVectorFields++;
        }
        public override long SizeInBytes()
        {
            return buffer.SizeInBytes;
        }
        public override void Finish()
        {
            Enclosing_Instance.FinishDocument(this);
        }
    }
    /// <summary>Per-term posting state tracked while building vectors.</summary>
    internal sealed class PostingList:RawPostingList
    {
        internal int freq; // How many times this term occurred in the current doc
        internal int lastOffset; // Last offset we saw
        internal int lastPosition; // Last position where this term occurred
    }
    /// <summary>Bytes of postings state per term: base size plus three ints.</summary>
    internal override int BytesPerPosting()
    {
        return RawPostingList.BYTES_SIZE + 3 * DocumentsWriter.INT_NUM_BYTE;
    }
}
}
| |
using System;
using QuoteMyGoods.Models;
using Microsoft.EntityFrameworkCore.Infrastructure;
using Microsoft.EntityFrameworkCore.Migrations;
using Microsoft.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore.Metadata;
namespace QuoteMyGoods.Migrations
{
// Designer-generated EF Core migration snapshot for the "removeAdmin"
// migration: rebuilds the target model (ASP.NET Identity tables plus the
// Product and QMGUser entities). Generated code - regenerate with the EF
// tooling rather than editing by hand.
[DbContext(typeof(QMGContext))]
[Migration("20160318124422_removeAdmin")]
partial class removeAdmin
{
    /// <summary>
    /// Builds the model as it should exist after this migration is applied.
    /// </summary>
    protected override void BuildTargetModel(ModelBuilder modelBuilder)
    {
        modelBuilder
            .HasAnnotation("ProductVersion", "7.0.0-rc1-16348")
            .HasAnnotation("SqlServer:ValueGenerationStrategy", SqlServerValueGenerationStrategy.IdentityColumn);
        // ASP.NET Identity: roles.
        modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFramework.IdentityRole", b =>
            {
                b.Property<string>("Id");
                b.Property<string>("ConcurrencyStamp")
                    .IsConcurrencyToken();
                b.Property<string>("Name")
                    .HasAnnotation("MaxLength", 256);
                b.Property<string>("NormalizedName")
                    .HasAnnotation("MaxLength", 256);
                b.HasKey("Id");
                b.HasIndex("NormalizedName")
                    .HasAnnotation("Relational:Name", "RoleNameIndex");
                b.HasAnnotation("Relational:TableName", "AspNetRoles");
            });
        // ASP.NET Identity: role claims.
        modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFramework.IdentityRoleClaim<string>", b =>
            {
                b.Property<int>("Id")
                    .ValueGeneratedOnAdd();
                b.Property<string>("ClaimType");
                b.Property<string>("ClaimValue");
                b.Property<string>("RoleId")
                    .IsRequired();
                b.HasKey("Id");
                b.HasAnnotation("Relational:TableName", "AspNetRoleClaims");
            });
        // ASP.NET Identity: user claims.
        modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFramework.IdentityUserClaim<string>", b =>
            {
                b.Property<int>("Id")
                    .ValueGeneratedOnAdd();
                b.Property<string>("ClaimType");
                b.Property<string>("ClaimValue");
                b.Property<string>("UserId")
                    .IsRequired();
                b.HasKey("Id");
                b.HasAnnotation("Relational:TableName", "AspNetUserClaims");
            });
        // ASP.NET Identity: external logins.
        modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFramework.IdentityUserLogin<string>", b =>
            {
                b.Property<string>("LoginProvider");
                b.Property<string>("ProviderKey");
                b.Property<string>("ProviderDisplayName");
                b.Property<string>("UserId")
                    .IsRequired();
                b.HasKey("LoginProvider", "ProviderKey");
                b.HasAnnotation("Relational:TableName", "AspNetUserLogins");
            });
        // ASP.NET Identity: user-role join table.
        modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFramework.IdentityUserRole<string>", b =>
            {
                b.Property<string>("UserId");
                b.Property<string>("RoleId");
                b.HasKey("UserId", "RoleId");
                b.HasAnnotation("Relational:TableName", "AspNetUserRoles");
            });
        // Application entity: product catalog item.
        modelBuilder.Entity("QuoteMyGoods.Models.Product", b =>
            {
                b.Property<int>("Id")
                    .ValueGeneratedOnAdd();
                b.Property<string>("Category");
                b.Property<string>("Description");
                b.Property<string>("ImgUrl");
                b.Property<string>("Name");
                b.Property<decimal>("Price");
                b.HasKey("Id");
            });
        // Application entity: identity user (still carries the Admin column here).
        modelBuilder.Entity("QuoteMyGoods.Models.QMGUser", b =>
            {
                b.Property<string>("Id");
                b.Property<int>("AccessFailedCount");
                b.Property<bool>("Admin");
                b.Property<string>("ConcurrencyStamp")
                    .IsConcurrencyToken();
                b.Property<string>("Email")
                    .HasAnnotation("MaxLength", 256);
                b.Property<bool>("EmailConfirmed");
                b.Property<bool>("LockoutEnabled");
                b.Property<DateTimeOffset?>("LockoutEnd");
                b.Property<string>("NormalizedEmail")
                    .HasAnnotation("MaxLength", 256);
                b.Property<string>("NormalizedUserName")
                    .HasAnnotation("MaxLength", 256);
                b.Property<string>("PasswordHash");
                b.Property<string>("PhoneNumber");
                b.Property<bool>("PhoneNumberConfirmed");
                b.Property<string>("SecurityStamp");
                b.Property<bool>("TwoFactorEnabled");
                b.Property<string>("UserName")
                    .HasAnnotation("MaxLength", 256);
                b.HasKey("Id");
                b.HasIndex("NormalizedEmail")
                    .HasAnnotation("Relational:Name", "EmailIndex");
                b.HasIndex("NormalizedUserName")
                    .HasAnnotation("Relational:Name", "UserNameIndex");
                b.HasAnnotation("Relational:TableName", "AspNetUsers");
            });
        // Foreign-key relationships between the Identity entities.
        modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFramework.IdentityRoleClaim<string>", b =>
            {
                b.HasOne("Microsoft.AspNetCore.Identity.EntityFramework.IdentityRole")
                    .WithMany()
                    .HasForeignKey("RoleId");
            });
        modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFramework.IdentityUserClaim<string>", b =>
            {
                b.HasOne("QuoteMyGoods.Models.QMGUser")
                    .WithMany()
                    .HasForeignKey("UserId");
            });
        modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFramework.IdentityUserLogin<string>", b =>
            {
                b.HasOne("QuoteMyGoods.Models.QMGUser")
                    .WithMany()
                    .HasForeignKey("UserId");
            });
        modelBuilder.Entity("Microsoft.AspNetCore.Identity.EntityFramework.IdentityUserRole<string>", b =>
            {
                b.HasOne("Microsoft.AspNetCore.Identity.EntityFramework.IdentityRole")
                    .WithMany()
                    .HasForeignKey("RoleId");
                b.HasOne("QuoteMyGoods.Models.QMGUser")
                    .WithMany()
                    .HasForeignKey("UserId");
            });
    }
}
}
| |
//
// Copyright (C) 2012-2014 DataStax Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
using System;
using System.Linq;
using System.Collections.Generic;
using System.Net;
using System.Threading;
using System.Net.Sockets;
using System.Threading.Tasks;
using Cassandra.Tasks;
using Cassandra.Requests;
using Cassandra.Responses;
using Cassandra.Serialization;
namespace Cassandra
{
internal class ControlConnection : IMetadataQueryProvider, IDisposable
{
// Queries used to discover the topology of the cluster.
private const string SelectPeers = "SELECT peer, data_center, rack, tokens, rpc_address, release_version FROM system.peers";
private const string SelectLocal = "SELECT * FROM system.local WHERE key='local'";
// Server events the control connection registers for.
private const CassandraEventType CassandraEventTypes = CassandraEventType.TopologyChange | CassandraEventType.StatusChange | CassandraEventType.SchemaChange;
// 0.0.0.0: value seen in rpc_address when a node binds all interfaces.
private static readonly IPAddress BindAllAddress = new IPAddress(new byte[4]);
private readonly Metadata _metadata;
private volatile Host _host;             // host currently used by the control connection
private volatile Connection _connection; // active connection; null before Init / after failure
// ReSharper disable once InconsistentNaming
private static readonly Logger _logger = new Logger(typeof (ControlConnection));
private readonly Configuration _config;
private readonly IReconnectionPolicy _reconnectionPolicy;
private IReconnectionSchedule _reconnectionSchedule;
private readonly Timer _reconnectionTimer; // armed after a failed reconnection attempt
private long _isShutdown;                  // > 0 once Shutdown() has run (via Interlocked)
private int _refreshCounter;               // guards against concurrent Refresh() calls
private Task<bool> _reconnectTask;         // in-flight reconnection attempt, if any
private readonly Serializer _serializer;
internal const int MetadataAbortTimeout = 5 * 60000; // 5 minutes, in ms
/// <summary>
/// Gets the binary protocol version to be used for this cluster.
/// May have been lowered from the initial version during connection
/// negotiation (see IterateAndConnect).
/// </summary>
public ProtocolVersion ProtocolVersion
{
    get { return _serializer.ProtocolVersion; }
}
/// <summary>
/// Gets or sets the host the control connection is currently attached to.
/// </summary>
internal Host Host
{
    get { return _host; }
    set { _host = value; }
}
/// <summary>
/// Gets the endpoint of the current connection, or null when no connection
/// has been established yet.
/// </summary>
public IPEndPoint Address
{
    get
    {
        // Snapshot the volatile field once so the null check and the
        // dereference see the same instance (the field can be swapped by a
        // concurrent reconnection); matches the pattern used in Unsubscribe().
        var connection = _connection;
        if (connection == null)
        {
            return null;
        }
        return connection.Address;
    }
}
/// <summary>
/// Gets the serializer, which carries the negotiated protocol version and
/// the configured type serializers.
/// </summary>
public Serializer Serializer
{
    get { return _serializer; }
}
/// <summary>
/// Creates a new control connection (not yet connected; call Init()).
/// </summary>
/// <param name="initialProtocolVersion">Protocol version to start negotiation with.</param>
/// <param name="config">Cluster configuration (policies, type serializers, translator).</param>
/// <param name="metadata">Cluster metadata instance this connection keeps updated.</param>
internal ControlConnection(ProtocolVersion initialProtocolVersion, Configuration config, Metadata metadata)
{
    _metadata = metadata;
    _reconnectionPolicy = config.Policies.ReconnectionPolicy;
    _reconnectionSchedule = _reconnectionPolicy.NewSchedule();
    // The timer starts disabled; it is armed after a failed reconnection attempt.
    _reconnectionTimer = new Timer(_ => Reconnect().Forget(), null, Timeout.Infinite, Timeout.Infinite);
    _config = config;
    _serializer = new Serializer(initialProtocolVersion, config.TypeSerializers);
}
/// <summary>Disposes the control connection by shutting it down.</summary>
public void Dispose()
{
    Shutdown();
}
/// <summary>
/// Tries to create a connection to any of the contact points and retrieve cluster metadata for the first time. Not thread-safe.
/// </summary>
/// <exception cref="NoHostAvailableException" />
/// <exception cref="TimeoutException" />
/// <exception cref="DriverInternalError" />
internal async Task Init()
{
    _logger.Info("Trying to connect the ControlConnection");
    await Connect(true).ConfigureAwait(false);
    SubscribeEventHandlers();
    var obtainingMetadataFailed = false;
    try
    {
        // First metadata pull: node list, then keyspaces.
        await RefreshNodeList().ConfigureAwait(false);
        await _metadata.RefreshKeyspaces(false).ConfigureAwait(false);
    }
    catch (SocketException ex)
    {
        _logger.Error("An error occurred when trying to retrieve the cluster metadata, retrying.", ex);
        // Can't await on catch
        obtainingMetadataFailed = true;
    }
    if (obtainingMetadataFailed)
    {
        // There was a problem using the connection obtained, it is not usual but can happen
        // Retry one more time and throw if there is problem
        await Reconnect().ConfigureAwait(false);
    }
}
/// <summary>
/// Tries to create a connection to the cluster, iterating over the candidate hosts.
/// </summary>
/// <exception cref="NoHostAvailableException" />
/// <exception cref="DriverInternalError" />
private Task<bool> Connect(bool firstTime)
{
    IEnumerable<Host> candidates;
    if (firstTime)
    {
        // On the very first attempt, use the unfiltered host list from metadata.
        candidates = _metadata.Hosts;
    }
    else
    {
        _logger.Info("Trying to reconnect the ControlConnection");
        // On reconnection, let the load balancing policy decide the order.
        candidates = _config.Policies.LoadBalancingPolicy.NewQueryPlan(null, null);
    }
    return IterateAndConnect(candidates.GetEnumerator(), new Dictionary<IPEndPoint, Exception>());
}
/// <summary>
/// Attempts to open a connection to each host from the enumerator in turn,
/// negotiating the protocol version downwards when the server rejects it.
/// Throws NoHostAvailableException once every candidate has failed.
/// </summary>
private async Task<bool> IterateAndConnect(IEnumerator<Host> hostsEnumerator, Dictionary<IPEndPoint, Exception> triedHosts)
{
    var available = hostsEnumerator.MoveNext();
    if (!available)
    {
        // All candidates failed; surface the per-host errors collected so far.
        throw new NoHostAvailableException(triedHosts);
    }
    var host = hostsEnumerator.Current;
    var c = new Connection(_serializer, host.Address, _config);
    // Use a task to workaround "no await in catch"
    Task<bool> nextTask;
    try
    {
        await c.Open().ConfigureAwait(false);
        _connection = c;
        _host = host;
        _logger.Info("Connection established to {0}", c.Address);
        return true;
    }
    catch (UnsupportedProtocolVersionException ex)
    {
        // The server can respond with a message using a lower protocol version supported by the server
        // or using the same version as the one provided
        var nextVersion = _serializer.ProtocolVersion;
        if (nextVersion == ex.ProtocolVersion || !nextVersion.IsSupported())
        {
            nextVersion = nextVersion.GetLowerSupported();
        }
        if (nextVersion == 0)
        {
            // No lower version left to try.
            throw new DriverInternalError("Connection was unable to STARTUP using protocol version " +
                                          ex.ProtocolVersion);
        }
        _serializer.ProtocolVersion = nextVersion;
        _logger.Info(string.Format("{0}, trying with version {1:D}", ex.Message, nextVersion));
        c.Dispose();
        if (!nextVersion.IsSupported())
        {
            throw new DriverInternalError("Invalid protocol version " + nextVersion);
        }
        //Retry using the new protocol version
        nextTask = Connect(true);
    }
    catch (Exception ex)
    {
        //There was a socket exception or an authentication exception
        triedHosts.Add(host.Address, ex);
        c.Dispose();
        nextTask = IterateAndConnect(hostsEnumerator, triedHosts);
    }
    return await nextTask.ConfigureAwait(false);
}
/// <summary>
/// Reconnects the control connection to the next available host and refreshes
/// the metadata. Concurrent callers share a single in-flight attempt through
/// <c>_reconnectTask</c>. Returns false when the connection was shut down.
/// </summary>
internal async Task<bool> Reconnect()
{
    var tcs = new TaskCompletionSource<bool>();
    // Only one reconnection at a time: the first caller installs its TCS,
    // later callers await the already-running attempt.
    var currentTask = Interlocked.CompareExchange(ref _reconnectTask, tcs.Task, null);
    if (currentTask != null)
    {
        // If there is another thread reconnecting, use the same task
        return await currentTask.ConfigureAwait(false);
    }
    Unsubscribe();
    try
    {
        await Connect(false).ConfigureAwait(false);
    }
    catch (Exception ex)
    {
        // It failed to reconnect, schedule the timer for next reconnection and let go.
        Interlocked.Exchange(ref _reconnectTask, null).Forget();
        tcs.TrySetException(ex);
        var delay = _reconnectionSchedule.NextDelayMs();
        _logger.Error("ControlConnection was not able to reconnect: " + ex);
        try
        {
            _reconnectionTimer.Change((int)delay, Timeout.Infinite);
        }
        catch (ObjectDisposedException)
        {
            //Control connection is being disposed
        }
        // It will throw the same exception that it was set in the TCS
        throw;
    }
    if (Interlocked.Read(ref _isShutdown) > 0L)
    {
        return false;
    }
    try
    {
        await RefreshNodeList().ConfigureAwait(false);
        TaskHelper.WaitToComplete(_metadata.RefreshKeyspaces(false), MetadataAbortTimeout);
        _reconnectionSchedule = _reconnectionPolicy.NewSchedule();
        tcs.TrySetResult(true);
        Interlocked.Exchange(ref _reconnectTask, null).Forget();
        _logger.Info("ControlConnection reconnected to host {0}", _host.Address);
    }
    catch (Exception ex)
    {
        Interlocked.Exchange(ref _reconnectTask, null).Forget();
        _logger.Error("There was an error when trying to refresh the ControlConnection", ex);
        tcs.TrySetException(ex);
        try
        {
            _reconnectionTimer.Change((int)_reconnectionSchedule.NextDelayMs(), Timeout.Infinite);
        }
        catch (ObjectDisposedException)
        {
            //Control connection is being disposed
        }
    }
    // Library code: avoid capturing the sync context, consistent with every
    // other await in this class (was missing ConfigureAwait(false) here).
    return await tcs.Task.ConfigureAwait(false);
}
/// <summary>
/// Refreshes the node list and keyspace metadata on the current connection.
/// Only one refresh runs at a time (guarded by _refreshCounter); a
/// SocketException triggers a full reconnection instead.
/// </summary>
internal async Task Refresh()
{
    if (Interlocked.Increment(ref _refreshCounter) != 1)
    {
        //Only one refresh at a time
        Interlocked.Decrement(ref _refreshCounter);
        return;
    }
    var reconnect = false;
    try
    {
        await RefreshNodeList().ConfigureAwait(false);
        TaskHelper.WaitToComplete(_metadata.RefreshKeyspaces(false), MetadataAbortTimeout);
        _reconnectionSchedule = _reconnectionPolicy.NewSchedule();
    }
    catch (SocketException ex)
    {
        // The connection is broken; flag it (can't await inside catch).
        _logger.Error("There was a SocketException when trying to refresh the ControlConnection", ex);
        reconnect = true;
    }
    catch (Exception ex)
    {
        _logger.Error("There was an error when trying to refresh the ControlConnection", ex);
    }
    finally
    {
        Interlocked.Decrement(ref _refreshCounter);
    }
    if (reconnect)
    {
        await Reconnect().ConfigureAwait(false);
    }
}
/// <summary>
/// Shuts down the control connection: disposes the current connection and the
/// reconnection timer. Safe to call multiple times; only the first call acts.
/// </summary>
public void Shutdown()
{
    if (Interlocked.Increment(ref _isShutdown) != 1)
    {
        //Only shutdown once
        return;
    }
    // Snapshot the volatile field before using it.
    var c = _connection;
    if (c != null)
    {
        _logger.Info("Shutting down control connection to {0}", c.Address);
        c.Dispose();
    }
    _reconnectionTimer.Change(Timeout.Infinite, Timeout.Infinite);
    _reconnectionTimer.Dispose();
}
/// <summary>
/// Sets up the event listeners for the current host and connection and
/// registers for server-pushed events. Not thread-safe.
/// </summary>
/// <exception cref="DriverInternalError">When the server does not acknowledge the registration.</exception>
private void SubscribeEventHandlers()
{
    _host.Down += OnHostDown;
    _connection.CassandraEventResponse += OnConnectionCassandraEvent;
    //Register to events on the connection
    var registerTask = _connection.Send(new RegisterForEventRequest(CassandraEventTypes));
    // Block up to 10 seconds for the server to acknowledge the registration.
    TaskHelper.WaitToComplete(registerTask, 10000);
    if (!(registerTask.Result is ReadyResponse))
    {
        throw new DriverInternalError("Expected ReadyResponse, obtained " + registerTask.Result.GetType().Name);
    }
}
/// <summary>
/// Detaches the event handlers from the current connection and host, if any.
/// </summary>
private void Unsubscribe()
{
    // Snapshot the volatile fields once before touching them.
    var connection = _connection;
    if (connection != null)
    {
        connection.CassandraEventResponse -= OnConnectionCassandraEvent;
    }
    var host = _host;
    if (host != null)
    {
        host.Down -= OnHostDown;
    }
}
/// <summary>
/// Handler for the Down event of the host used by the control connection:
/// detaches itself and kicks off a reconnection on a thread pool thread.
/// </summary>
private void OnHostDown(Host h)
{
    h.Down -= OnHostDown;
    _logger.Warning("Host {0} used by the ControlConnection DOWN", h.Address);
    // Fire-and-forget on the thread pool; Reconnect() handles its own errors.
    Task.Factory.StartNew(() => Reconnect(), CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default);
}
/// <summary>
/// Dispatches server-pushed events: topology changes trigger a node list
/// refresh, status changes update host state, and schema changes refresh or
/// clear the affected metadata (table/view, function, aggregate, keyspace).
/// </summary>
private void OnConnectionCassandraEvent(object sender, CassandraEventArgs e)
{
    //This event is invoked from a worker thread (not a IO thread)
    if (e is TopologyChangeEventArgs)
    {
        var tce = (TopologyChangeEventArgs)e;
        if (tce.What == TopologyChangeEventArgs.Reason.NewNode || tce.What == TopologyChangeEventArgs.Reason.RemovedNode)
        {
            // Start refresh
            Refresh().Forget();
            return;
        }
    }
    if (e is StatusChangeEventArgs)
    {
        HandleStatusChangeEvent((StatusChangeEventArgs) e);
        return;
    }
    if (e is SchemaChangeEventArgs)
    {
        var ssc = (SchemaChangeEventArgs)e;
        if (!string.IsNullOrEmpty(ssc.Table))
        {
            //Can be either a table or a view
            _metadata.RefreshTable(ssc.Keyspace, ssc.Table);
            _metadata.RefreshView(ssc.Keyspace, ssc.Table);
            return;
        }
        if (ssc.FunctionName != null)
        {
            _metadata.ClearFunction(ssc.Keyspace, ssc.FunctionName, ssc.Signature);
            return;
        }
        if (ssc.AggregateName != null)
        {
            _metadata.ClearAggregate(ssc.Keyspace, ssc.AggregateName, ssc.Signature);
            return;
        }
        if (ssc.Type != null)
        {
            // Type changes carry no cached metadata to invalidate here.
            return;
        }
        if (ssc.What == SchemaChangeEventArgs.Reason.Dropped)
        {
            _metadata.RemoveKeyspace(ssc.Keyspace);
            return;
        }
        // Created or updated keyspace: refresh just that keyspace.
        _metadata.RefreshSingleKeyspace(ssc.What == SchemaChangeEventArgs.Reason.Created, ssc.Keyspace);
    }
}
/// <summary>
/// Handles an UP/DOWN status change event, updating the host state only for
/// hosts the load balancing policy ignores (pools are trusted otherwise).
/// </summary>
private void HandleStatusChangeEvent(StatusChangeEventArgs e)
{
    //The address in the Cassandra event message needs to be translated
    var address = TranslateAddress(e.Address);
    _logger.Info("Received Node status change event: host {0} is {1}", address, e.What.ToString().ToUpper());
    Host host;
    var known = _metadata.Hosts.TryGet(address, out host);
    if (!known)
    {
        _logger.Info("Received status change event for host {0} but it was not found", address);
        return;
    }
    var distance = Cluster.RetrieveDistance(host, _config.Policies.LoadBalancingPolicy);
    if (distance != HostDistance.Ignored)
    {
        // For hosts that take part in the connection pools, trust the pools
        // rather than status change events.
        return;
    }
    if (e.What == StatusChangeEventArgs.Reason.Up)
    {
        host.BringUpIfDown();
    }
    else
    {
        host.SetDown();
    }
}
/// <summary>
/// Applies the configured address translator to an endpoint reported by Cassandra.
/// </summary>
private IPEndPoint TranslateAddress(IPEndPoint value)
{
    var translator = _config.AddressTranslator;
    return translator.Translate(value);
}
/// <summary>
/// Queries system.local and system.peers and updates the metadata host list,
/// partitioner, and per-host location/version/token information.
/// </summary>
private async Task RefreshNodeList()
{
    _logger.Info("Refreshing node list");
    var rsLocal = await QueryAsync(SelectLocal).ConfigureAwait(false);
    var localRow = rsLocal.FirstOrDefault();
    var rsPeers = await QueryAsync(SelectPeers).ConfigureAwait(false);
    if (localRow == null)
    {
        // Without the local row we cannot update the partitioner or local host.
        _logger.Error("Local host metadata could not be retrieved");
        return;
    }
    _metadata.Partitioner = localRow.GetValue<string>("partitioner");
    UpdateLocalInfo(localRow);
    UpdatePeersInfo(rsPeers);
    _logger.Info("Node list retrieved successfully");
}
/// <summary>
/// Updates cluster name, datacenter/rack, Cassandra version and tokens for
/// the node the control connection is attached to, from its system.local row.
/// </summary>
internal void UpdateLocalInfo(Row row)
{
    var localhost = _host;
    // Update cluster name, DC and rack for the one node we are connected to
    var clusterName = row.GetValue<string>("cluster_name");
    if (clusterName != null)
    {
        _metadata.ClusterName = clusterName;
    }
    localhost.SetLocationInfo(row.GetValue<string>("data_center"), row.GetValue<string>("rack"));
    SetCassandraVersion(localhost, row);
    // Tokens can be null when the node has not joined the ring yet.
    localhost.Tokens = row.GetValue<IEnumerable<string>>("tokens") ?? new string[0];
    _metadata.SetCassandraVersion(localhost.CassandraVersion);
}
/// <summary>
/// Adds or updates host metadata from system.peers rows, then removes hosts
/// that are no longer listed as peers (the connected host is never removed).
/// </summary>
internal void UpdatePeersInfo(IEnumerable<Row> rs)
{
    var foundPeers = new HashSet<IPEndPoint>();
    foreach (var row in rs)
    {
        var address = GetAddressForPeerHost(row, _config.AddressTranslator, _config.ProtocolOptions.Port);
        if (address == null)
        {
            _logger.Error("No address found for host, ignoring it.");
            continue;
        }
        foundPeers.Add(address);
        var host = _metadata.GetHost(address);
        if (host == null)
        {
            host = _metadata.AddHost(address);
        }
        host.SetLocationInfo(row.GetValue<string>("data_center"), row.GetValue<string>("rack"));
        SetCassandraVersion(host, row);
        host.Tokens = row.GetValue<IEnumerable<string>>("tokens") ?? new string[0];
    }
    // Remove hosts that appear to have left the cluster (or were never valid
    // contact points) — i.e. known replicas not present in the peers result.
    foreach (var address in _metadata.AllReplicas())
    {
        if (!address.Equals(_host.Address) && !foundPeers.Contains(address))
        {
            _metadata.RemoveHost(address);
        }
    }
}
/// <summary>
/// Reads the "release_version" column from a system table row and assigns the
/// parsed version to the host. Any failure is logged and swallowed so that a
/// malformed version string never aborts a metadata refresh.
/// </summary>
internal static void SetCassandraVersion(Host host, Row row)
{
    try
    {
        var releaseVersion = row.GetValue<string>("release_version");
        if (releaseVersion == null)
        {
            return;
        }
        // Strip any suffix (e.g. "-SNAPSHOT") before parsing.
        var versionText = releaseVersion.Split('-')[0];
        host.CassandraVersion = Version.Parse(versionText);
    }
    catch (Exception ex)
    {
        _logger.Error("There was an error while trying to retrieve the Cassandra version", ex);
    }
}
/// <summary>
/// Determines the endpoint of a peer from a system.peers row: uses
/// "rpc_address", falling back to the "peer" (listen) address when
/// rpc_address is the bind-all address, then applies the address translator.
/// </summary>
/// <returns>The translated endpoint, or null when no rpc_address is present.</returns>
internal static IPEndPoint GetAddressForPeerHost(Row row, IAddressTranslator translator, int port)
{
    var address = row.GetValue<IPAddress>("rpc_address");
    if (address == null)
    {
        return null;
    }
    // 0.0.0.0 is not routable; the "peer" column holds the listen address.
    if (BindAllAddress.Equals(address) && !row.IsNull("peer"))
    {
        address = row.GetValue<IPAddress>("peer");
        _logger.Warning(String.Format("Found host with 0.0.0.0 as rpc_address, using listen_address ({0}) to contact it instead. If this is incorrect you should avoid the use of 0.0.0.0 server side.", address));
    }
    return translator.Translate(new IPEndPoint(address, port));
}
/// <summary>
/// Uses the active connection to execute a query, blocking until the result
/// is available or MetadataAbortTimeout elapses.
/// </summary>
public IEnumerable<Row> Query(string cqlQuery, bool retry = false)
{
    return TaskHelper.WaitToComplete(QueryAsync(cqlQuery, retry), MetadataAbortTimeout);
}
/// <summary>
/// Uses the active connection to asynchronously execute a query.
/// </summary>
/// <param name="cqlQuery">The CQL query to execute.</param>
/// <param name="retry">
/// When true and the query fails with a socket error, reconnects the control
/// connection and retries the query once.
/// </param>
public Task<IEnumerable<Row>> QueryAsync(string cqlQuery, bool retry = false)
{
    var request = new QueryRequest(ProtocolVersion, cqlQuery, false, QueryProtocolOptions.Default);
    var task = _connection
        .Send(request)
        .ContinueSync(GetRowSet);
    if (!retry)
    {
        return task;
    }
    return task.ContinueWith(t =>
    {
        var ex = t.Exception != null ? t.Exception.InnerException : null;
        if (ex is SocketException)
        {
            const string message = "There was an error while executing on the host {0} the query '{1}'";
            // BUGFIX: arguments were previously swapped — {0} is the host
            // address and {1} is the query, per the format string above.
            _logger.Error(string.Format(message, _connection.Address, cqlQuery), ex);
            //Reconnect and query again
            return Reconnect()
                .Then(_ => QueryAsync(cqlQuery, false));
        }
        // Non-socket failures (or success) propagate the original task result.
        return task;
    }).Unwrap();
}
/// <summary>
/// Validates that the result contains a RowSet and returns it.
/// </summary>
/// <exception cref="NullReferenceException" />
/// <exception cref="DriverInternalError" />
public static IEnumerable<Row> GetRowSet(Response response)
{
    if (response == null)
    {
        throw new NullReferenceException("Response can not be null");
    }
    var resultResponse = response as ResultResponse;
    if (resultResponse == null)
    {
        throw new DriverInternalError("Expected rows, obtained " + response.GetType().FullName);
    }
    var rowsOutput = resultResponse.Output as OutputRows;
    if (rowsOutput == null)
    {
        throw new DriverInternalError("Expected rows output, obtained " + resultResponse.Output.GetType().FullName);
    }
    return rowsOutput.RowSet;
}
}
/// <summary>
/// Represents an object that can execute metadata queries
/// </summary>
internal interface IMetadataQueryProvider
{
    /// <summary>
    /// The protocol version in use on the underlying connection.
    /// </summary>
    ProtocolVersion ProtocolVersion { get; }
    /// <summary>
    /// The address of the endpoint used by the ControlConnection
    /// </summary>
    IPEndPoint Address { get; }
    /// <summary>
    /// The serializer associated with the connection's protocol version.
    /// </summary>
    Serializer Serializer { get; }
    /// <summary>
    /// Asynchronously executes a CQL query; when <paramref name="retry"/> is
    /// true the provider may reconnect and retry once on connection failure.
    /// </summary>
    Task<IEnumerable<Row>> QueryAsync(string cqlQuery, bool retry = false);
    /// <summary>
    /// Blocking counterpart of <see cref="QueryAsync"/>.
    /// </summary>
    IEnumerable<Row> Query(string cqlQuery, bool retry = false);
}
}
| |
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
// **NOTE** This file was generated by a tool and any changes will be overwritten.
namespace Microsoft.Graph
{
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading;
using System.Linq.Expressions;
/// <summary>
/// The type WorkbookRangeFormatBordersCollectionRequest.
/// </summary>
public partial class WorkbookRangeFormatBordersCollectionRequest : BaseRequest, IWorkbookRangeFormatBordersCollectionRequest
{
    /// <summary>
    /// Constructs a new WorkbookRangeFormatBordersCollectionRequest.
    /// </summary>
    /// <param name="requestUrl">The URL for the built request.</param>
    /// <param name="client">The <see cref="IBaseClient"/> for handling requests.</param>
    /// <param name="options">Query and header option name value pairs for the request.</param>
    public WorkbookRangeFormatBordersCollectionRequest(
        string requestUrl,
        IBaseClient client,
        IEnumerable<Option> options)
        : base(requestUrl, client, options)
    {
    }

    /// <summary>
    /// Adds the specified WorkbookRangeBorder to the collection via POST.
    /// </summary>
    /// <param name="workbookRangeBorder">The WorkbookRangeBorder to add.</param>
    /// <returns>The created WorkbookRangeBorder.</returns>
    public System.Threading.Tasks.Task<WorkbookRangeBorder> AddAsync(WorkbookRangeBorder workbookRangeBorder)
    {
        return this.AddAsync(workbookRangeBorder, CancellationToken.None);
    }

    /// <summary>
    /// Adds the specified WorkbookRangeBorder to the collection via POST.
    /// </summary>
    /// <param name="workbookRangeBorder">The WorkbookRangeBorder to add.</param>
    /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
    /// <returns>The created WorkbookRangeBorder.</returns>
    public System.Threading.Tasks.Task<WorkbookRangeBorder> AddAsync(WorkbookRangeBorder workbookRangeBorder, CancellationToken cancellationToken)
    {
        this.ContentType = "application/json";
        this.Method = "POST";
        return this.SendAsync<WorkbookRangeBorder>(workbookRangeBorder, cancellationToken);
    }

    /// <summary>
    /// Gets the collection page.
    /// </summary>
    /// <returns>The collection page.</returns>
    public System.Threading.Tasks.Task<IWorkbookRangeFormatBordersCollectionPage> GetAsync()
    {
        return this.GetAsync(CancellationToken.None);
    }

    /// <summary>
    /// Gets the collection page.
    /// </summary>
    /// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
    /// <returns>The collection page.</returns>
    public async System.Threading.Tasks.Task<IWorkbookRangeFormatBordersCollectionPage> GetAsync(CancellationToken cancellationToken)
    {
        this.Method = "GET";
        var response = await this.SendAsync<WorkbookRangeFormatBordersCollectionResponse>(null, cancellationToken).ConfigureAwait(false);
        if (response != null && response.Value != null && response.Value.CurrentPage != null)
        {
            if (response.AdditionalData != null)
            {
                // Wire the next-page request onto the page when the service
                // returned a continuation link.
                object nextPageLink;
                response.AdditionalData.TryGetValue("@odata.nextLink", out nextPageLink);
                var nextPageLinkString = nextPageLink as string;
                if (!string.IsNullOrEmpty(nextPageLinkString))
                {
                    response.Value.InitializeNextPageRequest(
                        this.Client,
                        nextPageLinkString);
                }
                // Copy the additional data collection to the page itself so that information is not lost
                response.Value.AdditionalData = response.AdditionalData;
            }
            return response.Value;
        }
        return null;
    }

    /// <summary>
    /// Adds the specified expand value to the request.
    /// </summary>
    /// <param name="value">The expand value.</param>
    /// <returns>The request object to send.</returns>
    public IWorkbookRangeFormatBordersCollectionRequest Expand(string value)
    {
        this.QueryOptions.Add(new QueryOption("$expand", value));
        return this;
    }

    /// <summary>
    /// Adds the specified expand value to the request.
    /// </summary>
    /// <param name="expandExpression">The expression from which to calculate the expand value.</param>
    /// <returns>The request object to send.</returns>
    public IWorkbookRangeFormatBordersCollectionRequest Expand(Expression<Func<WorkbookRangeBorder, object>> expandExpression)
    {
        return this.AddExpressionOption("$expand", expandExpression, nameof(expandExpression));
    }

    /// <summary>
    /// Adds the specified select value to the request.
    /// </summary>
    /// <param name="value">The select value.</param>
    /// <returns>The request object to send.</returns>
    public IWorkbookRangeFormatBordersCollectionRequest Select(string value)
    {
        this.QueryOptions.Add(new QueryOption("$select", value));
        return this;
    }

    /// <summary>
    /// Adds the specified select value to the request.
    /// </summary>
    /// <param name="selectExpression">The expression from which to calculate the select value.</param>
    /// <returns>The request object to send.</returns>
    public IWorkbookRangeFormatBordersCollectionRequest Select(Expression<Func<WorkbookRangeBorder, object>> selectExpression)
    {
        return this.AddExpressionOption("$select", selectExpression, nameof(selectExpression));
    }

    /// <summary>
    /// Extracts member names from the given expression and adds them as the
    /// named query option. Shared implementation for the expression-based
    /// Expand and Select overloads (behavior, including the exception types
    /// and parameter names reported, is unchanged).
    /// </summary>
    /// <param name="optionName">The query option name (e.g. "$expand").</param>
    /// <param name="expression">The member-selection expression.</param>
    /// <param name="parameterName">The caller's parameter name, used in thrown exceptions.</param>
    /// <returns>The request object to send.</returns>
    private IWorkbookRangeFormatBordersCollectionRequest AddExpressionOption(
        string optionName,
        Expression<Func<WorkbookRangeBorder, object>> expression,
        string parameterName)
    {
        if (expression == null)
        {
            throw new ArgumentNullException(parameterName);
        }
        string error;
        string value = ExpressionExtractHelper.ExtractMembers(expression, out error);
        if (value == null)
        {
            throw new ArgumentException(error, parameterName);
        }
        this.QueryOptions.Add(new QueryOption(optionName, value));
        return this;
    }

    /// <summary>
    /// Adds the specified top value to the request.
    /// </summary>
    /// <param name="value">The top value.</param>
    /// <returns>The request object to send.</returns>
    public IWorkbookRangeFormatBordersCollectionRequest Top(int value)
    {
        this.QueryOptions.Add(new QueryOption("$top", value.ToString()));
        return this;
    }

    /// <summary>
    /// Adds the specified filter value to the request.
    /// </summary>
    /// <param name="value">The filter value.</param>
    /// <returns>The request object to send.</returns>
    public IWorkbookRangeFormatBordersCollectionRequest Filter(string value)
    {
        this.QueryOptions.Add(new QueryOption("$filter", value));
        return this;
    }

    /// <summary>
    /// Adds the specified skip value to the request.
    /// </summary>
    /// <param name="value">The skip value.</param>
    /// <returns>The request object to send.</returns>
    public IWorkbookRangeFormatBordersCollectionRequest Skip(int value)
    {
        this.QueryOptions.Add(new QueryOption("$skip", value.ToString()));
        return this;
    }

    /// <summary>
    /// Adds the specified orderby value to the request.
    /// </summary>
    /// <param name="value">The orderby value.</param>
    /// <returns>The request object to send.</returns>
    public IWorkbookRangeFormatBordersCollectionRequest OrderBy(string value)
    {
        this.QueryOptions.Add(new QueryOption("$orderby", value));
        return this;
    }
}
}
| |
//
// Copyright (c) 2008-2011, Kenneth Bell
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
namespace DiscUtils.Ntfs
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
/// <summary>
/// An NTFS attribute record whose content lives outside the MFT record,
/// described by a list of cluster data runs. Handles (de)serialization of the
/// on-disk non-resident attribute header and its data runs.
/// </summary>
internal sealed class NonResidentAttributeRecord : AttributeRecord
{
    // Stored as log2(clusters): 2^4 = 16 clusters per compression unit.
    private const ushort DefaultCompressionUnitSize = 4;
    private ulong _startingVCN;           // First virtual cluster number covered by this record
    private ulong _lastVCN;               // Last VCN covered (inclusive)
    private ushort _dataRunsOffset;       // Byte offset of the data runs within the on-disk record
    private ushort _compressionUnitSize;  // log2 of the compression unit size, in clusters
    private ulong _dataAllocatedSize;     // Bytes allocated on disk
    private ulong _dataRealSize;          // Logical attribute size in bytes
    private ulong _initializedDataSize;   // Bytes actually initialized
    private ulong _compressedSize;        // Only meaningful for compressed/sparse attributes
    private List<DataRun> _dataRuns;
    // Deserializing constructor: parses the record from its on-disk form.
    public NonResidentAttributeRecord(byte[] buffer, int offset, out int length)
    {
        Read(buffer, offset, out length);
    }
    // Creates a new record covering one contiguous run of clusters, with all
    // size fields set to the full cluster span.
    public NonResidentAttributeRecord(AttributeType type, string name, ushort id, AttributeFlags flags, long firstCluster, ulong numClusters, uint bytesPerCluster)
        : base(type, name, id, flags)
    {
        _nonResidentFlag = 1;
        _dataRuns = new List<DataRun>();
        _dataRuns.Add(new DataRun(firstCluster, (long)numClusters, false));
        _lastVCN = numClusters - 1;
        _dataAllocatedSize = bytesPerCluster * numClusters;
        _dataRealSize = bytesPerCluster * numClusters;
        _initializedDataSize = bytesPerCluster * numClusters;
        if ((flags & (AttributeFlags.Compressed | AttributeFlags.Sparse)) != 0)
        {
            _compressionUnitSize = DefaultCompressionUnitSize;
        }
    }
    // Creates a new record from an existing set of runs, starting at startVcn.
    // Size fields are left zero; callers set them as needed.
    public NonResidentAttributeRecord(AttributeType type, string name, ushort id, AttributeFlags flags, long startVcn, List<DataRun> dataRuns)
        : base(type, name, id, flags)
    {
        _nonResidentFlag = 1;
        _dataRuns = dataRuns;
        _startingVCN = (ulong)startVcn;
        if ((flags & (AttributeFlags.Compressed | AttributeFlags.Sparse)) != 0)
        {
            _compressionUnitSize = DefaultCompressionUnitSize;
        }
        if (dataRuns != null && dataRuns.Count != 0)
        {
            // Last VCN = start VCN + total run length - 1 (inclusive bound).
            _lastVCN = _startingVCN;
            foreach (var run in dataRuns)
            {
                _lastVCN += (ulong)run.RunLength;
            }
            _lastVCN -= 1;
        }
    }
    /// <summary>
    /// The amount of space occupied by the attribute (in bytes).
    /// </summary>
    public override long AllocatedLength
    {
        get { return (long)_dataAllocatedSize; }
        set { _dataAllocatedSize = (ulong)value; }
    }
    /// <summary>
    /// The amount of data in the attribute (in bytes).
    /// </summary>
    public override long DataLength
    {
        get { return (long)_dataRealSize; }
        set { _dataRealSize = (ulong)value; }
    }
    /// <summary>
    /// The amount of initialized data in the attribute (in bytes).
    /// </summary>
    public override long InitializedDataLength
    {
        get { return (long)_initializedDataSize; }
        set { _initializedDataSize = (ulong)value; }
    }
    /// <summary>
    /// The compressed size of the attribute's data (in bytes); only meaningful
    /// for compressed or sparse attributes.
    /// </summary>
    public long CompressedDataSize
    {
        get { return (long)_compressedSize; }
        set { _compressedSize = (ulong)value; }
    }
    /// <summary>
    /// The first virtual cluster number covered by this record.
    /// </summary>
    public override long StartVcn
    {
        get { return (long)_startingVCN; }
    }
    /// <summary>
    /// The last virtual cluster number covered by this record (inclusive).
    /// </summary>
    public long LastVcn
    {
        get { return (long)_lastVCN; }
        set { _lastVCN = (ulong)value; }
    }
    /// <summary>
    /// Gets or sets the size of a compression unit (in clusters).
    /// </summary>
    public int CompressionUnitSize
    {
        // The field stores log2 of the size; expose the actual cluster count.
        get { return 1 << _compressionUnitSize; }
        set { _compressionUnitSize = (ushort)Utilities.Log2(value); }
    }
    /// <summary>
    /// The data runs describing where this attribute's clusters live.
    /// </summary>
    public List<DataRun> DataRuns
    {
        get { return _dataRuns; }
    }
    /// <summary>
    /// The on-disk size of this record in bytes: header (+8 for compressed or
    /// sparse), name, then data runs plus a terminator, rounded up to 8 bytes.
    /// </summary>
    public override int Size
    {
        get
        {
            byte nameLength = 0;
            // Header is 0x40 bytes, or 0x48 when the compressed-size field is present.
            ushort nameOffset = (ushort)(((Flags & (AttributeFlags.Compressed | AttributeFlags.Sparse)) != 0) ? 0x48 : 0x40);
            if (Name != null)
            {
                nameLength = (byte)Name.Length;
            }
            // Name is UTF-16 (2 bytes per char); runs start 8-byte aligned after it.
            ushort dataOffset = (ushort)Utilities.RoundUp(nameOffset + (nameLength * 2), 8);
            // Write out data first, since we know where it goes...
            int dataLen = 0;
            foreach (var run in _dataRuns)
            {
                dataLen += run.Size;
            }
            dataLen++; // NULL terminator
            return Utilities.RoundUp(dataOffset + dataLen, 8);
        }
    }
    /// <summary>
    /// Replaces an existing run with a new one, in place.
    /// </summary>
    public void ReplaceRun(DataRun oldRun, DataRun newRun)
    {
        int idx = _dataRuns.IndexOf(oldRun);
        if (idx < 0)
        {
            throw new ArgumentException("Attempt to replace non-existant run", "oldRun");
        }
        _dataRuns[idx] = newRun;
    }
    /// <summary>
    /// Removes a run and returns the index it occupied.
    /// </summary>
    public int RemoveRun(DataRun run)
    {
        int idx = _dataRuns.IndexOf(run);
        if (idx < 0)
        {
            throw new ArgumentException("Attempt to remove non-existant run", "run");
        }
        _dataRuns.RemoveAt(idx);
        return idx;
    }
    /// <summary>
    /// Inserts a new run immediately after an existing one.
    /// </summary>
    public void InsertRun(DataRun existingRun, DataRun newRun)
    {
        int idx = _dataRuns.IndexOf(existingRun);
        if (idx < 0)
        {
            throw new ArgumentException("Attempt to replace non-existant run", "existingRun");
        }
        _dataRuns.Insert(idx + 1, newRun);
    }
    /// <summary>
    /// Inserts a new run at the given index.
    /// </summary>
    public void InsertRun(int index, DataRun newRun)
    {
        _dataRuns.Insert(index, newRun);
    }
    /// <summary>
    /// Returns the absolute cluster ranges occupied by this attribute.
    /// Sparse runs occupy no clusters and are skipped.
    /// </summary>
    public override Range<long, long>[] GetClusters()
    {
        var cookedRuns = _dataRuns;
        long start = 0;
        List<Range<long, long>> result = new List<Range<long, long>>(_dataRuns.Count);
        foreach (var run in cookedRuns)
        {
            if (!run.IsSparse)
            {
                // Run offsets are stored relative to the previous run's start,
                // so accumulate to get the absolute cluster number.
                start += run.RunOffset;
                result.Add(new Range<long, long>(start, run.RunLength));
            }
        }
        return result.ToArray();
    }
    /// <summary>
    /// Returns a read-only view over this attribute's data.
    /// </summary>
    public override IBuffer GetReadOnlyDataBuffer(INtfsContext context)
    {
        return new NonResidentDataBuffer(context, this);
    }
    /// <summary>
    /// Serializes this record into the buffer at the given offset, returning
    /// the number of bytes written. Field offsets follow the NTFS
    /// non-resident attribute header layout.
    /// </summary>
    public override int Write(byte[] buffer, int offset)
    {
        ushort headerLength = 0x40;
        if ((Flags & (AttributeFlags.Compressed | AttributeFlags.Sparse)) != 0)
        {
            // Extra 8 bytes for the compressed-size field at 0x40.
            headerLength += 0x08;
        }
        byte nameLength = 0;
        ushort nameOffset = headerLength;
        if (Name != null)
        {
            nameLength = (byte)Name.Length;
        }
        ushort dataOffset = (ushort)Utilities.RoundUp(headerLength + (nameLength * 2), 8);
        // Write out data first, since we know where it goes...
        int dataLen = 0;
        foreach (var run in _dataRuns)
        {
            dataLen += run.Write(buffer, offset + dataOffset + dataLen);
        }
        buffer[offset + dataOffset + dataLen] = 0; // NULL terminator
        dataLen++;
        int length = (int)Utilities.RoundUp(dataOffset + dataLen, 8);
        // Common attribute header.
        Utilities.WriteBytesLittleEndian((uint)_type, buffer, offset + 0x00);
        Utilities.WriteBytesLittleEndian(length, buffer, offset + 0x04);
        buffer[offset + 0x08] = _nonResidentFlag;
        buffer[offset + 0x09] = nameLength;
        Utilities.WriteBytesLittleEndian(nameOffset, buffer, offset + 0x0A);
        Utilities.WriteBytesLittleEndian((ushort)_flags, buffer, offset + 0x0C);
        Utilities.WriteBytesLittleEndian(_attributeId, buffer, offset + 0x0E);
        // Non-resident-specific fields.
        Utilities.WriteBytesLittleEndian(_startingVCN, buffer, offset + 0x10);
        Utilities.WriteBytesLittleEndian(_lastVCN, buffer, offset + 0x18);
        Utilities.WriteBytesLittleEndian(dataOffset, buffer, offset + 0x20);
        Utilities.WriteBytesLittleEndian(_compressionUnitSize, buffer, offset + 0x22);
        Utilities.WriteBytesLittleEndian((uint)0, buffer, offset + 0x24); // Padding
        Utilities.WriteBytesLittleEndian(_dataAllocatedSize, buffer, offset + 0x28);
        Utilities.WriteBytesLittleEndian(_dataRealSize, buffer, offset + 0x30);
        Utilities.WriteBytesLittleEndian(_initializedDataSize, buffer, offset + 0x38);
        if ((Flags & (AttributeFlags.Compressed | AttributeFlags.Sparse)) != 0)
        {
            Utilities.WriteBytesLittleEndian(_compressedSize, buffer, offset + 0x40);
        }
        if (Name != null)
        {
            // Attribute names are stored as UTF-16LE.
            Array.Copy(Encoding.Unicode.GetBytes(Name), 0, buffer, offset + nameOffset, nameLength * 2);
        }
        return length;
    }
    /// <summary>
    /// Splits this record's runs in two, keeping the first part in this record
    /// and returning a new record holding the remainder. The split index falls
    /// back to the midpoint when the suggestion is out of range.
    /// </summary>
    public AttributeRecord Split(int suggestedSplitIdx)
    {
        int splitIdx;
        if (suggestedSplitIdx <= 0 || suggestedSplitIdx >= _dataRuns.Count)
        {
            splitIdx = _dataRuns.Count / 2;
        }
        else
        {
            splitIdx = suggestedSplitIdx;
        }
        // Accumulate the VCN and (relative) LCN at the split point.
        long splitVcn = (long)_startingVCN;
        long splitLcn = 0;
        for (int i = 0; i < splitIdx; ++i)
        {
            splitVcn += _dataRuns[i].RunLength;
            splitLcn += _dataRuns[i].RunOffset;
        }
        // Move the tail runs into the new record.
        List<DataRun> newRecordRuns = new List<DataRun>();
        while (_dataRuns.Count > splitIdx)
        {
            DataRun run = _dataRuns[splitIdx];
            _dataRuns.RemoveAt(splitIdx);
            newRecordRuns.Add(run);
        }
        // Each extent has implicit start LCN=0, so have to make stored runs match reality.
        // However, take care not to stomp on 'sparse' runs that may be at the start of the
        // new extent (indicated by Zero run offset).
        for (int i = 0; i < newRecordRuns.Count; ++i)
        {
            if (!newRecordRuns[i].IsSparse)
            {
                newRecordRuns[i].RunOffset += splitLcn;
                break;
            }
        }
        _lastVCN = (ulong)splitVcn - 1;
        return new NonResidentAttributeRecord(_type, _name, 0, _flags, splitVcn, newRecordRuns);
    }
    /// <summary>
    /// Writes a human-readable description of this record for diagnostics.
    /// </summary>
    public override void Dump(TextWriter writer, string indent)
    {
        base.Dump(writer, indent);
        writer.WriteLine(indent + "     Starting VCN: " + _startingVCN);
        writer.WriteLine(indent + "         Last VCN: " + _lastVCN);
        writer.WriteLine(indent + "   Comp Unit Size: " + _compressionUnitSize);
        writer.WriteLine(indent + "   Allocated Size: " + _dataAllocatedSize);
        writer.WriteLine(indent + "        Real Size: " + _dataRealSize);
        writer.WriteLine(indent + "   Init Data Size: " + _initializedDataSize);
        if ((Flags & (AttributeFlags.Compressed | AttributeFlags.Sparse)) != 0)
        {
            writer.WriteLine(indent + "  Compressed Size: " + _compressedSize);
        }
        string runStr = string.Empty;
        foreach (DataRun run in _dataRuns)
        {
            runStr += " " + run.ToString();
        }
        writer.WriteLine(indent + "        Data Runs:" + runStr);
    }
    /// <summary>
    /// Parses the non-resident header fields and data runs from the buffer.
    /// </summary>
    protected override void Read(byte[] buffer, int offset, out int length)
    {
        _dataRuns = null;
        base.Read(buffer, offset, out length);
        _startingVCN = Utilities.ToUInt64LittleEndian(buffer, offset + 0x10);
        _lastVCN = Utilities.ToUInt64LittleEndian(buffer, offset + 0x18);
        _dataRunsOffset = Utilities.ToUInt16LittleEndian(buffer, offset + 0x20);
        _compressionUnitSize = Utilities.ToUInt16LittleEndian(buffer, offset + 0x22);
        _dataAllocatedSize = Utilities.ToUInt64LittleEndian(buffer, offset + 0x28);
        _dataRealSize = Utilities.ToUInt64LittleEndian(buffer, offset + 0x30);
        _initializedDataSize = Utilities.ToUInt64LittleEndian(buffer, offset + 0x38);
        // The compressed-size field only exists when the header is extended
        // (data runs start beyond 0x40).
        if ((Flags & (AttributeFlags.Compressed | AttributeFlags.Sparse)) != 0 && _dataRunsOffset > 0x40)
        {
            _compressedSize = Utilities.ToUInt64LittleEndian(buffer, offset + 0x40);
        }
        _dataRuns = new List<DataRun>();
        int pos = _dataRunsOffset;
        while (pos < length)
        {
            DataRun run = new DataRun();
            int len = run.Read(buffer, offset + pos);
            // Length 1 means there was only a header byte (i.e. terminator)
            if (len == 1)
            {
                break;
            }
            _dataRuns.Add(run);
            pos += len;
        }
    }
}
}
| |
//
// Copyright (c) 2004-2020 Jaroslaw Kowalski <[email protected]>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
namespace NLog
{
#if !SILVERLIGHT
using System;
using System.Linq;
using NLog.Internal;
/// <summary>
/// Async version of <see cref="NestedDiagnosticsContext" /> - a logical context structure that keeps a stack.
/// Allows for maintaining scope across asynchronous tasks and call contexts.
/// </summary>
public static class NestedDiagnosticsLogicalContext
{
    /// <summary>
    /// Pushes the specified value on current stack
    /// </summary>
    /// <param name="value">The value to be pushed.</param>
    /// <returns>An instance of the object that implements IDisposable that returns the stack to the previous level when IDisposable.Dispose() is called. To be used with C# using() statement.</returns>
    public static IDisposable Push<T>(T value)
    {
        // Each pushed value becomes a linked-list node whose Parent is the
        // previous top of the stack.
        var parent = GetThreadLocal();
        var current = NestedContext<T>.CreateNestedContext(parent, value);
        SetThreadLocal(current);
        return current;
    }
    /// <summary>
    /// Pushes the specified value on current stack
    /// </summary>
    /// <param name="value">The value to be pushed.</param>
    /// <returns>An instance of the object that implements IDisposable that returns the stack to the previous level when IDisposable.Dispose() is called. To be used with C# using() statement.</returns>
    public static IDisposable PushObject(object value)
    {
        return Push(value);
    }
    /// <summary>
    /// Pops the top message off the NDLC stack.
    /// </summary>
    /// <returns>The top message which is no longer on the stack.</returns>
    /// <remarks>this methods returns a object instead of string, this because of backwards-compatibility</remarks>
    public static object Pop()
    {
        //NLOG 5: return string (breaking change)
        return PopObject();
    }
    /// <summary>
    /// Pops the top message from the NDLC stack.
    /// </summary>
    /// <param name="formatProvider">The <see cref="IFormatProvider"/> to use when converting the value to a string.</param>
    /// <returns>The top message, which is removed from the stack, as a string value.</returns>
    public static string Pop(IFormatProvider formatProvider)
    {
        return FormatHelper.ConvertToString(PopObject() ?? string.Empty, formatProvider);
    }
    /// <summary>
    /// Pops the top message off the current NDLC stack
    /// </summary>
    /// <returns>The object from the top of the NDLC stack, if defined; otherwise <c>null</c>.</returns>
    public static object PopObject()
    {
        // Popping is just moving the thread-local pointer to the parent node.
        var current = GetThreadLocal();
        if (current != null)
            SetThreadLocal(current.Parent);
        return current?.Value;
    }
    /// <summary>
    /// Peeks the top object on the current NDLC stack
    /// </summary>
    /// <returns>The object from the top of the NDLC stack, if defined; otherwise <c>null</c>.</returns>
    public static object PeekObject()
    {
        return PeekContext(false)?.Value;
    }
    /// <summary>
    /// Peeks the current scope, and returns its start time
    /// </summary>
    /// <returns>Scope Creation Time</returns>
    internal static DateTime PeekTopScopeBeginTime()
    {
        return new DateTime(PeekContext(false)?.CreatedTimeUtcTicks ?? DateTime.MinValue.Ticks, DateTimeKind.Utc);
    }
    /// <summary>
    /// Peeks the first scope, and returns its start time
    /// </summary>
    /// <returns>Scope Creation Time</returns>
    internal static DateTime PeekBottomScopeBeginTime()
    {
        return new DateTime(PeekContext(true)?.CreatedTimeUtcTicks ?? DateTime.MinValue.Ticks, DateTimeKind.Utc);
    }
    /// <summary>
    /// Returns the top of the stack, or - when <paramref name="bottomScope"/>
    /// is true - walks the parent chain to return the oldest scope.
    /// </summary>
    private static INestedContext PeekContext(bool bottomScope)
    {
        var current = GetThreadLocal();
        if (current != null)
        {
            if (bottomScope)
            {
                while (current.Parent != null)
                    current = current.Parent;
            }
            return current;
        }
        return null;
    }
    /// <summary>
    /// Clears current stack.
    /// </summary>
    public static void Clear()
    {
        SetThreadLocal(null);
    }
    /// <summary>
    /// Gets all messages on the stack.
    /// </summary>
    /// <returns>Array of strings on the stack.</returns>
    public static string[] GetAllMessages()
    {
        return GetAllMessages(null);
    }
    /// <summary>
    /// Gets all messages from the stack, without removing them.
    /// </summary>
    /// <param name="formatProvider">The <see cref="IFormatProvider"/> to use when converting a value to a string.</param>
    /// <returns>Array of strings.</returns>
    public static string[] GetAllMessages(IFormatProvider formatProvider)
    {
        return GetAllObjects().Select((o) => FormatHelper.ConvertToString(o, formatProvider)).ToArray();
    }
    /// <summary>
    /// Gets all objects on the stack. The objects are not removed from the stack.
    /// </summary>
    /// <returns>Array of objects on the stack.</returns>
    public static object[] GetAllObjects()
    {
        var currentContext = GetThreadLocal();
        if (currentContext == null)
            return ArrayHelper.Empty<object>();
        // FrameLevel of the top node equals the stack depth, so the array can
        // be sized exactly without a second pass.
        int index = 0;
        object[] messages = new object[currentContext.FrameLevel];
        while (currentContext != null)
        {
            messages[index++] = currentContext.Value;
            currentContext = currentContext.Parent;
        }
        return messages;
    }
    // A single node in the stack (stored as a linked list of parent pointers).
    interface INestedContext : IDisposable
    {
        INestedContext Parent { get; }       // Next-older scope, or null at the bottom
        int FrameLevel { get; }              // Depth of this node (1-based)
        object Value { get; }                // The pushed value (unwrapped if serialized)
        long CreatedTimeUtcTicks { get; }    // UTC ticks at push time
    }
#if !NETSTANDARD1_0
    [Serializable]
#endif
    class NestedContext<T> : INestedContext
    {
        public INestedContext Parent { get; }
        public T Value { get; }
        public long CreatedTimeUtcTicks { get; }
        public int FrameLevel { get; }
        // 0 = live, 1 = disposed; guards against double-pop on double-dispose.
        private int _disposed;
        public static INestedContext CreateNestedContext(INestedContext parent, T value)
        {
#if NET4_6 || NETSTANDARD
            return new NestedContext<T>(parent, value);
#else
            // On legacy CallContext, reference-type values must be wrapped so
            // they can flow across AppDomain/remoting boundaries.
            if (typeof(T).IsValueType || Convert.GetTypeCode(value) != TypeCode.Object)
                return new NestedContext<T>(parent, value);
            else
                return new NestedContext<ObjectHandleSerializer>(parent, new ObjectHandleSerializer(value));
#endif
        }
        object INestedContext.Value
        {
            get
            {
#if NET4_6 || NETSTANDARD
                return Value;
#else
                // Unwrap values that were wrapped by CreateNestedContext above.
                object value = Value;
                if (value is ObjectHandleSerializer objectHandle)
                {
                    return objectHandle.Unwrap();
                }
                return value;
#endif
            }
        }
        public NestedContext(INestedContext parent, T value)
        {
            Parent = parent;
            Value = value;
            CreatedTimeUtcTicks = DateTime.UtcNow.Ticks; // Low time resolution, but okay fast
            FrameLevel = parent?.FrameLevel + 1 ?? 1;
        }
        void IDisposable.Dispose()
        {
            // Only the first Dispose pops; subsequent calls are no-ops.
            if (System.Threading.Interlocked.Exchange(ref _disposed, 1) != 1)
            {
                PopObject();
            }
        }
        public override string ToString()
        {
            object value = Value;
            return value?.ToString() ?? "null";
        }
    }
    // Stores the top-of-stack node in AsyncLocal (modern targets) or the
    // logical CallContext (legacy targets) so it flows with async calls.
    private static void SetThreadLocal(INestedContext newValue)
    {
#if NET4_6 || NETSTANDARD
        AsyncNestedDiagnosticsContext.Value = newValue;
#else
        if (newValue == null)
            System.Runtime.Remoting.Messaging.CallContext.FreeNamedDataSlot(NestedDiagnosticsContextKey);
        else
            System.Runtime.Remoting.Messaging.CallContext.LogicalSetData(NestedDiagnosticsContextKey, newValue);
#endif
    }
    // Counterpart of SetThreadLocal: reads the current top-of-stack node.
    private static INestedContext GetThreadLocal()
    {
#if NET4_6 || NETSTANDARD
        return AsyncNestedDiagnosticsContext.Value;
#else
        return System.Runtime.Remoting.Messaging.CallContext.LogicalGetData(NestedDiagnosticsContextKey) as INestedContext;
#endif
    }
#if NET4_6 || NETSTANDARD
    private static readonly System.Threading.AsyncLocal<INestedContext> AsyncNestedDiagnosticsContext = new System.Threading.AsyncLocal<INestedContext>();
#else
    private const string NestedDiagnosticsContextKey = "NLog.AsyncableNestedDiagnosticsContext";
#endif
}
#endif
}
| |
// Visual Studio Shared Project
// Copyright(c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the License); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
// OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
// IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABLITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing
// permissions and limitations under the License.
using System;
using System.Threading;
using Microsoft.VisualStudio.OLE.Interop;
using Microsoft.VisualStudio.Shell.Interop;
using VSConstants = Microsoft.VisualStudio.VSConstants;
namespace Microsoft.VisualStudioTools.Navigation {
/// <summary>
/// Implements a simple library that tracks project symbols, objects etc.
/// </summary>
class Library : IVsSimpleLibrary2 {
    private Guid _guid;
    private _LIB_FLAGS2 _capabilities;
    private LibraryNode _root;
    private uint _updateCount;
    // Dedicated lock target. The previous code locked on 'this', which is an
    // anti-pattern: any holder of a Library reference could take the same lock
    // and deadlock against AddNode/RemoveNode/VisitNodes.
    private readonly object _syncRoot = new object();

    public Library(Guid libraryGuid) {
        _guid = libraryGuid;
        _root = new LibraryNode(null, String.Empty, String.Empty, LibraryNodeType.Package);
    }

    /// <summary>
    /// Capability flags reported to the shell through <see cref="GetLibFlags2"/>.
    /// </summary>
    public _LIB_FLAGS2 LibraryCapabilities {
        get { return _capabilities; }
        set { _capabilities = value; }
    }

    /// <summary>
    /// Adds a top-level node, publishing a fresh root so previously handed-out
    /// roots are never mutated.
    /// </summary>
    internal void AddNode(LibraryNode node) {
        lock (_syncRoot) {
            // re-create root node here because we may have handed out the node before and don't want to mutate its list.
            _root = _root.Clone();
            _root.AddNode(node);
            _updateCount++;
        }
    }

    /// <summary>
    /// Removes a top-level node, publishing a fresh root (see <see cref="AddNode"/>).
    /// </summary>
    internal void RemoveNode(LibraryNode node) {
        lock (_syncRoot) {
            _root = _root.Clone();
            _root.RemoveNode(node);
            _updateCount++;
        }
    }

    #region IVsSimpleLibrary2 Members

    public int AddBrowseContainer(VSCOMPONENTSELECTORDATA[] pcdComponent, ref uint pgrfOptions, out string pbstrComponentAdded) {
        pbstrComponentAdded = null;
        return VSConstants.E_NOTIMPL;
    }

    public int CreateNavInfo(SYMBOL_DESCRIPTION_NODE[] rgSymbolNodes, uint ulcNodes, out IVsNavInfo ppNavInfo) {
        ppNavInfo = null;
        return VSConstants.E_NOTIMPL;
    }

    public int GetBrowseContainersForHierarchy(IVsHierarchy pHierarchy, uint celt, VSBROWSECONTAINER[] rgBrowseContainers, uint[] pcActual) {
        return VSConstants.E_NOTIMPL;
    }

    public int GetGuid(out Guid pguidLib) {
        pguidLib = _guid;
        return VSConstants.S_OK;
    }

    public int GetLibFlags2(out uint pgrfFlags) {
        pgrfFlags = (uint)LibraryCapabilities;
        return VSConstants.S_OK;
    }

    /// <summary>
    /// Returns the requested object list. Supports custom search-list providers,
    /// whole-word member searches of the form "&lt;filename&gt;:&lt;member&gt;",
    /// substring namespace searches, and find-references on known nodes; with no
    /// search criteria it hands out the current root list.
    /// </summary>
    public int GetList2(uint ListType, uint flags, VSOBSEARCHCRITERIA2[] pobSrch, out IVsSimpleObjectList2 ppIVsSimpleObjectList2) {
        if ((flags & (uint)_LIB_LISTFLAGS.LLF_RESOURCEVIEW) != 0) {
            // The resource view is not supported by this library.
            ppIVsSimpleObjectList2 = null;
            return VSConstants.E_NOTIMPL;
        }
        ICustomSearchListProvider listProvider;
        if (pobSrch != null &&
            pobSrch.Length > 0) {
            if ((listProvider = pobSrch[0].pIVsNavInfo as ICustomSearchListProvider) != null) {
                switch ((_LIB_LISTTYPE)ListType) {
                    case _LIB_LISTTYPE.LLT_NAMESPACES:
                        ppIVsSimpleObjectList2 = listProvider.GetSearchList();
                        break;
                    default:
                        ppIVsSimpleObjectList2 = null;
                        return VSConstants.E_FAIL;
                }
            } else {
                if (pobSrch[0].eSrchType == VSOBSEARCHTYPE.SO_ENTIREWORD && ListType == (uint)_LIB_LISTTYPE.LLT_MEMBERS) {
                    // Whole-word member search scoped by filename: "<filename>:<member>".
                    string srchText = pobSrch[0].szName;
                    int colonIndex;
                    if ((colonIndex = srchText.LastIndexOf(':')) != -1) {
                        // Reuse the already-computed colonIndex instead of scanning the string again.
                        string filename = srchText.Substring(0, colonIndex);
                        foreach (ProjectLibraryNode project in _root.Children) {
                            foreach (var item in project.Children) {
                                if (item.FullName == filename) {
                                    ppIVsSimpleObjectList2 = item.DoSearch(pobSrch[0]);
                                    if (ppIVsSimpleObjectList2 != null) {
                                        return VSConstants.S_OK;
                                    }
                                }
                            }
                        }
                    }
                    ppIVsSimpleObjectList2 = null;
                    return VSConstants.E_FAIL;
                } else if (pobSrch[0].eSrchType == VSOBSEARCHTYPE.SO_SUBSTRING && ListType == (uint)_LIB_LISTTYPE.LLT_NAMESPACES) {
                    // Substring search: flatten every matching node into a synthetic result package.
                    var lib = new LibraryNode(null, "Search results " + pobSrch[0].szName, "Search results " + pobSrch[0].szName, LibraryNodeType.Package);
                    foreach (var item in SearchNodes(pobSrch[0], new SimpleObjectList<LibraryNode>(), _root).Children) {
                        lib.Children.Add(item);
                    }
                    ppIVsSimpleObjectList2 = lib;
                    return VSConstants.S_OK;
                } else if ((pobSrch[0].grfOptions & (uint)_VSOBSEARCHOPTIONS.VSOBSO_LOOKINREFS) != 0
                    && ListType == (uint)_LIB_LISTTYPE.LLT_HIERARCHY) {
                    // "Find references" against a node we previously handed out.
                    LibraryNode node = pobSrch[0].pIVsNavInfo as LibraryNode;
                    if (node != null) {
                        var refs = node.FindReferences();
                        if (refs != null) {
                            ppIVsSimpleObjectList2 = refs;
                            return VSConstants.S_OK;
                        }
                    }
                }
                ppIVsSimpleObjectList2 = null;
                return VSConstants.E_FAIL;
            }
        } else {
            // No search criteria: expose the current (immutable) root list.
            ppIVsSimpleObjectList2 = _root as IVsSimpleObjectList2;
        }
        return VSConstants.S_OK;
    }

    // Depth-first walk of the node tree; clones of case-insensitive name matches
    // are accumulated into 'list', which is also the return value.
    private static SimpleObjectList<LibraryNode> SearchNodes(VSOBSEARCHCRITERIA2 srch, SimpleObjectList<LibraryNode> list, LibraryNode curNode) {
        foreach (var child in curNode.Children) {
            if (child.Name.IndexOf(srch.szName, StringComparison.OrdinalIgnoreCase) != -1) {
                list.Children.Add(child.Clone(child.Name));
            }
            SearchNodes(srch, list, child);
        }
        return list;
    }

    public void VisitNodes(ILibraryNodeVisitor visitor, CancellationToken ct = default(CancellationToken)) {
        lock (_syncRoot) {
            _root.Visit(visitor, ct);
        }
    }

    public int GetSeparatorStringWithOwnership(out string pbstrSeparator) {
        pbstrSeparator = ".";
        return VSConstants.S_OK;
    }

    public int GetSupportedCategoryFields2(int Category, out uint pgrfCatField) {
        pgrfCatField = (uint)_LIB_CATEGORY2.LC_HIERARCHYTYPE | (uint)_LIB_CATEGORY2.LC_PHYSICALCONTAINERTYPE;
        return VSConstants.S_OK;
    }

    public int LoadState(IStream pIStream, LIB_PERSISTTYPE lptType) {
        // No state is persisted for this library.
        return VSConstants.S_OK;
    }

    public int RemoveBrowseContainer(uint dwReserved, string pszLibName) {
        return VSConstants.E_NOTIMPL;
    }

    public int SaveState(IStream pIStream, LIB_PERSISTTYPE lptType) {
        // No state is persisted for this library.
        return VSConstants.S_OK;
    }

    public int UpdateCounter(out uint pCurUpdate) {
        pCurUpdate = _updateCount;
        return VSConstants.S_OK;
    }

    public void Update() {
        // NOTE(review): matches the original behavior of not taking _syncRoot here;
        // presumably Update is only called from a single (UI) thread — verify.
        _updateCount++;
        _root.Update();
    }

    #endregion
}
}
| |
//
// PresentationFrameworkCollection.cs: provides a wrapper to the unmanaged collection class
//
// Contact:
// Moonlight List ([email protected])
//
// Copyright 2007, 2008 Novell, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
#pragma warning disable 67 // "The event 'E' is never used" shown for ItemsChanged
using Mono;
using System;
using System.Windows;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
namespace System.Windows {
// Internal change-notification kinds for the native collection wrapper.
// NOTE(review): Clearing/Cleared presumably bracket a Clear operation
// (before/after); the enum is not consumed within this file — verify at call sites.
internal enum CollectionChangedAction {
    Add,
    Remove,
    Replace,
    Clearing,
    Cleared,
}
/// <summary>
/// Generic managed wrapper over an unmanaged (native) collection. Public
/// mutators validate read-only state and delegate to virtual *Impl methods,
/// which cross into native code via NativeMethods.collection_*; reads are
/// served from a managed mirror list.
/// </summary>
public abstract partial class PresentationFrameworkCollection<T> : DependencyObject, INativeCollectionWrapper, IList<T>, IList {
    // When true, value-type items would be boxed before being handed to the
    // native side; passed to the *Impl overloads as 'boxValueTypes'.
    const bool BoxValueTypes = false;

    public static readonly System.Windows.DependencyProperty CountProperty =
        DependencyProperty.Lookup (Kind.COLLECTION, "Count", typeof (double)); // <- double is not a typo

    // Managed mirror of the native collection, exposed through
    // INativeCollectionWrapper.ManagedList.
    // NOTE(review): readers (indexer getter, Count, GetEnumerator) use this list
    // while writers go through the native collection_* calls — presumably the
    // native side keeps the mirror in sync via ManagedList; verify in the
    // unmanaged wrapper code.
    List<T> managedList = new List<T> ();

#if HEAPVIZ
    // Heap-visualization support: reports managed references held by this collection.
    internal override void AccumulateManagedRefs (List<HeapRef> refs)
    {
        for (int i = 0; i < managedList.Count; i ++) {
            var obj = managedList[i];
            if (typeof (INativeEventObjectWrapper).IsAssignableFrom (obj.GetType()))
                refs.Add (new HeapRef (true, (INativeEventObjectWrapper)obj, string.Format ("[{0}]", i)));
        }
        base.AccumulateManagedRefs (refs);
    }
#endif

    IList INativeCollectionWrapper.ManagedList {
        get { return managedList; }
    }

    //
    // Non-generic IList bridge: casts to T and forwards to the typed members.
    //

    // NOTE(review): IList.Add is specified to return the index at which the new
    // item was inserted; returning managedList.Count assumes the mirror list has
    // already grown to include the new item — confirm against the native sync code.
    int IList.Add (object value)
    {
        Add ((T)value);
        return managedList.Count;
    }
    void IList.Remove (object value)
    {
        Remove ((T) value);
    }
    void IList.Insert (int index, object value)
    {
        Insert (index, (T)value);
    }
    object IList.this [int index] {
        get { return this[index]; }
        set { this[index] = (T)value; }
    }
    bool IList.Contains (object value)
    {
        return ((IList) this).IndexOf (value) != -1;
    }
    int IList.IndexOf (object value)
    {
        return IndexOf ((T) value);
    }

    //
    // Typed public surface: each mutator enforces the read-only check and then
    // defers to the (virtual) *Impl so subclasses can specialize behavior.
    //

    public void Clear ()
    {
        ReadOnlyCheck ();
        ClearImpl ();
    }
    public void RemoveAt (int index)
    {
        ReadOnlyCheck ();
        RemoveAtImpl (index);
    }
    public void Add (T value)
    {
        ReadOnlyCheck ();
        AddImpl (value);
    }
    public void Insert (int index, T value)
    {
        ReadOnlyCheck ();
        InsertImpl (index, value);
    }
    public bool Remove (T value)
    {
        ReadOnlyCheck ();
        return RemoveImpl (value);
    }
    public T this [int index] {
        // Reads come from the managed mirror; writes go through the native setter.
        get {
            return managedList[index];
        }
        set {
            ReadOnlyCheck ();
            SetItemImpl (index, value);
        }
    }
    public bool Contains (T value)
    {
        return (IndexOfImpl (value) != -1);
    }
    public int IndexOf (T value)
    {
        return IndexOfImpl (value);
    }

    // Throws when the collection reports itself read-only (see IsReadOnlyImpl).
    private void ReadOnlyCheck ()
    {
        if (IsReadOnly)
            throw new InvalidOperationException ("the collection is readonly");
    }

    // Most types that inherit from this throw ArgumentNullException when a null
    // value is used — except for ItemCollection, which overrides this check.
    internal virtual bool NullCheck (NotifyCollectionChangedAction action, T value)
    {
        bool result = (value == null);
        if (result && (action == NotifyCollectionChangedAction.Add))
            throw new ArgumentNullException ();
        return result;
    }

    // Raised just before the native collection is cleared (see ClearImpl).
    internal event EventHandler Clearing;
    // Raised after each successful mutation (add/insert/remove/replace/reset).
    internal event NotifyCollectionChangedEventHandler ItemsChanged;

    //
    // ICollection members
    //

    public int Count {
        get { return managedList.Count; }
    }
    public void CopyTo (Array array, int index)
    {
        if (array == null)
            throw new ArgumentNullException ("array");
        if (index < 0)
            throw new ArgumentOutOfRangeException ("index");
        int n = Count;
        for (int i = 0; i < n; i++)
            array.SetValue (((IList) this)[i], index + i);
    }
    public void CopyTo (T [] array, int index)
    {
        if (array == null)
            throw new ArgumentNullException ("array");
        if (index < 0)
            throw new ArgumentOutOfRangeException ("index");
        int n = Count;
        for (int i = 0; i < n; i++)
            array[index + i] = this[i];
    }
    public object SyncRoot {
        get {
            return this;
        }
    }
    public bool IsSynchronized {
        get {
            return false;
        }
    }
    public IEnumerator<T> GetEnumerator ()
    {
        // Enumerates the managed mirror, not the native collection.
        return managedList.GetEnumerator ();
    }
    IEnumerator IEnumerable.GetEnumerator ()
    {
        return GetEnumerator ();
    }
    public bool IsFixedSize {
        get {
            return false;
        }
    }
    public bool IsReadOnly {
        get {
            return IsReadOnlyImpl ();
        }
    }

    // The internal implementations: these are the only paths that touch the
    // native collection and raise change notifications.

    internal void ClearImpl ()
    {
        // Notify listeners before the native clear, then raise a Reset afterwards.
        var h = Clearing;
        if (h != null)
            h (this, EventArgs.Empty);
        NativeMethods.collection_clear (native);
        ItemsChanged.Raise (this, NotifyCollectionChangedAction.Reset);
    }
    internal virtual void AddImpl (T value)
    {
        AddImpl (value, BoxValueTypes);
    }
    internal void AddImpl (T value, bool boxValueTypes)
    {
        if (NullCheck (NotifyCollectionChangedAction.Add, value))
            throw new ArgumentNullException ();
        int index;
        // Marshal the managed value to a native Value for the duration of the call.
        using (var val = Value.FromObject (value, boxValueTypes)) {
            var v = val;
            index = NativeMethods.collection_add (native, ref v);
        }
        ItemsChanged.Raise (this, NotifyCollectionChangedAction.Add, value, index);
    }
    internal virtual void InsertImpl (int index, T value)
    {
        InsertImpl (index, value, BoxValueTypes);
    }
    internal void InsertImpl (int index, T value, bool boxValueTypes)
    {
        if (NullCheck (NotifyCollectionChangedAction.Add, value))
            throw new ArgumentNullException ();
        if (index < 0)
            throw new ArgumentOutOfRangeException ();
        using (var val = Value.FromObject (value, boxValueTypes)) {
            var v = val;
            NativeMethods.collection_insert (native, index, ref v);
        }
        ItemsChanged.Raise (this, NotifyCollectionChangedAction.Add, value, index);
    }
    internal bool RemoveImpl (T value)
    {
        // Null is tolerated here (NullCheck only throws for Add); absent values
        // simply report failure.
        if (NullCheck (NotifyCollectionChangedAction.Remove, value))
            return false;
        int index = IndexOfImpl (value);
        if (index == -1)
            return false;
        NativeMethods.collection_remove_at (native, index);
        ItemsChanged.Raise (this, NotifyCollectionChangedAction.Remove, value, index);
        return true;
    }
    internal void RemoveAtImpl (int index)
    {
        // Capture the outgoing item first so it can be reported in the event.
        T value = GetItemImpl (index);
        NativeMethods.collection_remove_at (native, index);
        ItemsChanged.Raise (this, NotifyCollectionChangedAction.Remove, value, index);
    }
    internal T GetItemImpl (int index)
    {
        IntPtr val = NativeMethods.collection_get_value_at (native, index);
        if (val == IntPtr.Zero)
            return default(T);
        return (T) Value.ToObject (typeof (T), val);
    }
    internal virtual void SetItemImpl (int index, T value)
    {
        SetItemImpl (index, value, BoxValueTypes);
    }
    internal void SetItemImpl (int index, T value, bool boxValueTypes)
    {
        // Capture the replaced item so the Replace notification carries old and new.
        T old = GetItemImpl (index);
        using (var val = Value.FromObject (value, boxValueTypes)) {
            var v = val;
            NativeMethods.collection_set_value_at (native, index, ref v);
        }
        ItemsChanged.Raise (this, NotifyCollectionChangedAction.Replace, value, old, index);
    }
    internal virtual int IndexOfImpl (T value)
    {
        return IndexOfImpl (value, BoxValueTypes);
    }
    internal int IndexOfImpl (T value, bool boxValueTypes)
    {
        if (value == null)
            return -1;
        int rv;
        using (var val = Value.FromObject (value, boxValueTypes)) {
            var v = val;
            rv = NativeMethods.collection_index_of (native, ref v);
        }
        return rv;
    }
    // Subclasses override to report a read-only collection.
    internal virtual bool IsReadOnlyImpl ()
    {
        return false;
    }
}
}
| |
using System;
using NUnit.Framework;
using System.Collections.ObjectModel;
namespace OpenQA.Selenium
{
/// <summary>
/// Exercises IJavaScriptExecutor.ExecuteAsyncScript: return-value marshaling
/// (primitives, arrays, elements), timeout behavior, error propagation, and
/// interaction with alerts. Scripts signal completion by invoking the callback
/// supplied as the last script argument.
/// </summary>
[TestFixture]
public class ExecutingAsyncJavascriptTest : DriverTestFixture
{
    // Executor obtained from the driver under test; assigned in SetUpEnvironment.
    private IJavaScriptExecutor executor;
    // Original async-script timeout, restored in TearDownEnvironment.
    private TimeSpan originalTimeout = TimeSpan.MinValue;
    [SetUp]
    public void SetUpEnvironment()
    {
        if (driver is IJavaScriptExecutor)
        {
            executor = (IJavaScriptExecutor)driver;
        }
        try
        {
            originalTimeout = driver.Manage().Timeouts().AsynchronousJavaScript;
        }
        catch (NotImplementedException)
        {
            // For driver implementations that do not support getting timeouts,
            // just set a default 30-second timeout.
            originalTimeout = TimeSpan.FromSeconds(30);
        }
        // Tests default to a short 1-second async timeout; individual tests
        // raise it as needed.
        driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromSeconds(1);
    }
    [TearDown]
    public void TearDownEnvironment()
    {
        driver.Manage().Timeouts().AsynchronousJavaScript = originalTimeout;
    }
    [Test]
    public void ShouldNotTimeoutIfCallbackInvokedImmediately()
    {
        driver.Url = ajaxyPage;
        object result = executor.ExecuteAsyncScript("arguments[arguments.length - 1](123);");
        // JavaScript numbers come back as long.
        Assert.That(result, Is.InstanceOf<long>());
        Assert.That((long)result, Is.EqualTo(123));
    }
    [Test]
    public void ShouldBeAbleToReturnJavascriptPrimitivesFromAsyncScripts_NeitherNullNorUndefined()
    {
        driver.Url = ajaxyPage;
        Assert.That((long)executor.ExecuteAsyncScript("arguments[arguments.length - 1](123);"), Is.EqualTo(123));
        driver.Url = ajaxyPage;
        Assert.That(executor.ExecuteAsyncScript("arguments[arguments.length - 1]('abc');").ToString(), Is.EqualTo("abc"));
        driver.Url = ajaxyPage;
        Assert.That((bool)executor.ExecuteAsyncScript("arguments[arguments.length - 1](false);"), Is.False);
        driver.Url = ajaxyPage;
        Assert.That((bool)executor.ExecuteAsyncScript("arguments[arguments.length - 1](true);"), Is.True);
    }
    [Test]
    public void ShouldBeAbleToReturnJavascriptPrimitivesFromAsyncScripts_NullAndUndefined()
    {
        driver.Url = ajaxyPage;
        // Both null and undefined marshal to .NET null.
        Assert.That(executor.ExecuteAsyncScript("arguments[arguments.length - 1](null);"), Is.Null);
        Assert.That(executor.ExecuteAsyncScript("arguments[arguments.length - 1]();"), Is.Null);
    }
    [Test]
    public void ShouldBeAbleToReturnAnArrayLiteralFromAnAsyncScript()
    {
        driver.Url = ajaxyPage;
        object result = executor.ExecuteAsyncScript("arguments[arguments.length - 1]([]);");
        Assert.That(result, Is.Not.Null);
        Assert.That(result, Is.InstanceOf<ReadOnlyCollection<object>>());
        Assert.That((ReadOnlyCollection<object>)result, Has.Count.EqualTo(0));
    }
    [Test]
    public void ShouldBeAbleToReturnAnArrayObjectFromAnAsyncScript()
    {
        driver.Url = ajaxyPage;
        object result = executor.ExecuteAsyncScript("arguments[arguments.length - 1](new Array());");
        Assert.That(result, Is.Not.Null);
        Assert.That(result, Is.InstanceOf<ReadOnlyCollection<object>>());
        Assert.That((ReadOnlyCollection<object>)result, Has.Count.EqualTo(0));
    }
    [Test]
    public void ShouldBeAbleToReturnArraysOfPrimitivesFromAsyncScripts()
    {
        driver.Url = ajaxyPage;
        object result = executor.ExecuteAsyncScript("arguments[arguments.length - 1]([null, 123, 'abc', true, false]);");
        Assert.That(result, Is.Not.Null);
        Assert.That(result, Is.InstanceOf<ReadOnlyCollection<object>>());
        ReadOnlyCollection<object> resultList = result as ReadOnlyCollection<object>;
        Assert.That(resultList.Count, Is.EqualTo(5));
        Assert.That(resultList[0], Is.Null);
        Assert.That((long)resultList[1], Is.EqualTo(123));
        Assert.That(resultList[2].ToString(), Is.EqualTo("abc"));
        Assert.That((bool)resultList[3], Is.True);
        Assert.That((bool)resultList[4], Is.False);
    }
    [Test]
    public void ShouldBeAbleToReturnWebElementsFromAsyncScripts()
    {
        driver.Url = ajaxyPage;
        object result = executor.ExecuteAsyncScript("arguments[arguments.length - 1](document.body);");
        Assert.That(result, Is.InstanceOf<IWebElement>());
        Assert.That(((IWebElement)result).TagName.ToLower(), Is.EqualTo("body"));
    }
    [Test]
    public void ShouldBeAbleToReturnArraysOfWebElementsFromAsyncScripts()
    {
        driver.Url = ajaxyPage;
        object result = executor.ExecuteAsyncScript("arguments[arguments.length - 1]([document.body, document.body]);");
        Assert.That(result, Is.Not.Null);
        Assert.That(result, Is.InstanceOf<ReadOnlyCollection<IWebElement>>());
        ReadOnlyCollection<IWebElement> resultsList = (ReadOnlyCollection<IWebElement>)result;
        Assert.That(resultsList, Has.Count.EqualTo(2));
        Assert.That(resultsList[0], Is.InstanceOf<IWebElement>());
        Assert.That(resultsList[1], Is.InstanceOf<IWebElement>());
        // The same DOM node was returned twice, so the two elements must compare equal.
        Assert.That(((IWebElement)resultsList[0]).TagName.ToLower(), Is.EqualTo("body"));
        Assert.That(((IWebElement)resultsList[0]), Is.EqualTo((IWebElement)resultsList[1]));
    }
    [Test]
    public void ShouldTimeoutIfScriptDoesNotInvokeCallback()
    {
        driver.Url = ajaxyPage;
        // A plain return does not invoke the completion callback, so the call times out.
        Assert.That(() => executor.ExecuteAsyncScript("return 1 + 2;"), Throws.InstanceOf<WebDriverTimeoutException>());
    }
    [Test]
    public void ShouldTimeoutIfScriptDoesNotInvokeCallbackWithAZeroTimeout()
    {
        driver.Url = ajaxyPage;
        Assert.That(() => executor.ExecuteAsyncScript("window.setTimeout(function() {}, 0);"), Throws.InstanceOf<WebDriverTimeoutException>());
    }
    [Test]
    public void ShouldNotTimeoutIfScriptCallsbackInsideAZeroTimeout()
    {
        driver.Url = ajaxyPage;
        executor.ExecuteAsyncScript(
            "var callback = arguments[arguments.length - 1];" +
            "window.setTimeout(function() { callback(123); }, 0)");
    }
    [Test]
    public void ShouldTimeoutIfScriptDoesNotInvokeCallbackWithLongTimeout()
    {
        // Callback is scheduled after the configured 500ms timeout expires.
        driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromMilliseconds(500);
        driver.Url = ajaxyPage;
        Assert.That(() => executor.ExecuteAsyncScript(
            "var callback = arguments[arguments.length - 1];" +
            "window.setTimeout(callback, 1500);"), Throws.InstanceOf<WebDriverTimeoutException>());
    }
    [Test]
    public void ShouldDetectPageLoadsWhileWaitingOnAnAsyncScriptAndReturnAnError()
    {
        driver.Url = ajaxyPage;
        Assert.That(() => executor.ExecuteAsyncScript("window.location = '" + dynamicPage + "';"), Throws.InstanceOf<WebDriverException>());
    }
    [Test]
    public void ShouldCatchErrorsWhenExecutingInitialScript()
    {
        driver.Url = ajaxyPage;
        Assert.That(() => executor.ExecuteAsyncScript("throw Error('you should catch this!');"), Throws.InstanceOf<WebDriverException>());
    }
    [Test]
    public void ShouldNotTimeoutWithMultipleCallsTheFirstOneBeingSynchronous()
    {
        driver.Url = ajaxyPage;
        driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromMilliseconds(1000);
        Assert.That((bool)executor.ExecuteAsyncScript("arguments[arguments.length - 1](true);"), Is.True);
        Assert.That((bool)executor.ExecuteAsyncScript("var cb = arguments[arguments.length - 1]; window.setTimeout(function(){cb(true);}, 9);"), Is.True);
    }
    [Test]
    [IgnoreBrowser(Browser.Chrome, ".NET language bindings do not properly parse JavaScript stack trace")]
    [IgnoreBrowser(Browser.Edge, ".NET language bindings do not properly parse JavaScript stack trace")]
    [IgnoreBrowser(Browser.Firefox, ".NET language bindings do not properly parse JavaScript stack trace")]
    [IgnoreBrowser(Browser.IE, ".NET language bindings do not properly parse JavaScript stack trace")]
    [IgnoreBrowser(Browser.EdgeLegacy, ".NET language bindings do not properly parse JavaScript stack trace")]
    [IgnoreBrowser(Browser.Safari, ".NET language bindings do not properly parse JavaScript stack trace")]
    public void ShouldCatchErrorsWithMessageAndStacktraceWhenExecutingInitialScript()
    {
        driver.Url = ajaxyPage;
        string js = "function functionB() { throw Error('errormessage'); };"
            + "function functionA() { functionB(); };"
            + "functionA();";
        Exception ex = Assert.Catch(() => executor.ExecuteAsyncScript(js));
        Assert.That(ex, Is.InstanceOf<WebDriverException>());
        Assert.That(ex.Message.Contains("errormessage"));
        // The JS frame name should survive into the .NET stack trace.
        Assert.That(ex.StackTrace.Contains("functionB"));
    }
    [Test]
    public void ShouldBeAbleToExecuteAsynchronousScripts()
    {
        // Reset the timeout to the 30-second default instead of zero.
        driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromSeconds(30);
        driver.Url = ajaxyPage;
        IWebElement typer = driver.FindElement(By.Name("typer"));
        typer.SendKeys("bob");
        Assert.AreEqual("bob", typer.GetAttribute("value"));
        driver.FindElement(By.Id("red")).Click();
        driver.FindElement(By.Name("submit")).Click();
        Assert.AreEqual(1, GetNumberOfDivElements(), "There should only be 1 DIV at this point, which is used for the butter message");
        driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromSeconds(10);
        string text = (string)executor.ExecuteAsyncScript(
            "var callback = arguments[arguments.length - 1];"
            + "window.registerListener(arguments[arguments.length - 1]);");
        Assert.AreEqual("bob", text);
        Assert.AreEqual("", typer.GetAttribute("value"));
        Assert.AreEqual(2, GetNumberOfDivElements(), "There should be 1 DIV (for the butter message) + 1 DIV (for the new label)");
    }
    [Test]
    public void ShouldBeAbleToPassMultipleArgumentsToAsyncScripts()
    {
        driver.Url = ajaxyPage;
        long result = (long)executor.ExecuteAsyncScript("arguments[arguments.length - 1](arguments[0] + arguments[1]);", 1, 2);
        Assert.AreEqual(3, result);
    }
    [Test]
    public void ShouldBeAbleToMakeXMLHttpRequestsAndWaitForTheResponse()
    {
        string script =
            "var url = arguments[0];" +
            "var callback = arguments[arguments.length - 1];" +
            // Adapted from http://www.quirksmode.org/js/xmlhttp.html
            "var XMLHttpFactories = [" +
            "  function () {return new XMLHttpRequest()}," +
            "  function () {return new ActiveXObject('Msxml2.XMLHTTP')}," +
            "  function () {return new ActiveXObject('Msxml3.XMLHTTP')}," +
            "  function () {return new ActiveXObject('Microsoft.XMLHTTP')}" +
            "];" +
            "var xhr = false;" +
            "while (!xhr && XMLHttpFactories.length) {" +
            "  try {" +
            "    xhr = XMLHttpFactories.shift().call();" +
            "  } catch (e) {}" +
            "}" +
            "if (!xhr) throw Error('unable to create XHR object');" +
            "xhr.open('GET', url, true);" +
            "xhr.onreadystatechange = function() {" +
            "  if (xhr.readyState == 4) callback(xhr.responseText);" +
            "};" +
            "xhr.send();";
        driver.Url = ajaxyPage;
        driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromSeconds(3);
        string response = (string)executor.ExecuteAsyncScript(script, sleepingPage + "?time=2");
        Assert.AreEqual("<html><head><title>Done</title></head><body>Slept for 2s</body></html>", response.Trim());
    }
    [Test]
    [IgnoreBrowser(Browser.Opera, "Does not handle async alerts")]
    public void ThrowsIfScriptTriggersAlert()
    {
        driver.Url = simpleTestPage;
        driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromSeconds(5);
        ((IJavaScriptExecutor)driver).ExecuteAsyncScript(
            "setTimeout(arguments[0], 200) ; setTimeout(function() { window.alert('Look! An alert!'); }, 50);");
        Assert.That(() => driver.Title, Throws.InstanceOf<UnhandledAlertException>());
        string title = driver.Title;
    }
    [Test]
    [IgnoreBrowser(Browser.Opera, "Does not handle async alerts")]
    public void ThrowsIfAlertHappensDuringScript()
    {
        driver.Url = slowLoadingAlertPage;
        driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromSeconds(5);
        ((IJavaScriptExecutor)driver).ExecuteAsyncScript("setTimeout(arguments[0], 1000);");
        Assert.That(() => driver.Title, Throws.InstanceOf<UnhandledAlertException>());
        // Shouldn't throw
        string title = driver.Title;
    }
    [Test]
    [IgnoreBrowser(Browser.Opera, "Does not handle async alerts")]
    public void ThrowsIfScriptTriggersAlertWhichTimesOut()
    {
        driver.Url = simpleTestPage;
        driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromSeconds(5);
        ((IJavaScriptExecutor)driver)
            .ExecuteAsyncScript("setTimeout(function() { window.alert('Look! An alert!'); }, 50);");
        Assert.That(() => driver.Title, Throws.InstanceOf<UnhandledAlertException>());
        // Shouldn't throw
        string title = driver.Title;
    }
    [Test]
    [IgnoreBrowser(Browser.Opera, "Does not handle async alerts")]
    public void ThrowsIfAlertHappensDuringScriptWhichTimesOut()
    {
        driver.Url = slowLoadingAlertPage;
        driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromSeconds(5);
        ((IJavaScriptExecutor)driver).ExecuteAsyncScript("");
        Assert.That(() => driver.Title, Throws.InstanceOf<UnhandledAlertException>());
        // Shouldn't throw
        string title = driver.Title;
    }
    [Test]
    [IgnoreBrowser(Browser.EdgeLegacy, "Driver chooses not to return text from unhandled alert")]
    [IgnoreBrowser(Browser.Firefox, "Driver chooses not to return text from unhandled alert")]
    [IgnoreBrowser(Browser.Opera, "Does not handle async alerts")]
    public void IncludesAlertTextInUnhandledAlertException()
    {
        driver.Manage().Timeouts().AsynchronousJavaScript = TimeSpan.FromSeconds(5);
        string alertText = "Look! An alert!";
        ((IJavaScriptExecutor)driver).ExecuteAsyncScript(
            "setTimeout(arguments[0], 200) ; setTimeout(function() { window.alert('" + alertText
            + "'); }, 50);");
        Assert.That(() => driver.Title, Throws.InstanceOf<UnhandledAlertException>().With.Property("AlertText").EqualTo(alertText));
    }
    // Counts DIV elements on the current page via synchronous script execution.
    private long GetNumberOfDivElements()
    {
        IJavaScriptExecutor jsExecutor = driver as IJavaScriptExecutor;
        // Selenium does not support "findElements" yet, so we have to do this through a script.
        return (long)jsExecutor.ExecuteScript("return document.getElementsByTagName('div').length;");
    }
}
}
| |
/* -----------------------------------------------------------------------------
* Rule_unreserved.cs
* -----------------------------------------------------------------------------
*
* Producer : com.parse2.aparse.Parser 2.5
* Produced : Sat Dec 18 07:35:23 GMT 2021
*
* -----------------------------------------------------------------------------
*/
using System;
using System.Collections.Generic;
sealed internal class Rule_unreserved:Rule
{
    private Rule_unreserved(String spelling, List<Rule> rules) :
        base(spelling, rules)
    {
    }

    internal override Object Accept(Visitor visitor)
    {
        return visitor.Visit(this);
    }

    // Parses the ABNF production:
    //   unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
    // Each alternative is tried from the same start position; the best
    // (longest) match wins. Returns null and rewinds the context on failure.
    public static Rule_unreserved Parse(ParserContext context)
    {
        context.Push("unreserved");

        int start = context.index;
        ParserAlternative whole = new ParserAlternative(start);

        List<ParserAlternative> candidates = new List<ParserAlternative>();
        TryTerm(context, candidates, c => Rule_ALPHA.Parse(c));
        TryTerm(context, candidates, c => Rule_DIGIT.Parse(c));
        TryTerm(context, candidates, c => Terminal_StringValue.Parse(c, "-"));
        TryTerm(context, candidates, c => Terminal_StringValue.Parse(c, "."));
        TryTerm(context, candidates, c => Terminal_StringValue.Parse(c, "_"));
        TryTerm(context, candidates, c => Terminal_StringValue.Parse(c, "~"));

        ParserAlternative best = ParserAlternative.GetBest(candidates);
        bool parsed = best != null;

        Rule_unreserved rule = null;
        if (parsed)
        {
            // Commit the winning alternative and advance the context.
            whole.Add(best.rules, best.end);
            context.index = best.end;
            rule = new Rule_unreserved(context.text.Substring(whole.start, whole.end - whole.start), whole.rules);
        }
        else
        {
            // No alternative matched: restore the original position.
            context.index = start;
        }

        context.Pop("unreserved", parsed);
        return rule;
    }

    // Attempts a single-term alternative: parses one term from the current
    // position, records it as a candidate when it matches, and always rewinds
    // the context so the next alternative starts from the same place.
    private static void TryTerm(ParserContext context, List<ParserAlternative> candidates, Func<ParserContext, Rule> parseTerm)
    {
        int start = context.index;
        ParserAlternative alternative = new ParserAlternative(start);
        Rule term = parseTerm(context);
        if (term != null)
        {
            alternative.Add(term, context.index);
            candidates.Add(alternative);
        }
        context.index = start;
    }
}
/* -----------------------------------------------------------------------------
* eof
* -----------------------------------------------------------------------------
*/
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure;
using Microsoft.Azure.Management.ApiManagement;
using Microsoft.Azure.Management.ApiManagement.SmapiModels;
namespace Microsoft.Azure.Management.ApiManagement
{
/// <summary>
/// .Net client wrapper for the REST API for Azure ApiManagement Service
/// </summary>
public static partial class ProductsOperationsExtensions
{
/// <summary>
/// Create new product.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IProductsOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='pid'>
/// Required. Identifier of the product.
/// </param>
/// <param name='parameters'>
/// Required. Create or update parameters.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static AzureOperationResponse Create(this IProductsOperations operations, string resourceGroupName, string serviceName, string pid, ProductCreateParameters parameters)
{
    // Run the asynchronous implementation on the thread pool and block for its
    // result; the pool hop avoids deadlocking callers that own a synchronization
    // context (same pattern as the generated code).
    Task<Task<AzureOperationResponse>> wrapped = Task.Factory.StartNew(
        (object state) => ((IProductsOperations)state).CreateAsync(resourceGroupName, serviceName, pid, parameters),
        operations,
        CancellationToken.None,
        TaskCreationOptions.None,
        TaskScheduler.Default);
    return wrapped.Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Create new product.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IProductsOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='pid'>
/// Required. Identifier of the product.
/// </param>
/// <param name='parameters'>
/// Required. Create or update parameters.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static Task<AzureOperationResponse> CreateAsync(this IProductsOperations operations, string resourceGroupName, string serviceName, string pid, ProductCreateParameters parameters)
{
return operations.CreateAsync(resourceGroupName, serviceName, pid, parameters, CancellationToken.None);
}
/// <summary>
/// Delete product.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IProductsOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='pid'>
/// Required. Identifier of the product.
/// </param>
/// <param name='etag'>
/// Required. ETag.
/// </param>
/// <param name='deleteSubscriptions'>
/// Required. Delete existing subscriptions to the product ot not.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static AzureOperationResponse Delete(this IProductsOperations operations, string resourceGroupName, string serviceName, string pid, string etag, bool deleteSubscriptions)
{
return Task.Factory.StartNew((object s) =>
{
return ((IProductsOperations)s).DeleteAsync(resourceGroupName, serviceName, pid, etag, deleteSubscriptions);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Delete product.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IProductsOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='pid'>
/// Required. Identifier of the product.
/// </param>
/// <param name='etag'>
/// Required. ETag.
/// </param>
/// <param name='deleteSubscriptions'>
/// Required. Delete existing subscriptions to the product ot not.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static Task<AzureOperationResponse> DeleteAsync(this IProductsOperations operations, string resourceGroupName, string serviceName, string pid, string etag, bool deleteSubscriptions)
{
return operations.DeleteAsync(resourceGroupName, serviceName, pid, etag, deleteSubscriptions, CancellationToken.None);
}
/// <summary>
/// Get specific product.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IProductsOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='pid'>
/// Required. Identifier of the product.
/// </param>
/// <returns>
/// Get Product operation response details.
/// </returns>
public static ProductGetResponse Get(this IProductsOperations operations, string resourceGroupName, string serviceName, string pid)
{
return Task.Factory.StartNew((object s) =>
{
return ((IProductsOperations)s).GetAsync(resourceGroupName, serviceName, pid);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get specific product.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IProductsOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='pid'>
/// Required. Identifier of the product.
/// </param>
/// <returns>
/// Get Product operation response details.
/// </returns>
public static Task<ProductGetResponse> GetAsync(this IProductsOperations operations, string resourceGroupName, string serviceName, string pid)
{
return operations.GetAsync(resourceGroupName, serviceName, pid, CancellationToken.None);
}
/// <summary>
/// List all products.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IProductsOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='query'>
/// Optional.
/// </param>
/// <returns>
/// List Products operation response details.
/// </returns>
public static ProductListResponse List(this IProductsOperations operations, string resourceGroupName, string serviceName, QueryParameters query)
{
return Task.Factory.StartNew((object s) =>
{
return ((IProductsOperations)s).ListAsync(resourceGroupName, serviceName, query);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// List all products.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IProductsOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='query'>
/// Optional.
/// </param>
/// <returns>
/// List Products operation response details.
/// </returns>
public static Task<ProductListResponse> ListAsync(this IProductsOperations operations, string resourceGroupName, string serviceName, QueryParameters query)
{
return operations.ListAsync(resourceGroupName, serviceName, query, CancellationToken.None);
}
/// <summary>
/// List all products.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IProductsOperations.
/// </param>
/// <param name='nextLink'>
/// Required. NextLink from the previous successful call to List
/// operation.
/// </param>
/// <returns>
/// List Products operation response details.
/// </returns>
public static ProductListResponse ListNext(this IProductsOperations operations, string nextLink)
{
return Task.Factory.StartNew((object s) =>
{
return ((IProductsOperations)s).ListNextAsync(nextLink);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// List all products.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IProductsOperations.
/// </param>
/// <param name='nextLink'>
/// Required. NextLink from the previous successful call to List
/// operation.
/// </param>
/// <returns>
/// List Products operation response details.
/// </returns>
public static Task<ProductListResponse> ListNextAsync(this IProductsOperations operations, string nextLink)
{
return operations.ListNextAsync(nextLink, CancellationToken.None);
}
/// <summary>
/// Update product.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IProductsOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='pid'>
/// Required. Identifier of the product.
/// </param>
/// <param name='parameters'>
/// Required. Update parameters.
/// </param>
/// <param name='etag'>
/// Required. ETag.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static AzureOperationResponse Update(this IProductsOperations operations, string resourceGroupName, string serviceName, string pid, ProductUpdateParameters parameters, string etag)
{
return Task.Factory.StartNew((object s) =>
{
return ((IProductsOperations)s).UpdateAsync(resourceGroupName, serviceName, pid, parameters, etag);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Update product.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.ApiManagement.IProductsOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the resource group.
/// </param>
/// <param name='serviceName'>
/// Required. The name of the Api Management service.
/// </param>
/// <param name='pid'>
/// Required. Identifier of the product.
/// </param>
/// <param name='parameters'>
/// Required. Update parameters.
/// </param>
/// <param name='etag'>
/// Required. ETag.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static Task<AzureOperationResponse> UpdateAsync(this IProductsOperations operations, string resourceGroupName, string serviceName, string pid, ProductUpdateParameters parameters, string etag)
{
return operations.UpdateAsync(resourceGroupName, serviceName, pid, parameters, etag, CancellationToken.None);
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Text;
using System.Data;
namespace Serialize.LiquidNET
{
    /// <summary>
    /// Business Collection Object: wraps a <see cref="System.Data.DataTable"/>
    /// retrieved through an <see cref="Engine"/> and hands out business entity
    /// objects (BEOs) backed by its rows. Tracks handed-out BEOs in a keyed
    /// collection so the same row always maps to the same BEO instance.
    /// </summary>
    [Serializable()]
    public sealed class BCO : IEnumerable
    {
        private int _ContextID;
        private Engine _Engine;
        private System.Data.DataTable _DataTable;
        private BCOEnumerator _Enumerator;
        private string _TableName;
        private BEOCollection _beoCollection;
        private string _RootRelation;
        private IBEO _RootBEO;
        private PropertyMap _Properties;
        private RetrieveContext _RetrieveContext;

        /// <summary>Column/property metadata for the wrapped table.</summary>
        public PropertyMap Properties
        {
            get
            {
                return _Properties;
            }
        }

        /// <summary>Optional root entity this collection hangs off.</summary>
        public IBEO RootBEO
        {
            get
            {
                return _RootBEO;
            }
            set
            {
                _RootBEO = value;
            }
        }

        /// <summary>Name of the relation connecting this collection to <see cref="RootBEO"/>.</summary>
        public string RootRelation
        {
            get
            {
                return _RootRelation;
            }
            set
            {
                _RootRelation = value;
            }
        }

        /// <summary>Engine context identifier this collection was retrieved under.</summary>
        public int ContextID
        {
            get
            {
                return _ContextID;
            }
        }

        /// <summary>Name of the underlying database table.</summary>
        public string TableName
        {
            get
            {
                return _TableName;
            }
        }

        /// <summary>The wrapped data table (including rows in any state).</summary>
        public System.Data.DataTable DataTable
        {
            get
            {
                return _DataTable;
            }
        }

        /// <summary>The engine that created this collection.</summary>
        public Engine Engine
        {
            get
            {
                return _Engine;
            }
        }

        /// <summary>Total row count, including deleted/detached rows.</summary>
        public int RowCount
        {
            get
            {
                return this.DataTable.Rows.Count;
            }
        }

        /// <summary>
        /// Creates a collection from the raw retrieve result list:
        /// [0]=context id, [1]=table name, [2]=DataTable, [3]=ColumnMap,
        /// [4]=RetrieveContext.
        /// </summary>
        internal BCO(Engine aEngine, ArrayList list)
        {
            _Engine = aEngine;
            assignSource(list);
        }

        ~BCO()
        {
            // Guard against a construction failure that left the collection
            // unassigned; a finalizer must never throw.
            if (_beoCollection != null)
                _beoCollection.Clear();
        }

        public delegate void BCOCallback(object sender, EventArgs e);

        /// <summary>Raised after a successful <see cref="Save"/>.</summary>
        public event BCOCallback Callback;

        /// <summary>
        /// (Re)binds this collection to a retrieve result list and rebuilds
        /// the property map and BEO cache.
        /// </summary>
        private void assignSource(ArrayList list)
        {
            _Properties = new PropertyMap();
            _ContextID = (int)list[0];
            _TableName = (string)list[1];
            _DataTable = (DataTable)list[2];
            ColumnMap map = (ColumnMap)list[3];
            _RetrieveContext = (RetrieveContext)list[4];
            if (_beoCollection != null)
            {
                _beoCollection.Clear();
                _beoCollection = null;
            }
            _beoCollection = new BEOCollection();
            foreach (ColumnDef def in map)
            {
                _Properties.AddPropertyDef(string.Empty, def.ColumnIndex, def.ColumnName, def.DBType, def.SystemType, def.KeyType);
            }
        }

        /// <summary>Re-runs the original retrieve and rebinds the collection.</summary>
        public void Refresh()
        {
            ArrayList list = _Engine.Factory.Retrieve(_ContextID, _RetrieveContext);
            assignSource(list);
            if (_Enumerator != null)
                _Enumerator.Reset();
        }

        /// <summary>
        /// Persists pending row changes through the engine factory.
        /// Returns boxed <c>true</c> when changes were sent, or <c>null</c>
        /// when there was nothing to save.
        /// </summary>
        public object Save()
        {
            ArrayList list = new ArrayList();
            DataTable tblChanged = _DataTable.GetChanges();
            if (tblChanged != null)
            {
                list.Add(_ContextID);
                list.Add(_TableName);
                list.Add(_DataTable);
                ColumnMap map = new ColumnMap();
                foreach (PropertyDef def in _Properties)
                {
                    map.AddColumnDef(def.ColumnIndex, def.ColumnName, def.DBType, def.SystemType, def.KeyType);
                }
                list.Add(map);
                list.Add(_RetrieveContext);
                list = (ArrayList)_Engine.Factory.Update(list);
                if (Callback != null) Callback(this, new EventArgs());
                return true;
            }
            return null;
        }

        /// <summary>Marks every live row as deleted via its BEO.</summary>
        public void DeleteAll()
        {
            int count = _DataTable.Rows.Count;
            for (int i = 0; i < count; i++)
            {
                DataRow row = _DataTable.Rows[i];
                if (row != null && row.RowState != DataRowState.Deleted && row.RowState != DataRowState.Detached)
                {
                    int idx = _DataTable.Rows.IndexOf(row);
                    object[] keys = fetchKeys(row);
                    if (!_beoCollection.Contains(keys))
                    {
                        // Row has no cached BEO yet: create a transient one just to delete it.
                        IBEO beo = _Engine.CreateBEO(_ContextID);
                        ((BEO)beo).AttachSource(this, idx);
                        beo.Delete();
                    }
                    else
                    {
                        IBEO beo = _beoCollection[keys];
                        _beoCollection.Remove(beo);
                        beo.Delete();
                    }
                }
            }
        }

        /// <summary>Typed variant of <see cref="BEObyIndex(int)"/>.</summary>
        public T BEObyIndex<T>(int index)
        {
            return (T)BEObyIndex(index);
        }

        /// <summary>
        /// Returns the BEO for the row at <paramref name="aIndex"/>, creating
        /// and caching it on first access; <c>null</c> for out-of-range,
        /// deleted or detached rows.
        /// </summary>
        public IBEO BEObyIndex(int aIndex)
        {
            if (aIndex < 0 || _DataTable.Rows.Count <= aIndex)
            {
                return null;
            }
            DataRow row = _DataTable.Rows[aIndex];
            if (row != null && row.RowState != DataRowState.Deleted && row.RowState != DataRowState.Detached)
            {
                object[] keys = fetchKeys(row);
                if (!_beoCollection.Contains(keys))
                {
                    IBEO beo = _Engine.CreateBEO(_ContextID);
                    ((BEO)beo).AttachSource(this, aIndex);
                    _beoCollection.Add(beo);
                    return beo;
                }
                else
                {
                    return _beoCollection[keys];
                }
            }
            return null;
        }

        /// <summary>
        /// Returns the first live row whose column mapped to
        /// <paramref name="aProperty"/> equals <paramref name="aValue"/>,
        /// as a typed BEO; <c>default(T)</c> when not found.
        /// </summary>
        public T BEObyPropertyValue<T>(string aProperty, object aValue)
        {
            PropertyDef def = _Properties.GetWithPropertyName(aProperty);
            if (def != null)
            {
                foreach (DataRow row in _DataTable.Rows)
                {
                    if (row != null && row.RowState != DataRowState.Deleted && row.RowState != DataRowState.Detached && row[def.ColumnName].Equals(aValue))
                    {
                        int idx = _DataTable.Rows.IndexOf(row);
                        return BEObyIndex<T>(idx);
                    }
                }
            }
            return default(T);
        }

        /// <summary>Untyped variant of <see cref="BEObyPropertyValue{T}"/>.</summary>
        public IBEO BEObyPropertyValue(string aProperty, object aValue)
        {
            PropertyDef def = _Properties.GetWithPropertyName(aProperty);
            if (def != null)
            {
                foreach (DataRow row in _DataTable.Rows)
                {
                    if (row != null && row.RowState != DataRowState.Deleted && row.RowState != DataRowState.Detached && row[def.ColumnName].Equals(aValue))
                    {
                        int idx = _DataTable.Rows.IndexOf(row);
                        return BEObyIndex(idx);
                    }
                }
            }
            return null;
        }

        /// <summary>
        /// Looks up a row by its single-column primary key and returns the
        /// cached (or newly created) BEO for it; <c>null</c> when no live row
        /// matches. Throws <see cref="ArgumentException"/> when the table does
        /// not have exactly one primary-key column.
        /// </summary>
        public IBEO BEObyPrimaryKey(object aPrimaryKey)
        {
            DataColumn[] priCols = _DataTable.PrimaryKey;
            if (priCols.Length != 1)
            {
                throw new ArgumentException("PrimaryKey columns count does not match");
            }
            string select = String.Empty;
            for (int i = 0; i < priCols.Length; i++)
            {
                DataColumn col = priCols[i];
                if (i > 0)
                    select += " AND ";
                select += col.ColumnName + " = " + aPrimaryKey.ToString();
            }
            DataRow[] rows = _DataTable.Select(select);
            if (rows.Length == 1)
            {
                DataRow row = rows[0];
                if (row != null && row.RowState != DataRowState.Deleted && row.RowState != DataRowState.Detached)
                {
                    int rowID = _DataTable.Rows.IndexOf(row);
                    object[] keys = new object[] { aPrimaryKey };
                    if (!_beoCollection.Contains(keys))
                    {
                        IBEO beo = _Engine.CreateBEO(_ContextID);
                        ((BEO)beo).AttachSource(this, rowID);
                        _beoCollection.Add(beo);
                        return beo;
                    }
                    else
                    {
                        return _beoCollection[keys];
                    }
                }
            }
            return null;
        }

        /// <summary>Composite-key variant of <see cref="BEObyPrimaryKey(object)"/>.</summary>
        public IBEO BEObyPrimaryKey(object[] aPrimaryKeys)
        {
            DataRow row = _DataTable.Rows.Find(aPrimaryKeys);
            if (row != null && row.RowState != DataRowState.Deleted && row.RowState != DataRowState.Detached)
            {
                int rowID = _DataTable.Rows.IndexOf(row);
                if (!_beoCollection.Contains(aPrimaryKeys))
                {
                    IBEO beo = _Engine.CreateBEO(_ContextID);
                    ((BEO)beo).AttachSource(this, rowID);
                    _beoCollection.Add(beo);
                    return beo;
                }
                else
                {
                    return _beoCollection[aPrimaryKeys];
                }
            }
            return null;
        }

        /// <summary>Collects the primary-key values of a row, in property-map order.</summary>
        private object[] fetchKeys(DataRow row)
        {
            ArrayList list = new ArrayList();
            if (row != null)
            {
                PropertyMap map = this.Properties.PrimaryKeyProperties;
                for (int i = 0; i < map.Count; i++)
                {
                    PropertyDef def = map[i];
                    list.Add(row[def.ColumnName]);
                }
            }
            return list.ToArray();
        }

        /// <summary>Flattens a BEO key into a plain value array.</summary>
        private object[] TransformKey(BEOKey beokey)
        {
            ArrayList list = new ArrayList();
            foreach (BEOKeyColumn col in beokey)
            {
                list.Add(col.Value);
            }
            return list.ToArray();
        }

        /// <summary>
        /// Resolves a BEO key to its current row index, or -1 when the row is
        /// missing, deleted or detached.
        /// </summary>
        internal int RowIndexFromKey(BEOKey beokey)
        {
            ArrayList list = new ArrayList();
            foreach (BEOKeyColumn col in beokey)
            {
                list.Add(col.Value);
            }
            DataRow row = _DataTable.Rows.Find(list.ToArray());
            if (row != null && row.RowState != DataRowState.Deleted && row.RowState != DataRowState.Detached)
                return _DataTable.Rows.IndexOf(row);
            return -1;
        }

        /// <summary>Creates a new BEO with an engine-generated primary key.</summary>
        public IBEO NewBEO()
        {
            return this.NewBEO(new object[] { Engine.nextSequence(this.TableName) });
        }

        /// <summary>Creates a new BEO with the given single-column primary key.</summary>
        public IBEO NewBEO(object aNewPrimaryKey)
        {
            object[] obj = new object[] { aNewPrimaryKey };
            return this.NewBEO(obj);
        }

        /// <summary>
        /// Adds a new row carrying the supplied primary-key values, wraps it in
        /// a BEO, caches and returns it. Throws when the key count does not
        /// match the table's primary-key column count.
        /// </summary>
        public IBEO NewBEO(object[] aNewPrimaryKeys)
        {
            IBEO beo = _Engine.CreateBEO(_ContextID);
            DataRow dr = _DataTable.NewRow();
            PropertyMap pmap = _Properties.PrimaryKeyProperties;
            if (aNewPrimaryKeys.Length != pmap.Count)
            {
                throw new Exception("new PrimaryKey Count doesn't match the PrimaryKey Columns Count");
            }
            for (int i = 0; i < aNewPrimaryKeys.Length; i++)
            {
                dr[pmap[i].ColumnName] = aNewPrimaryKeys[i];
            }
            _DataTable.Rows.Add(dr);
            ((BEO)beo).AttachSource(this, _DataTable.Rows.IndexOf(dr));
            _beoCollection.Add(beo);
            // A collection created empty has no retrieve context yet; derive
            // one from the first entity's key so Refresh/Save can round-trip.
            if (_RetrieveContext == null)
            {
                _RetrieveContext = new RetrieveContext("PrimaryKey", beo.Key);
            }
            return beo;
        }

        /// <summary>
        /// Copies the foreign-key value of <paramref name="relation"/> from
        /// <paramref name="source"/> into <paramref name="target"/>.
        /// Throws <see cref="ArgumentException"/> for an unknown relation.
        /// </summary>
        public void CopyForeignKeys(IBEO source, IBEO target, string relation)
        {
            PropertyDef def = _Properties.GetRelationProperty(relation);
            try
            {
                if ((source != null) && (target != null))
                {
                    object val = source.GetSourceValue(def.FKField);
                    target.SetSourceValue(def.PropertyName, val);
                }
            }
            catch
            {
                throw new ArgumentException("relation is unknown");
            }
        }

        /// <summary>True when the row at the index is deleted (or the index is negative).</summary>
        internal bool CheckDeleted(int aRowIndex)
        {
            if (aRowIndex < 0)
                return true;
            DataRow row = _DataTable.Rows[aRowIndex];
            return row != null && row.RowState == DataRowState.Deleted;
        }

        /// <summary>True when the row at the index is newly added.</summary>
        internal bool CheckAdded(int aRowIndex)
        {
            DataRow row = _DataTable.Rows[aRowIndex];
            return row != null && row.RowState == DataRowState.Added;
        }

        /// <summary>True when the row at the index has pending modifications.</summary>
        internal bool CheckModified(int aRowIndex)
        {
            DataRow row = _DataTable.Rows[aRowIndex];
            return row != null && row.RowState == DataRowState.Modified;
        }

        /// <summary>True when the row at the index is detached from the table.</summary>
        internal bool CheckDetached(int aRowIndex)
        {
            DataRow row = _DataTable.Rows[aRowIndex];
            return row != null && row.RowState == DataRowState.Detached;
        }

        /// <summary>
        /// Reads a cell via its mapped property name. DBNull is translated to
        /// a type-appropriate default (epoch date for DateTime, empty string
        /// otherwise for unknown types); unknown property or deleted row
        /// yields <c>null</c>.
        /// </summary>
        internal object GetSourceValue(int aRowIndex, string aPropertyName)
        {
            string colName = _Properties.ColumnName(aPropertyName);
            if (colName == String.Empty)
                return null;
            DataRow row = _DataTable.Rows[aRowIndex];
            if (row != null && row.RowState != DataRowState.Deleted)
            {
                if (row[colName] is DBNull)
                {
                    switch (_Properties.GetWithPropertyName(aPropertyName).SystemType)
                    {
                        case "System.Byte":
                            return default(System.Byte);
                        case "System.Int32":
                            return default(System.Int32);
                        case "System.Int64":
                            return default(System.Int64);
                        case "System.Double":
                            return default(System.Double);
                        case "System.DateTime":
                            // Culture-independent replacement for DateTime.Parse("01-01-1970").
                            return new DateTime(1970, 1, 1);
                        case "System.Boolean":
                            return false;
                        case "System.Byte[]":
                            return new byte[] { };
                        default:
                            return String.Empty;
                    }
                }
                else
                    return _DataTable.Rows[aRowIndex][colName];
            }
            return null;
        }

        /// <summary>
        /// Writes a cell via its mapped property name; returns false for an
        /// unknown property or a deleted row.
        /// </summary>
        internal bool SetSourceValue(int aRowIndex, string aPropertyName, object aValue)
        {
            string colName = _Properties.ColumnName(aPropertyName);
            if (colName == String.Empty)
                return false;
            DataRow row = _DataTable.Rows[aRowIndex];
            if (row != null && row.RowState != DataRowState.Deleted)
            {
                row[colName] = aValue;
                return true;
            }
            return false;
        }

        /// <summary>
        /// Deletes the row at the index, evicting its cached BEO first.
        /// Returns boxed <c>true</c> on success, boxed <c>false</c> otherwise.
        /// </summary>
        internal object Delete(int aRowIndex)
        {
            DataRow row = _DataTable.Rows[aRowIndex];
            if (row != null)
            {
                object[] keys = fetchKeys(row);
                if (_beoCollection.Contains(keys))
                {
                    IBEO beo = _beoCollection[keys];
                    _beoCollection.Remove(beo);
                    beo = null;
                }
                row.Delete();
                return true;
            }
            return false;
        }

        /// <summary>Materializes all BEOs assignable to T into a list.</summary>
        public List<T> AsList<T>()
        {
            List<T> list = new List<T>();
            foreach (IBEO beo in this)
            {
                if (beo is T)
                    list.Add((T)beo);
            }
            return list;
        }

        #region IEnumerable
        public IEnumerator GetEnumerator()
        {
            if (_Enumerator == null)
            {
                _Enumerator = new BCOEnumerator(this);
            }
            else
            {
                _Enumerator.Reset();
            }
            return _Enumerator;
        }

        /// <summary>Returns the shared enumerator without resetting it.</summary>
        public BCOEnumerator GetBCOEnumerator()
        {
            if (_Enumerator == null)
            {
                _Enumerator = new BCOEnumerator(this);
            }
            return _Enumerator;
        }
        #endregion
    }
}
| |
/******************************************************************************
* The MIT License
* Copyright (c) 2003 Novell Inc. www.novell.com
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the Software), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED AS IS, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*******************************************************************************/
//
// Novell.Directory.Ldap.LdapDITContentRuleSchema.cs
//
// Author:
// Sunil Kumar ([email protected])
//
// (C) 2003 Novell, Inc (http://www.novell.com)
//
using System;
using SchemaParser = Novell.Directory.Ldap.Utilclass.SchemaParser;
using AttributeQualifier = Novell.Directory.Ldap.Utilclass.AttributeQualifier;
namespace Novell.Directory.Ldap
{
    /// <summary> Represents a DIT (Directory Information Tree) content rule
    /// in a directory schema.
    ///
    /// The LdapDITContentRuleSchema class is used to discover or modify
    /// additional auxiliary classes, mandatory and optional attributes, and
    /// restricted attributes in effect for an object class.
    /// </summary>
    public class LdapDITContentRuleSchema:LdapSchemaElement
    {
        /// <summary> Returns the list of allowed auxiliary classes.
        ///
        /// </summary>
        /// <returns> The list of allowed auxiliary classes.
        /// </returns>
        virtual public System.String[] AuxiliaryClasses
        {
            get
            {
                return auxiliary;
            }
        }

        /// <summary> Returns the list of additional required attributes for an entry
        /// controlled by this content rule.
        ///
        /// </summary>
        /// <returns> The list of additional required attributes.
        /// </returns>
        virtual public System.String[] RequiredAttributes
        {
            get
            {
                return required;
            }
        }

        /// <summary> Returns the list of additional optional attributes for an entry
        /// controlled by this content rule.
        ///
        /// </summary>
        /// <returns> The list of additional optional attributes.
        /// </returns>
        virtual public System.String[] OptionalAttributes
        {
            get
            {
                return optional;
            }
        }

        /// <summary> Returns the list of precluded attributes for an entry controlled by
        /// this content rule.
        ///
        /// </summary>
        /// <returns> The list of precluded attributes.
        /// </returns>
        virtual public System.String[] PrecludedAttributes
        {
            get
            {
                return precluded;
            }
        }

        // Backing lists default to a single empty string so the getters never
        // return null even before parsing.
        private System.String[] auxiliary = new System.String[]{""};
        private System.String[] required = new System.String[]{""};
        private System.String[] optional = new System.String[]{""};
        private System.String[] precluded = new System.String[]{""};

        /// <summary> Constructs a DIT content rule for adding to or deleting from the
        /// schema.
        ///
        /// </summary>
        /// <param name="names"> The names of the content rule.
        ///
        /// </param>
        /// <param name="oid"> The unique object identifier of the content rule -
        /// in dotted numerical format.
        ///
        /// </param>
        /// <param name="description">The optional description of the content rule.
        ///
        /// </param>
        /// <param name="obsolete"> True if the content rule is obsolete.
        ///
        /// </param>
        /// <param name="auxiliary"> A list of auxiliary object classes allowed for
        /// an entry to which this content rule applies.
        /// These may either be specified by name or
        /// numeric oid.
        ///
        /// </param>
        /// <param name="required"> A list of attributes that an entry
        /// to which this content rule applies must
        /// contain in addition to its normal set of
        /// mandatory attributes. These attributes may be
        /// specified by either name or numeric oid.
        ///
        /// </param>
        /// <param name="optional"> A list of attributes that an entry
        /// to which this content rule applies may contain
        /// in addition to its normal set of optional
        /// attributes. These attributes may be specified by
        /// either name or numeric oid.
        ///
        /// </param>
        /// <param name="precluded"> A list, consisting of a subset of the optional
        /// attributes of the structural and
        /// auxiliary object classes which are precluded
        /// from an entry to which this content rule
        /// applies. These may be specified by either name
        /// or numeric oid.
        /// </param>
        public LdapDITContentRuleSchema(System.String[] names, System.String oid, System.String description, bool obsolete, System.String[] auxiliary, System.String[] required, System.String[] optional, System.String[] precluded):base(LdapSchema.schemaTypeNames[LdapSchema.DITCONTENT])
        {
            base.names = new System.String[names.Length];
            names.CopyTo(base.names, 0);
            base.oid = oid;
            base.description = description;
            base.obsolete = obsolete;
            this.auxiliary = auxiliary;
            this.required = required;
            this.optional = optional;
            this.precluded = precluded;
            base.Value = formatString();
        }

        /// <summary> Constructs a DIT content rule from the raw string value returned from a
        /// schema query for dITContentRules.
        ///
        /// </summary>
        /// <param name="raw"> The raw string value returned from a schema query
        /// for content rules.
        /// </param>
        public LdapDITContentRuleSchema(System.String raw):base(LdapSchema.schemaTypeNames[LdapSchema.DITCONTENT])
        {
            base.obsolete = false;
            try
            {
                SchemaParser parser = new SchemaParser(raw);
                if (parser.Names != null)
                {
                    base.names = new System.String[parser.Names.Length];
                    parser.Names.CopyTo(base.names, 0);
                }
                if (parser.ID != null)
                    base.oid = parser.ID;
                if (parser.Description != null)
                    base.description = parser.Description;
                if (parser.Auxiliary != null)
                {
                    auxiliary = new System.String[parser.Auxiliary.Length];
                    parser.Auxiliary.CopyTo(auxiliary, 0);
                }
                if (parser.Required != null)
                {
                    required = new System.String[parser.Required.Length];
                    parser.Required.CopyTo(required, 0);
                }
                if (parser.Optional != null)
                {
                    optional = new System.String[parser.Optional.Length];
                    parser.Optional.CopyTo(optional, 0);
                }
                if (parser.Precluded != null)
                {
                    precluded = new System.String[parser.Precluded.Length];
                    parser.Precluded.CopyTo(precluded, 0);
                }
                base.obsolete = parser.Obsolete;
                System.Collections.IEnumerator qualifiers = parser.Qualifiers;
                AttributeQualifier attrQualifier;
                while (qualifiers.MoveNext())
                {
                    attrQualifier = (AttributeQualifier) qualifiers.Current;
                    setQualifier(attrQualifier.Name, attrQualifier.Values);
                }
                base.Value = formatString();
            }
            catch (System.IO.IOException)
            {
                // Best effort: an unparsable definition leaves the element with
                // its default (empty) lists rather than failing construction.
            }
        }

        /// <summary> Appends a '$'-separated OID list, parenthesized when it has
        /// more than one element, as required by the RFC 4512 grammar.
        /// </summary>
        private static void appendOidList(System.Text.StringBuilder buffer, System.String[] values)
        {
            if (values.Length > 1)
                buffer.Append("( ");
            for (int i = 0; i < values.Length; i++)
            {
                if (i > 0)
                    buffer.Append(" $ ");
                buffer.Append(values[i]);
            }
            if (values.Length > 1)
                buffer.Append(" )");
        }

        /// <summary> Returns a string in a format suitable for directly adding to a
        /// directory, as a value of the particular schema element class.
        ///
        /// </summary>
        /// <returns> A string representation of the class' definition.
        /// </returns>
        protected internal override System.String formatString()
        {
            System.Text.StringBuilder valueBuffer = new System.Text.StringBuilder("( ");
            System.String token;
            System.String[] strArray;
            if ((token = ID) != null)
            {
                valueBuffer.Append(token);
            }
            strArray = Names;
            if (strArray != null)
            {
                valueBuffer.Append(" NAME ");
                if (strArray.Length == 1)
                {
                    valueBuffer.Append("'" + strArray[0] + "'");
                }
                else
                {
                    valueBuffer.Append("( ");
                    for (int i = 0; i < strArray.Length; i++)
                    {
                        valueBuffer.Append(" '" + strArray[i] + "'");
                    }
                    valueBuffer.Append(" )");
                }
            }
            if ((token = Description) != null)
            {
                valueBuffer.Append(" DESC ");
                valueBuffer.Append("'" + token + "'");
            }
            if (Obsolete)
            {
                valueBuffer.Append(" OBSOLETE");
            }
            if ((strArray = AuxiliaryClasses) != null)
            {
                valueBuffer.Append(" AUX ");
                appendOidList(valueBuffer, strArray);
            }
            if ((strArray = RequiredAttributes) != null)
            {
                valueBuffer.Append(" MUST ");
                appendOidList(valueBuffer, strArray);
            }
            if ((strArray = OptionalAttributes) != null)
            {
                valueBuffer.Append(" MAY ");
                appendOidList(valueBuffer, strArray);
            }
            if ((strArray = PrecludedAttributes) != null)
            {
                valueBuffer.Append(" NOT ");
                appendOidList(valueBuffer, strArray);
            }
            // Trailing extension qualifiers (X-...), each with its own
            // optionally-parenthesized quoted value list.
            System.Collections.IEnumerator en;
            if ((en = QualifierNames) != null)
            {
                System.String qualName;
                System.String[] qualValue;
                while (en.MoveNext())
                {
                    qualName = ((System.String) en.Current);
                    valueBuffer.Append(" " + qualName + " ");
                    if ((qualValue = getQualifier(qualName)) != null)
                    {
                        if (qualValue.Length > 1)
                            valueBuffer.Append("( ");
                        for (int i = 0; i < qualValue.Length; i++)
                        {
                            if (i > 0)
                                valueBuffer.Append(" ");
                            valueBuffer.Append("'" + qualValue[i] + "'");
                        }
                        if (qualValue.Length > 1)
                            valueBuffer.Append(" )");
                    }
                }
            }
            valueBuffer.Append(" )");
            return valueBuffer.ToString();
        }
    }
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using System.Text;
using Newtonsoft.Json.Linq;
using Xamarin.Auth;
using Xamarin.Social;
namespace Stampsy.Social.Services
{
public class TwitterManager : ServiceManager
{
private static readonly Uri BaseUri = new Uri ("https://api.twitter.com/1.1/");
/// <summary>Human-readable provider name shown for this service ("Twitter").</summary>
public override string Name {
    get { return "Twitter"; }
}
/// <summary>Service identifiers this manager recognizes in the fallback chain.</summary>
public override string [] KnownServiceIds {
    get {
        return new [] { "Twitter" };
    }
}
/// <summary>Creates a Twitter manager backed by the given service fallback chain.</summary>
public TwitterManager (params Func<Service> [] fallbackChain)
    : base (fallbackChain)
{
}
#region Public API
/// <summary>
/// Fetches the logged-in user's profile, establishing a session first
/// if necessary.
/// </summary>
public override Task<ServiceUser> GetProfileAsync (CancellationToken token = default (CancellationToken), LoginOptions options = default (LoginOptions))
{
    // Defer the actual call until WithSession has a session available.
    Func<Task<ServiceUser>> fetch = () => GetProfile (token);
    return WithSession (fetch, options, token);
}
/// <summary>
/// Posts the given item as a tweet, establishing a session first
/// if necessary.
/// </summary>
public override Task ShareAsync (Item item, CancellationToken token = default (CancellationToken), LoginOptions options = default (LoginOptions))
{
    // Defer the actual call until WithSession has a session available.
    Func<Task> share = () => Share (item, token);
    return WithSession (share, options, token);
}
public override Task<Page<IEnumerable<ServiceUser>>> GetFriendsAsync (Page<IEnumerable<ServiceUser>> previous = null, CancellationToken token = default (CancellationToken), LoginOptions options = default (LoginOptions))
{
return GetFriendsAsync (100, previous, token, options);
}
public Task<Page<IEnumerable<ServiceUser>>> GetFriendsAsync (int itemsPerPage = 100, Page<IEnumerable<ServiceUser>> previous = null, CancellationToken token = default (CancellationToken), LoginOptions options = default (LoginOptions))
{
return this.WithSession (
() => this.GetFriends (itemsPerPage, previous, token),
options,
token
);
}
#endregion
#region Implementation
Task<Page<IEnumerable<ServiceUser>>> GetFriends (int itemsPerPage, Page<IEnumerable<ServiceUser>> previous, CancellationToken token)
{
var session = EnsureLoggedIn ();
var request = session.Service.CreateRequest (
"GET",
new Uri (BaseUri, "friends/list.json"),
new Dictionary<string, string> {
{ "count", itemsPerPage.ToString () },
{ "cursor", (previous != null) ? previous.NextPageToken : "-1" },
{ "skip_status", "true" },
{ "include_user_entities", "false" }
},
session.Account
);
return ParsePageAsync (request,
(json) => json ["users"].Children<JObject> ().Select (ParseUser),
token
);
}
Task<ServiceUser> GetProfile (CancellationToken token)
{
var session = EnsureLoggedIn ();
var request = session.Service.CreateRequest (
"GET",
new Uri ("https://api.twitter.com/1.1/users/show.json"),
new Dictionary<string, string> { { "screen_name", session.Account.Username } },
session.Account
);
return ParseAsync (request, ParseProfile, token);
}
Task Share (Item item, CancellationToken token)
{
var session = EnsureLoggedIn ();
//
// Combine the links into the tweet
//
var sb = new StringBuilder ();
sb.Append (item.Text);
foreach (var l in item.Links) {
sb.Append (" ");
sb.Append (l.AbsoluteUri);
}
var status = sb.ToString ();
//
// Create the request
//
Request req;
if (item.Images.Count == 0) {
req = session.Service.CreateRequest ("POST", new Uri ("https://api.twitter.com/1.1/statuses/update.json"), session.Account);
req.Parameters["status"] = status;
}
else {
req = session.Service.CreateRequest ("POST", new Uri ("https://api.twitter.com/1.1/statuses/update_with_media.json"), session.Account);
req.AddMultipartData ("status", status);
foreach (var i in item.Images.Take (session.Service.MaxImages)) {
i.AddToRequest (req, "media[]");
}
}
//
// Send it
//
return req.GetResponseAsync (token);
}
#endregion
protected override void HandleResponseException (Exception ex)
{
var sex = ex as SocialException;
if (sex != null)
throw new ApiException ("Seems like iOS account got deleted.", sex, ApiExceptionKind.Unauthorized);
}
protected override void HandleResponseJson (Response response, JToken json)
{
if (json.Type != JTokenType.Object)
return;
var errs = json ["errors"];
if (errs == null)
return;
var err = errs [0];
var code = err.Value<int> ("code");
var msg = err.Value<string> ("message");
switch (code) {
case 215: // Bad Authentication data
throw new ApiException (msg, code, response, ApiExceptionKind.Unauthorized);
default:
throw new ApiException (msg, code, response, ApiExceptionKind.Other);
}
}
protected override string ParsePageToken (JToken json)
{
var nextCursor = json.Value<long> ("next_cursor");
return (nextCursor != 0)
? nextCursor.ToString ()
: null;
}
private ServiceUser ParseProfile (JToken user)
{
var profile = ParseUser (user);
// getting full-size picture
profile.ImageUrl = profile.ImageUrl.Replace ("_normal.", ".");
return profile;
}
protected override ServiceUser ParseUser (JToken user)
{
return new ServiceUser {
Id = user.Value<string> ("id_str"),
Name = user.Value<string> ("name"),
ImageUrl = user.Value<string> ("profile_image_url"),
Nickname = user.Value<string> ("screen_name"),
Location = user.Value<string> ("location"),
Gender = null
};
}
}
}
| |
//------------------------------------------------------------------------------
// <copyright file="HtmlAnchor.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------
/*
* HtmlAnchor.cs
*
* Copyright (c) 2000 Microsoft Corporation
*/
namespace System.Web.UI.HtmlControls {
using System.ComponentModel;
using System;
using System.Collections;
using System.Web;
using System.Web.UI;
using System.Web.Util;
using System.Security.Permissions;
/// <devdoc>
/// <para>The <see langword='HtmlAnchor'/>
/// class defines the methods, properties, and
/// events for the HtmlAnchor control.
/// This
/// class
/// allows programmatic access to the
/// HTML <a> element on the server.</para>
/// </devdoc>
[
DefaultEvent("ServerClick"),
SupportsEventValidation,
]
public class HtmlAnchor : HtmlContainerControl, IPostBackEventHandler {
private static readonly object EventServerClick = new object();
/*
* Creates an intrinsic Html A control.
*/
/// <devdoc>
/// <para>Initializes a new instance of the <see cref='System.Web.UI.HtmlControls.HtmlAnchor'/> class.</para>
/// </devdoc>
public HtmlAnchor() : base("a") {
}
[
WebCategory("Behavior"),
DefaultValue(true),
]
public virtual bool CausesValidation {
get {
object b = ViewState["CausesValidation"];
return((b == null) ? true : (bool)b);
}
set {
ViewState["CausesValidation"] = value;
}
}
/*
* Href property.
*/
/// <devdoc>
/// <para>Gets or sets the URL target of the link specified in the
/// <see cref='System.Web.UI.HtmlControls.HtmlAnchor'/>
/// server control.</para>
/// </devdoc>
[
WebCategory("Navigation"),
DefaultValue(""),
DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden),
UrlProperty()
]
public string HRef {
get {
string s = Attributes["href"];
return((s != null) ? s : String.Empty);
}
set {
Attributes["href"] = MapStringAttributeToString(value);
}
}
/*
* Name of group this radio is in.
*/
/// <devdoc>
/// <para>Gets or sets the bookmark name defined in the <see cref='System.Web.UI.HtmlControls.HtmlAnchor'/>
/// server
/// control.</para>
/// </devdoc>
[
WebCategory("Navigation"),
DefaultValue(""),
DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)
]
public string Name {
get {
string s = Attributes["name"];
return((s != null) ? s : String.Empty);
}
set {
Attributes["name"] = MapStringAttributeToString(value);
}
}
/*
* Target window property.
*/
/// <devdoc>
/// <para>
/// Gets or
/// sets the target window or frame
/// to load linked Web page content into.
/// </para>
/// </devdoc>
[
WebCategory("Navigation"),
DefaultValue(""),
DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)
]
public string Target {
get {
string s = Attributes["target"];
return((s != null) ? s : String.Empty);
}
set {
Attributes["target"] = MapStringAttributeToString(value);
}
}
/*
* Title property.
*/
/// <devdoc>
/// <para> Gets or sets the title that
/// the browser displays when identifying linked content.</para>
/// </devdoc>
[
WebCategory("Appearance"),
Localizable(true),
DefaultValue(""),
DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)
]
public string Title {
get {
string s = Attributes["title"];
return((s != null) ? s : String.Empty);
}
set {
Attributes["title"] = MapStringAttributeToString(value);
}
}
[
WebCategory("Behavior"),
DefaultValue(""),
WebSysDescription(SR.PostBackControl_ValidationGroup)
]
public virtual string ValidationGroup {
get {
string s = (string)ViewState["ValidationGroup"];
return((s == null) ? string.Empty : s);
}
set {
ViewState["ValidationGroup"] = value;
}
}
/// <devdoc>
/// <para>Occurs on the server when a user clicks the <see cref='System.Web.UI.HtmlControls.HtmlAnchor'/> control on the
/// browser.</para>
/// </devdoc>
[
WebCategory("Action"),
WebSysDescription(SR.HtmlControl_OnServerClick)
]
public event EventHandler ServerClick {
add {
Events.AddHandler(EventServerClick, value);
}
remove {
Events.RemoveHandler(EventServerClick, value);
}
}
private PostBackOptions GetPostBackOptions() {
PostBackOptions options = new PostBackOptions(this, string.Empty);
options.RequiresJavaScriptProtocol = true;
if (CausesValidation && Page.GetValidators(ValidationGroup).Count > 0) {
options.PerformValidation = true;
options.ValidationGroup = ValidationGroup;
}
return options;
}
/// <internalonly/>
protected internal override void OnPreRender(EventArgs e) {
base.OnPreRender(e);
if (Page != null && Events[EventServerClick] != null) {
Page.RegisterPostBackScript();
// VSWhidbey 489577
if (CausesValidation && Page.GetValidators(ValidationGroup).Count > 0) {
Page.RegisterWebFormsScript();
}
}
}
/*
* Override to generate postback code for onclick.
*/
/// <internalonly/>
/// <devdoc>
/// </devdoc>
protected override void RenderAttributes(HtmlTextWriter writer) {
if (Events[EventServerClick] != null) {
Attributes.Remove("href");
base.RenderAttributes(writer);
PostBackOptions options = GetPostBackOptions();
Debug.Assert(options != null);
string postBackEventReference = Page.ClientScript.GetPostBackEventReference(options, true);
Debug.Assert(!string.IsNullOrEmpty(postBackEventReference));
writer.WriteAttribute("href", postBackEventReference, true);
}
else {
PreProcessRelativeReferenceAttribute(writer, "href");
base.RenderAttributes(writer);
}
}
/*
* Method used to raise the OnServerClick event.
*/
/// <devdoc>
/// <para>Raises the <see langword='ServerClick'/>
/// event.</para>
/// </devdoc>
protected virtual void OnServerClick(EventArgs e) {
EventHandler handler = (EventHandler)Events[EventServerClick];
if (handler != null) handler(this, e);
}
/*
* Method of IPostBackEventHandler interface to raise events on post back.
* Button fires an OnServerClick event.
*/
/// <internalonly/>
/// <devdoc>
/// </devdoc>
void IPostBackEventHandler.RaisePostBackEvent(string eventArgument) {
RaisePostBackEvent(eventArgument);
}
/// <internalonly/>
/// <devdoc>
/// </devdoc>
protected virtual void RaisePostBackEvent(string eventArgument) {
ValidateEvent(UniqueID, eventArgument);
if (CausesValidation) {
Page.Validate(ValidationGroup);
}
OnServerClick(EventArgs.Empty);
}
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using AutoMapper;
using Microsoft.Azure.Commands.Compute.Automation.Models;
using Microsoft.Azure.Management.Compute;
using Microsoft.Azure.Management.Compute.Models;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Management.Automation;
namespace Microsoft.Azure.Commands.Compute.Automation
{
public partial class InvokeAzureComputeMethodCmdlet : ComputeAutomationBaseCmdlet
{
protected object CreateVirtualMachineScaleSetVMGetDynamicParameters()
{
dynamicParameters = new RuntimeDefinedParameterDictionary();
var pResourceGroupName = new RuntimeDefinedParameter();
pResourceGroupName.Name = "ResourceGroupName";
pResourceGroupName.ParameterType = typeof(string);
pResourceGroupName.Attributes.Add(new ParameterAttribute
{
ParameterSetName = "InvokeByDynamicParameters",
Position = 1,
Mandatory = true
});
pResourceGroupName.Attributes.Add(new AllowNullAttribute());
dynamicParameters.Add("ResourceGroupName", pResourceGroupName);
var pVMScaleSetName = new RuntimeDefinedParameter();
pVMScaleSetName.Name = "VMScaleSetName";
pVMScaleSetName.ParameterType = typeof(string);
pVMScaleSetName.Attributes.Add(new ParameterAttribute
{
ParameterSetName = "InvokeByDynamicParameters",
Position = 2,
Mandatory = true
});
pVMScaleSetName.Attributes.Add(new AllowNullAttribute());
dynamicParameters.Add("VMScaleSetName", pVMScaleSetName);
var pInstanceId = new RuntimeDefinedParameter();
pInstanceId.Name = "InstanceId";
pInstanceId.ParameterType = typeof(string);
pInstanceId.Attributes.Add(new ParameterAttribute
{
ParameterSetName = "InvokeByDynamicParameters",
Position = 3,
Mandatory = true
});
pInstanceId.Attributes.Add(new AllowNullAttribute());
dynamicParameters.Add("InstanceId", pInstanceId);
var pArgumentList = new RuntimeDefinedParameter();
pArgumentList.Name = "ArgumentList";
pArgumentList.ParameterType = typeof(object[]);
pArgumentList.Attributes.Add(new ParameterAttribute
{
ParameterSetName = "InvokeByStaticParameters",
Position = 4,
Mandatory = true
});
pArgumentList.Attributes.Add(new AllowNullAttribute());
dynamicParameters.Add("ArgumentList", pArgumentList);
return dynamicParameters;
}
protected void ExecuteVirtualMachineScaleSetVMGetMethod(object[] invokeMethodInputParameters)
{
string resourceGroupName = (string)ParseParameter(invokeMethodInputParameters[0]);
string vmScaleSetName = (string)ParseParameter(invokeMethodInputParameters[1]);
string instanceId = (string)ParseParameter(invokeMethodInputParameters[2]);
if (!string.IsNullOrEmpty(resourceGroupName) && !string.IsNullOrEmpty(vmScaleSetName) && !string.IsNullOrEmpty(instanceId))
{
var result = VirtualMachineScaleSetVMsClient.Get(resourceGroupName, vmScaleSetName, instanceId);
WriteObject(result);
}
else if (!string.IsNullOrEmpty(resourceGroupName) && !string.IsNullOrEmpty(vmScaleSetName))
{
var result = VirtualMachineScaleSetVMsClient.List(resourceGroupName, vmScaleSetName);
var resultList = result.ToList();
var nextPageLink = result.NextPageLink;
while (!string.IsNullOrEmpty(nextPageLink))
{
var pageResult = VirtualMachineScaleSetVMsClient.ListNext(nextPageLink);
foreach (var pageItem in pageResult)
{
resultList.Add(pageItem);
}
nextPageLink = pageResult.NextPageLink;
}
WriteObject(resultList, true);
}
}
}
public partial class NewAzureComputeArgumentListCmdlet : ComputeAutomationBaseCmdlet
{
protected PSArgument[] CreateVirtualMachineScaleSetVMGetParameters()
{
string resourceGroupName = string.Empty;
string vmScaleSetName = string.Empty;
string instanceId = string.Empty;
return ConvertFromObjectsToArguments(
new string[] { "ResourceGroupName", "VMScaleSetName", "InstanceId" },
new object[] { resourceGroupName, vmScaleSetName, instanceId });
}
}
[Cmdlet(VerbsCommon.Get, "AzureRmVmssVM", DefaultParameterSetName = "DefaultParameter")]
[OutputType(typeof(PSVirtualMachineScaleSetVM))]
public partial class GetAzureRmVmssVM : ComputeAutomationBaseCmdlet
{
protected override void ProcessRecord()
{
AutoMapper.Mapper.AddProfile<ComputeAutomationAutoMapperProfile>();
ExecuteClientAction(() =>
{
string resourceGroupName = this.ResourceGroupName;
string vmScaleSetName = this.VMScaleSetName;
string instanceId = this.InstanceId;
if (!string.IsNullOrEmpty(resourceGroupName) && !string.IsNullOrEmpty(vmScaleSetName) && !string.IsNullOrEmpty(instanceId))
{
if (this.ParameterSetName.Equals("FriendMethod"))
{
var result = VirtualMachineScaleSetVMsClient.GetInstanceView(resourceGroupName, vmScaleSetName, instanceId);
var psObject = new PSVirtualMachineScaleSetVMInstanceView();
Mapper.Map<VirtualMachineScaleSetVMInstanceView, PSVirtualMachineScaleSetVMInstanceView>(result, psObject);
WriteObject(psObject);
}
else
{
var result = VirtualMachineScaleSetVMsClient.Get(resourceGroupName, vmScaleSetName, instanceId);
var psObject = new PSVirtualMachineScaleSetVM();
Mapper.Map<VirtualMachineScaleSetVM, PSVirtualMachineScaleSetVM>(result, psObject);
WriteObject(psObject);
}
}
else if (!string.IsNullOrEmpty(resourceGroupName) && !string.IsNullOrEmpty(vmScaleSetName))
{
var result = VirtualMachineScaleSetVMsClient.List(resourceGroupName, vmScaleSetName);
var resultList = result.ToList();
var nextPageLink = result.NextPageLink;
while (!string.IsNullOrEmpty(nextPageLink))
{
var pageResult = VirtualMachineScaleSetVMsClient.ListNext(nextPageLink);
foreach (var pageItem in pageResult)
{
resultList.Add(pageItem);
}
nextPageLink = pageResult.NextPageLink;
}
var psObject = new List<PSVirtualMachineScaleSetVMList>();
foreach (var r in resultList)
{
psObject.Add(Mapper.Map<VirtualMachineScaleSetVM, PSVirtualMachineScaleSetVMList>(r));
}
WriteObject(psObject, true);
}
});
}
[Parameter(
ParameterSetName = "DefaultParameter",
Position = 1,
Mandatory = false,
ValueFromPipelineByPropertyName = true,
ValueFromPipeline = false)]
[Parameter(
ParameterSetName = "FriendMethod",
Position = 1,
Mandatory = false,
ValueFromPipelineByPropertyName = true,
ValueFromPipeline = false)]
[AllowNull]
public string ResourceGroupName { get; set; }
[Parameter(
ParameterSetName = "DefaultParameter",
Position = 2,
Mandatory = false,
ValueFromPipelineByPropertyName = true,
ValueFromPipeline = false)]
[Parameter(
ParameterSetName = "FriendMethod",
Position = 2,
Mandatory = false,
ValueFromPipelineByPropertyName = true,
ValueFromPipeline = false)]
[Alias("Name")]
[AllowNull]
public string VMScaleSetName { get; set; }
[Parameter(
ParameterSetName = "DefaultParameter",
Position = 3,
Mandatory = false,
ValueFromPipelineByPropertyName = true,
ValueFromPipeline = false)]
[Parameter(
ParameterSetName = "FriendMethod",
Position = 3,
Mandatory = false,
ValueFromPipelineByPropertyName = true,
ValueFromPipeline = false)]
[AllowNull]
public string InstanceId { get; set; }
[Parameter(
ParameterSetName = "FriendMethod",
Mandatory = true)]
[AllowNull]
public SwitchParameter InstanceView { get; set; }
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;
namespace Contoso.API.Areas.HelpPage
{
/// <summary>
/// This class will create an object of a given type and populate it with sample data.
/// </summary>
    public class ObjectGenerator
    {
        // Number of elements/entries produced for generated collections and dictionaries.
        internal const int DefaultCollectionSize = 2;
        // Delegate-table generator for primitives, strings, dates, GUIDs, URIs, etc.
        private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();

        /// <summary>
        /// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
        /// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
        /// Complex types: POCO types.
        /// Nullables: <see cref="Nullable{T}"/>.
        /// Arrays: arrays of simple types or complex types.
        /// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
        /// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
        /// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
        /// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
        /// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
        /// </summary>
        /// <param name="type">The type.</param>
        /// <returns>An object of the given type.</returns>
        public object GenerateObject(Type type)
        {
            // Fresh reference map per top-level call; it breaks circular object graphs.
            return GenerateObject(type, new Dictionary<Type, object>());
        }

        /// <summary>
        /// Dispatches to the appropriate generator based on the shape of <paramref name="type"/>.
        /// Returns null when the type cannot be instantiated or generation fails for any reason.
        /// </summary>
        [SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
        private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            try
            {
                // Order matters: simple types first, then arrays, then generics,
                // then non-generic dictionary/collection interfaces, then enums and POCOs.
                if (SimpleTypeObjectGenerator.CanGenerateObject(type))
                {
                    return SimpleObjectGenerator.GenerateObject(type);
                }

                if (type.IsArray)
                {
                    return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type.IsGenericType)
                {
                    return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IDictionary))
                {
                    // Non-generic IDictionary is materialized as a Hashtable.
                    return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
                }

                if (typeof(IDictionary).IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IList) ||
                    type == typeof(IEnumerable) ||
                    type == typeof(ICollection))
                {
                    // Non-generic collection interfaces are materialized as an ArrayList.
                    return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
                }

                if (typeof(IList).IsAssignableFrom(type))
                {
                    return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type == typeof(IQueryable))
                {
                    return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
                }

                if (type.IsEnum)
                {
                    return GenerateEnum(type);
                }

                if (type.IsPublic || type.IsNestedPublic)
                {
                    return GenerateComplexObject(type, createdObjectReferences);
                }
            }
            catch
            {
                // Returns null if anything fails
                return null;
            }

            return null;
        }

        /// <summary>
        /// Handles closed generic types: Nullable, KeyValuePair, Tuple, the common
        /// generic collection/dictionary/queryable interfaces, and generic POCOs.
        /// </summary>
        private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
        {
            Type genericTypeDefinition = type.GetGenericTypeDefinition();
            if (genericTypeDefinition == typeof(Nullable<>))
            {
                return GenerateNullable(type, createdObjectReferences);
            }

            if (genericTypeDefinition == typeof(KeyValuePair<,>))
            {
                return GenerateKeyValuePair(type, createdObjectReferences);
            }

            if (IsTuple(genericTypeDefinition))
            {
                return GenerateTuple(type, createdObjectReferences);
            }

            Type[] genericArguments = type.GetGenericArguments();
            if (genericArguments.Length == 1)
            {
                if (genericTypeDefinition == typeof(IList<>) ||
                    genericTypeDefinition == typeof(IEnumerable<>) ||
                    genericTypeDefinition == typeof(ICollection<>))
                {
                    // Single-argument collection interfaces are materialized as List<T>.
                    Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
                    return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
                }

                if (genericTypeDefinition == typeof(IQueryable<>))
                {
                    return GenerateQueryable(type, collectionSize, createdObjectReferences);
                }

                // Concrete types implementing ICollection<T> are instantiated directly.
                Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
                if (closedCollectionType.IsAssignableFrom(type))
                {
                    return GenerateCollection(type, collectionSize, createdObjectReferences);
                }
            }

            if (genericArguments.Length == 2)
            {
                if (genericTypeDefinition == typeof(IDictionary<,>))
                {
                    // IDictionary<K,V> is materialized as Dictionary<K,V>.
                    Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
                    return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
                }

                // Concrete types implementing IDictionary<K,V> are instantiated directly.
                Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
                if (closedDictionaryType.IsAssignableFrom(type))
                {
                    return GenerateDictionary(type, collectionSize, createdObjectReferences);
                }
            }

            if (type.IsPublic || type.IsNestedPublic)
            {
                return GenerateComplexObject(type, createdObjectReferences);
            }

            return null;
        }

        /// <summary>
        /// Generates a Tuple by generating each generic argument; returns null only if
        /// every component failed to generate.
        /// </summary>
        private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = type.GetGenericArguments();
            object[] parameterValues = new object[genericArgs.Length];
            bool failedToCreateTuple = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < genericArgs.Length; i++)
            {
                parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
                failedToCreateTuple &= parameterValues[i] == null;
            }
            if (failedToCreateTuple)
            {
                return null;
            }
            object result = Activator.CreateInstance(type, parameterValues);
            return result;
        }

        // True when the open generic definition is one of the 1- to 8-arity Tuple types.
        private static bool IsTuple(Type genericTypeDefinition)
        {
            return genericTypeDefinition == typeof(Tuple<>) ||
                genericTypeDefinition == typeof(Tuple<,>) ||
                genericTypeDefinition == typeof(Tuple<,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
                genericTypeDefinition == typeof(Tuple<,,,,,,,>);
        }

        /// <summary>
        /// Generates a KeyValuePair by generating the key and value types independently;
        /// returns null only if both failed.
        /// </summary>
        private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
        {
            Type[] genericArgs = keyValuePairType.GetGenericArguments();
            Type typeK = genericArgs[0];
            Type typeV = genericArgs[1];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
            object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
            if (keyObject == null && valueObject == null)
            {
                // Failed to create key and values
                return null;
            }
            object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
            return result;
        }

        /// <summary>
        /// Generates a populated array of the element type; returns null if every element
        /// came back null (so callers can tell nothing useful was produced).
        /// </summary>
        private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = arrayType.GetElementType();
            Array result = Array.CreateInstance(type, size);
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                result.SetValue(element, i);
                areAllElementsNull &= element == null;
            }

            if (areAllElementsNull)
            {
                return null;
            }

            return result;
        }

        /// <summary>
        /// Generates and fills a dictionary instance via its reflected
        /// Add/TryAdd and Contains/ContainsKey methods. Non-generic dictionaries
        /// use object keys/values.
        /// </summary>
        private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type typeK = typeof(object);
            Type typeV = typeof(object);
            if (dictionaryType.IsGenericType)
            {
                Type[] genericArgs = dictionaryType.GetGenericArguments();
                typeK = genericArgs[0];
                typeV = genericArgs[1];
            }

            object result = Activator.CreateInstance(dictionaryType);
            MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
            MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
                if (newKey == null)
                {
                    // Cannot generate a valid key
                    return null;
                }

                // Skip duplicate keys rather than letting Add throw.
                bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
                if (!containsKey)
                {
                    object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
                    addMethod.Invoke(result, new object[] { newKey, newValue });
                }
            }

            return result;
        }

        // Returns the first declared value of the enum, or null for an empty enum.
        private static object GenerateEnum(Type enumType)
        {
            Array possibleValues = Enum.GetValues(enumType);
            if (possibleValues.Length > 0)
            {
                return possibleValues.GetValue(0);
            }
            return null;
        }

        /// <summary>
        /// Generates an IQueryable / IQueryable&lt;T&gt; by building a backing list
        /// (or object[] for the non-generic case) and wrapping it with AsQueryable.
        /// </summary>
        private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            bool isGeneric = queryableType.IsGenericType;
            object list;
            if (isGeneric)
            {
                Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
                list = GenerateCollection(listType, size, createdObjectReferences);
            }
            else
            {
                list = GenerateArray(typeof(object[]), size, createdObjectReferences);
            }
            if (list == null)
            {
                return null;
            }
            if (isGeneric)
            {
                // Invoke the generic Queryable.AsQueryable<T>(IEnumerable<T>) overload via reflection.
                Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
                MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
                return asQueryableMethod.Invoke(null, new[] { list });
            }

            return Queryable.AsQueryable((IEnumerable)list);
        }

        /// <summary>
        /// Generates and fills a collection instance via its reflected Add method;
        /// returns null if every generated element was null.
        /// </summary>
        private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = collectionType.IsGenericType ?
                collectionType.GetGenericArguments()[0] :
                typeof(object);
            object result = Activator.CreateInstance(collectionType);
            MethodInfo addMethod = collectionType.GetMethod("Add");
            bool areAllElementsNull = true;
            ObjectGenerator objectGenerator = new ObjectGenerator();
            for (int i = 0; i < size; i++)
            {
                object element = objectGenerator.GenerateObject(type, createdObjectReferences);
                addMethod.Invoke(result, new object[] { element });
                areAllElementsNull &= element == null;
            }

            if (areAllElementsNull)
            {
                return null;
            }

            return result;
        }

        // Nullable<T> is represented by a (possibly null) boxed value of the underlying T.
        private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
        {
            Type type = nullableType.GetGenericArguments()[0];
            ObjectGenerator objectGenerator = new ObjectGenerator();
            return objectGenerator.GenerateObject(type, createdObjectReferences);
        }

        /// <summary>
        /// Generates a POCO: default-constructs it, records it in the reference map
        /// (handling circular references), then populates public properties and fields.
        /// </summary>
        private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
        {
            object result = null;

            if (createdObjectReferences.TryGetValue(type, out result))
            {
                // The object has been created already, just return it. This will handle the circular reference case.
                return result;
            }

            if (type.IsValueType)
            {
                result = Activator.CreateInstance(type);
            }
            else
            {
                ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
                if (defaultCtor == null)
                {
                    // Cannot instantiate the type because it doesn't have a default constructor
                    return null;
                }

                result = defaultCtor.Invoke(new object[0]);
            }

            // Register before populating members so self-referential graphs terminate.
            createdObjectReferences.Add(type, result);
            SetPublicProperties(type, result, createdObjectReferences);
            SetPublicFields(type, result, createdObjectReferences);
            return result;
        }

        // Fills every writable public instance property with a generated value.
        private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (PropertyInfo property in properties)
            {
                if (property.CanWrite)
                {
                    object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
                    property.SetValue(obj, propertyValue, null);
                }
            }
        }

        // Fills every public instance field with a generated value.
        private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
        {
            FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
            ObjectGenerator objectGenerator = new ObjectGenerator();
            foreach (FieldInfo field in fields)
            {
                object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
                field.SetValue(obj, fieldValue);
            }
        }

        /// <summary>
        /// Table-driven generator for "simple" types; each call advances a counter so
        /// successive samples vary (e.g. "sample string 1", "sample string 2").
        /// </summary>
        private class SimpleTypeObjectGenerator
        {
            // Monotonic counter fed to the factory delegates. NOTE(review): not
            // thread-safe — presumably this class is used from a single thread; verify.
            private long _index = 0;
            private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();

            [SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
            private static Dictionary<Type, Func<long, object>> InitializeGenerators()
            {
                return new Dictionary<Type, Func<long, object>>
                {
                    { typeof(Boolean), index => true },
                    { typeof(Byte), index => (Byte)64 },
                    { typeof(Char), index => (Char)65 },
                    { typeof(DateTime), index => DateTime.Now },
                    { typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
                    { typeof(DBNull), index => DBNull.Value },
                    { typeof(Decimal), index => (Decimal)index },
                    { typeof(Double), index => (Double)(index + 0.1) },
                    { typeof(Guid), index => Guid.NewGuid() },
                    { typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
                    { typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
                    { typeof(Int64), index => (Int64)index },
                    { typeof(Object), index => new object() },
                    { typeof(SByte), index => (SByte)64 },
                    { typeof(Single), index => (Single)(index + 0.1) },
                    {
                        typeof(String), index =>
                        {
                            return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
                        }
                    },
                    {
                        typeof(TimeSpan), index =>
                        {
                            return TimeSpan.FromTicks(1234567);
                        }
                    },
                    { typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
                    { typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
                    { typeof(UInt64), index => (UInt64)index },
                    {
                        typeof(Uri), index =>
                        {
                            return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
                        }
                    },
                };
            }

            // True when a factory delegate exists for the exact type.
            public static bool CanGenerateObject(Type type)
            {
                return DefaultGenerators.ContainsKey(type);
            }

            public object GenerateObject(Type type)
            {
                return DefaultGenerators[type](++_index);
            }
        }
    }
}
| |
/*
* QUANTCONNECT.COM - Democratizing Finance, Empowering Individuals.
* Lean Algorithmic Trading Engine v2.0. Copyright 2014 QuantConnect Corporation.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using QuantConnect.Data.Market;
using QuantConnect.Interfaces;
using QuantConnect.Orders;
using QuantConnect.Orders.Fees;
using QuantConnect.Orders.Fills;
using QuantConnect.Orders.Slippage;
using QuantConnect.Securities;
namespace QuantConnect.Algorithm.CSharp
{
/// <summary>
/// Demonstration of using custom fee, slippage, fill, and buying power models for modelling transactions in backtesting.
/// QuantConnect allows you to model all orders as deeply and accurately as you need.
/// </summary>
/// <meta name="tag" content="trading and orders" />
/// <meta name="tag" content="transaction fees and slippage" />
/// <meta name="tag" content="custom buying power models" />
/// <meta name="tag" content="custom transaction models" />
/// <meta name="tag" content="custom slippage models" />
/// <meta name="tag" content="custom fee models" />
public class CustomModelsAlgorithm : QCAlgorithm, IRegressionAlgorithmDefinition
{
private Security _security;
private Symbol _spy;

/// <summary>
/// Registers SPY at hourly resolution and attaches the custom fee, fill,
/// slippage and buying power models defined below.
/// </summary>
public override void Initialize()
{
    // Backtest window: October 2013.
    SetStartDate(2013, 10, 01);
    SetEndDate(2013, 10, 31);

    var equity = AddEquity("SPY", Resolution.Hour);
    _security = equity;
    _spy = equity.Symbol;

    // Replace the default transaction models with the custom implementations.
    equity.SetFeeModel(new CustomFeeModel(this));
    equity.SetFillModel(new CustomFillModel(this));
    equity.SetSlippageModel(new CustomSlippageModel(this));
    equity.SetBuyingPowerModel(new CustomBuyingPowerModel(this));
}
/// <summary>
/// Flips the position on a simple calendar schedule: goes long after the 10th
/// and short after the 20th, but only while no orders are outstanding.
/// </summary>
public void OnData(TradeBars data)
{
    // Never stack new orders on top of open ones.
    if (Transactions.GetOpenOrders(_spy).Count != 0)
    {
        return;
    }

    var day = Time.Day;
    if (day > 10 && _security.Holdings.Quantity <= 0)
    {
        var quantity = CalculateOrderQuantity(_spy, .5m);
        Log($"MarketOrder: {quantity}");
        MarketOrder(_spy, quantity, asynchronous: true); // async needed for partial fill market orders
    }
    else if (day > 20 && _security.Holdings.Quantity >= 0)
    {
        var quantity = CalculateOrderQuantity(_spy, -.5m);
        Log($"MarketOrder: {quantity}");
        MarketOrder(_spy, quantity, asynchronous: true); // async needed for partial fill market orders
    }
}
/// <summary>
/// Fill model that fills market orders in random chunks, producing partial
/// fills until the tracked remaining quantity of each order is exhausted.
/// </summary>
public class CustomFillModel : ImmediateFillModel
{
    private readonly QCAlgorithm _algorithm;
    private readonly Random _random = new Random(387510346); // seed it for reproducibility
    private readonly Dictionary<long, decimal> _absoluteRemainingByOrderId = new Dictionary<long, decimal>();

    public CustomFillModel(QCAlgorithm algorithm)
    {
        _algorithm = algorithm;
    }

    public override OrderEvent MarketFill(Security asset, MarketOrder order)
    {
        // First sighting of this order: start tracking its full absolute quantity.
        decimal remaining;
        if (!_absoluteRemainingByOrderId.TryGetValue(order.Id, out remaining))
        {
            remaining = order.AbsoluteQuantity;
            _absoluteRemainingByOrderId.Add(order.Id, order.AbsoluteQuantity);
        }

        var fill = base.MarketFill(asset, order);

        // Fill a random chunk, capped at whatever is still outstanding.
        var chunk = (int) (Math.Min(remaining, _random.Next(0, 2*(int)order.AbsoluteQuantity)));
        fill.FillQuantity = Math.Sign(order.Quantity) * chunk;

        if (remaining == chunk)
        {
            // Order completely filled; stop tracking it.
            fill.Status = OrderStatus.Filled;
            _absoluteRemainingByOrderId.Remove(order.Id);
        }
        else
        {
            _absoluteRemainingByOrderId[order.Id] = remaining - chunk;
            fill.Status = OrderStatus.PartiallyFilled;
        }

        _algorithm.Log($"CustomFillModel: {fill}");
        return fill;
    }
}
/// <summary>
/// Fee model charging 0.001% of traded notional with a $1 minimum, in USD.
/// </summary>
public class CustomFeeModel : FeeModel
{
    private readonly QCAlgorithm _algorithm;

    public CustomFeeModel(QCAlgorithm algorithm)
    {
        _algorithm = algorithm;
    }

    public override OrderFee GetOrderFee(OrderFeeParameters parameters)
    {
        // custom fee math: proportional fee floored at $1
        var notionalFee = parameters.Security.Price*parameters.Order.AbsoluteQuantity*0.00001m;
        var fee = Math.Max(1m, notionalFee);
        _algorithm.Log($"CustomFeeModel: {fee}");
        return new OrderFee(new CashAmount(fee, "USD"));
    }
}
/// <summary>
/// Slippage model that grows with the logarithm of the order size,
/// scaled by a basis-point fraction of the asset price.
/// </summary>
public class CustomSlippageModel : ISlippageModel
{
    private readonly QCAlgorithm _algorithm;

    public CustomSlippageModel(QCAlgorithm algorithm)
    {
        _algorithm = algorithm;
    }

    public decimal GetSlippageApproximation(Security asset, Order order)
    {
        // custom slippage math
        var slippage = asset.Price*0.0001m*(decimal) Math.Log10(2*(double) order.AbsoluteQuantity);
        _algorithm.Log($"CustomSlippageModel: {slippage}");
        return slippage;
    }
}
/// <summary>
/// Buying power model that unconditionally approves every order
/// (assumes there is always enough buying power).
/// </summary>
public class CustomBuyingPowerModel : BuyingPowerModel
{
    private readonly QCAlgorithm _algorithm;

    public CustomBuyingPowerModel(QCAlgorithm algorithm)
    {
        _algorithm = algorithm;
    }

    public override HasSufficientBuyingPowerForOrderResult HasSufficientBuyingPowerForOrder(
        HasSufficientBuyingPowerForOrderParameters parameters)
    {
        var result = new HasSufficientBuyingPowerForOrderResult(true);
        _algorithm.Log($"CustomBuyingPowerModel: {result.IsSufficient}");
        return result;
    }
}
/// <summary>
/// This is used by the regression test system to indicate if the open source Lean repository has the required data to run this algorithm.
/// </summary>
public bool CanRunLocally { get; } = true;
/// <summary>
/// This is used by the regression test system to indicate which languages this algorithm is written in.
/// </summary>
public Language[] Languages { get; } = { Language.CSharp, Language.Python };
/// <summary>
/// This is used by the regression test system to indicate what the expected statistics are from running the algorithm
/// </summary>
// NOTE(review): values appear to be recorded from a reference backtest run; any
// change to the models above is expected to change them — confirm the
// re-recording procedure before editing this table.
public Dictionary<string, string> ExpectedStatistics => new Dictionary<string, string>
{
    {"Total Trades", "62"},
    {"Average Win", "0.11%"},
    {"Average Loss", "-0.06%"},
    {"Compounding Annual Return", "-7.236%"},
    {"Drawdown", "2.400%"},
    {"Expectancy", "-0.187"},
    {"Net Profit", "-0.629%"},
    {"Sharpe Ratio", "-1.281"},
    {"Probabilistic Sharpe Ratio", "21.874%"},
    {"Loss Rate", "70%"},
    {"Win Rate", "30%"},
    {"Profit-Loss Ratio", "1.73"},
    {"Alpha", "-0.096"},
    {"Beta", "0.122"},
    {"Annual Standard Deviation", "0.04"},
    {"Annual Variance", "0.002"},
    {"Information Ratio", "-4.126"},
    {"Tracking Error", "0.102"},
    {"Treynor Ratio", "-0.417"},
    {"Total Fees", "$62.25"},
    {"Estimated Strategy Capacity", "$52000000.00"},
    {"Lowest Capacity Asset", "SPY R735QTJ8XC9X"},
    {"Fitness Score", "0.16"},
    {"Kelly Criterion Estimate", "0"},
    {"Kelly Criterion Probability Value", "0"},
    {"Sortino Ratio", "-2.59"},
    {"Return Over Maximum Drawdown", "-3.337"},
    {"Portfolio Turnover", "2.562"},
    {"Total Insights Generated", "0"},
    {"Total Insights Closed", "0"},
    {"Total Insights Analysis Completed", "0"},
    {"Long Insight Count", "0"},
    {"Short Insight Count", "0"},
    {"Long/Short Ratio", "100%"},
    {"Estimated Monthly Alpha Value", "$0"},
    {"Total Accumulated Estimated Alpha Value", "$0"},
    {"Mean Population Estimated Insight Value", "$0"},
    {"Mean Population Direction", "0%"},
    {"Mean Population Magnitude", "0%"},
    {"Rolling Averaged Population Direction", "0%"},
    {"Rolling Averaged Population Magnitude", "0%"},
    {"OrderListHash", "1118fb362bfe261323a6b496d50bddde"}
};
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using MySql.Data.MySqlClient;
using OpenMetaverse;
using System;
using System.Collections.Generic;
using System.Data;
namespace OpenSim.Data.MySQL
{
/// <summary>
/// A MySQL Interface for the Asset Server
/// </summary>
public class MySQLXInventoryData : IXInventoryData
{
    // Handlers for the two inventory tables; all operations delegate to these.
    private MySqlFolderHandler m_Folders;
    private MySqlItemHandler m_Items;

    /// <summary>
    /// Opens handlers over the "inventoryfolders" and "inventoryitems" tables.
    /// </summary>
    /// <param name="conn">MySQL connection string.</param>
    /// <param name="realm">Unused here; the table names are fixed.</param>
    public MySQLXInventoryData(string conn, string realm)
    {
        m_Folders = new MySqlFolderHandler(
            conn, "inventoryfolders", "InventoryStore");
        m_Items = new MySqlItemHandler(
            conn, "inventoryitems", String.Empty);
    }

    /// <summary>Returns folders matching all field/value pairs.</summary>
    public XInventoryFolder[] GetFolders(string[] fields, string[] vals)
    {
        return m_Folders.Get(fields, vals);
    }

    /// <summary>Returns items matching all field/value pairs.</summary>
    public XInventoryItem[] GetItems(string[] fields, string[] vals)
    {
        return m_Items.Get(fields, vals);
    }

    /// <summary>Stores a folder, truncating the name to the 64-char column limit.</summary>
    public bool StoreFolder(XInventoryFolder folder)
    {
        if (folder.folderName.Length > 64)
            folder.folderName = folder.folderName.Substring(0, 64);
        return m_Folders.Store(folder);
    }

    /// <summary>
    /// Stores an item, truncating name/description to the 64/128-char column limits.
    /// </summary>
    public bool StoreItem(XInventoryItem item)
    {
        if (item.inventoryName.Length > 64)
            item.inventoryName = item.inventoryName.Substring(0, 64);
        if (item.inventoryDescription.Length > 128)
            item.inventoryDescription = item.inventoryDescription.Substring(0, 128);
        return m_Items.Store(item);
    }

    public bool DeleteFolders(string field, string val)
    {
        return m_Folders.Delete(field, val);
    }

    public bool DeleteFolders(string[] fields, string[] vals)
    {
        return m_Folders.Delete(fields, vals);
    }

    public bool DeleteItems(string field, string val)
    {
        return m_Items.Delete(field, val);
    }

    public bool DeleteItems(string[] fields, string[] vals)
    {
        return m_Items.Delete(fields, vals);
    }

    public bool MoveItem(string id, string newParent)
    {
        return m_Items.MoveItem(id, newParent);
    }

    public bool MoveFolder(string id, string newParent)
    {
        return m_Folders.MoveFolder(id, newParent);
    }

    /// <summary>Returns the active gesture items belonging to the given avatar.</summary>
    public XInventoryItem[] GetActiveGestures(UUID principalID)
    {
        return m_Items.GetActiveGestures(principalID);
    }
}
/// <summary>
/// Table handler for inventory items. Keeps the owning folder's version counter
/// in sync whenever items are stored, moved or deleted.
/// </summary>
public class MySqlItemHandler : MySqlInventoryHandler<XInventoryItem>
{
    public MySqlItemHandler(string c, string t, string m) :
        base(c, t, m)
    {
    }

    /// <summary>Deletes items matching a single field/value pair.</summary>
    /// <returns>false if nothing matched or the delete failed.</returns>
    public override bool Delete(string field, string val)
    {
        XInventoryItem[] retrievedItems = Get(new string[] { field }, new string[] { val });
        if (retrievedItems.Length == 0)
            return false;

        if (!base.Delete(field, val))
            return false;

        // Don't increment folder version here since Delete(string, string) calls Delete(string[], string[])
        // IncrementFolderVersion(retrievedItems[0].parentFolderID);

        return true;
    }

    /// <summary>
    /// Deletes items matching all field/value pairs and bumps the version of
    /// every folder that lost an item.
    /// </summary>
    public override bool Delete(string[] fields, string[] vals)
    {
        XInventoryItem[] retrievedItems = Get(fields, vals);
        if (retrievedItems.Length == 0)
            return false;

        if (!base.Delete(fields, vals))
            return false;

        // Collect the distinct parent folders so each is incremented exactly once.
        HashSet<UUID> deletedItemFolderUUIDs = new HashSet<UUID>();
        Array.ForEach<XInventoryItem>(retrievedItems, i => deletedItemFolderUUIDs.Add(i.parentFolderID));

        foreach (UUID deletedItemFolderUUID in deletedItemFolderUUIDs)
            IncrementFolderVersion(deletedItemFolderUUID);

        return true;
    }

    /// <summary>
    /// Reparents an item and bumps the versions of both the old and new folders.
    /// </summary>
    public bool MoveItem(string id, string newParent)
    {
        XInventoryItem[] retrievedItems = Get(new string[] { "inventoryID" }, new string[] { id });
        if (retrievedItems.Length == 0)
            return false;

        UUID oldParent = retrievedItems[0].parentFolderID;

        using (MySqlCommand cmd = new MySqlCommand())
        {
            cmd.CommandText = String.Format("update {0} set parentFolderID = ?ParentFolderID where inventoryID = ?InventoryID", m_Realm);
            cmd.Parameters.AddWithValue("?ParentFolderID", newParent);
            cmd.Parameters.AddWithValue("?InventoryID", id);

            if (ExecuteNonQuery(cmd) == 0)
                return false;
        }

        IncrementFolderVersion(oldParent);
        IncrementFolderVersion(newParent);

        return true;
    }

    /// <summary>Returns the avatar's gesture items whose "active" flag (bit 0) is set.</summary>
    public XInventoryItem[] GetActiveGestures(UUID principalID)
    {
        using (MySqlCommand cmd = new MySqlCommand())
        {
            // BUGFIX: the format string previously hardcoded "inventoryitems"
            // while still passing m_Realm as an (ignored) argument; use the
            // placeholder so the handler's configured table is queried.
            cmd.CommandText = String.Format("select * from {0} where avatarId = ?uuid and assetType = ?type and flags & 1", m_Realm);
            cmd.Parameters.AddWithValue("?uuid", principalID.ToString());
            cmd.Parameters.AddWithValue("?type", (int)AssetType.Gesture);

            return DoQuery(cmd);
        }
    }

    /// <summary>Stores an item and bumps its parent folder's version.</summary>
    public override bool Store(XInventoryItem item)
    {
        if (!base.Store(item))
            return false;

        IncrementFolderVersion(item.parentFolderID);

        return true;
    }
}
/// <summary>
/// Table handler for inventory folders. Keeps parent-folder version counters
/// in sync whenever folders are stored or moved.
/// </summary>
public class MySqlFolderHandler : MySqlInventoryHandler<XInventoryFolder>
{
    public MySqlFolderHandler(string c, string t, string m) :
        base(c, t, m)
    {
    }

    /// <summary>
    /// Reparents a folder and bumps the versions of both the old and new parents.
    /// </summary>
    public bool MoveFolder(string id, string newParentFolderID)
    {
        XInventoryFolder[] existing = Get(new string[] { "folderID" }, new string[] { id });
        if (existing.Length == 0)
            return false;

        UUID previousParent = existing[0].parentFolderID;

        using (MySqlCommand cmd = new MySqlCommand())
        {
            cmd.CommandText = String.Format(
                "update {0} set parentFolderID = ?ParentFolderID where folderID = ?folderID", m_Realm);
            cmd.Parameters.AddWithValue("?ParentFolderID", newParentFolderID);
            cmd.Parameters.AddWithValue("?folderID", id);

            if (ExecuteNonQuery(cmd) == 0)
                return false;
        }

        IncrementFolderVersion(previousParent);
        IncrementFolderVersion(newParentFolderID);

        return true;
    }

    /// <summary>Stores a folder and bumps its parent folder's version.</summary>
    public override bool Store(XInventoryFolder folder)
    {
        if (!base.Store(folder))
            return false;

        IncrementFolderVersion(folder.parentFolderID);

        return true;
    }
}
/// <summary>
/// Shared base for the inventory table handlers; provides folder version bumping.
/// </summary>
public class MySqlInventoryHandler<T> : MySQLGenericTableHandler<T> where T : class, new()
{
    public MySqlInventoryHandler(string c, string t, string m)
        : base(c, t, m)
    {
    }

    /// <summary>Bumps the version counter of the given folder.</summary>
    protected bool IncrementFolderVersion(UUID folderID)
    {
        return IncrementFolderVersion(folderID.ToString());
    }

    /// <summary>
    /// Bumps the version counter of the given folder.
    /// </summary>
    /// <returns>false if the update threw; true otherwise (including when no row matched).</returns>
    protected bool IncrementFolderVersion(string folderID)
    {
        using (MySqlConnection dbcon = new MySqlConnection(m_connectionString))
        {
            dbcon.Open();

            using (MySqlCommand cmd = new MySqlCommand())
            {
                cmd.Connection = dbcon;
                // Folder versions always live in inventoryfolders, regardless of
                // which table (m_Realm) this handler manages.
                // (Removed a no-op String.Format wrapper and redundant explicit
                // Dispose/Close calls — the using blocks already handle cleanup.)
                cmd.CommandText = "update inventoryfolders set version=version+1 where folderID = ?folderID";
                cmd.Parameters.AddWithValue("?folderID", folderID);

                try
                {
                    cmd.ExecuteNonQuery();
                }
                catch (Exception)
                {
                    // Best-effort: a failed version bump is reported, not thrown.
                    return false;
                }
            }
        }

        return true;
    }
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Xml;
using Microsoft.Build.Construction;
using Microsoft.Build.Evaluation;
using Microsoft.Build.Execution;
using Microsoft.Build.Framework;
using ForwardingLoggerRecord = Microsoft.Build.Logging.ForwardingLoggerRecord;
using Microsoft.Build.Shared;
using Xunit;
using Xunit.Abstractions;
namespace Microsoft.Build.UnitTests.OM.Instance
{
/// <summary>
/// Tests for ProjectInstance public members
/// </summary>
public class ProjectInstance_Tests
{
// xUnit's per-test output writer, used by tests that log build output.
private readonly ITestOutputHelper _testOutput;

/// <summary>Captures the xUnit output helper injected per test class instance.</summary>
public ProjectInstance_Tests(ITestOutputHelper output)
{
    _testOutput = output;
}
/// <summary>
/// Verify that a cloned off project instance can see environment variables
/// (here the "username" variable surfaces as a property).
/// </summary>
[Fact]
public void CreateProjectInstancePassesEnvironment()
{
    var project = new Project();
    var instance = project.CreateProjectInstance();

    Assert.True(instance.GetPropertyValue("username") != null);
}
/// <summary>
/// Read off properties from the sample project instance.
/// </summary>
[Fact]
public void PropertiesAccessors()
{
    var instance = GetSampleProjectInstance();

    Assert.Equal("v1", instance.GetPropertyValue("p1"));
    Assert.Equal("v2X", instance.GetPropertyValue("p2"));
}
/// <summary>
/// Read off items from the sample project instance and check their
/// includes and metadata.
/// </summary>
[Fact]
public void ItemsAccessors()
{
    var instance = GetSampleProjectInstance();
    var itemList = Helpers.MakeList(instance.GetItems("i"));

    Assert.Equal(3, itemList.Count);
    Assert.Equal("i", itemList[0].ItemType);
    Assert.Equal("i0", itemList[0].EvaluatedInclude);
    // Missing metadata reads as empty via the value accessor but null via GetMetadata.
    Assert.Equal(String.Empty, itemList[0].GetMetadataValue("m"));
    Assert.Null(itemList[0].GetMetadata("m"));
    Assert.Equal("i1", itemList[1].EvaluatedInclude);
    Assert.Equal("m1", itemList[1].GetMetadataValue("m"));
    Assert.Equal("m1", itemList[1].GetMetadata("m").EvaluatedValue);
    Assert.Equal("v1", itemList[2].EvaluatedInclude);
}
/// <summary>
/// Adding an item without metadata yields an item with no metadata,
/// both on the returned object and via the Items enumeration.
/// </summary>
[Fact]
public void AddItemWithoutMetadata()
{
    var instance = GetEmptyProjectInstance();
    var added = instance.AddItem("i", "i1");

    Assert.Equal("i", added.ItemType);
    Assert.Equal("i1", added.EvaluatedInclude);
    Assert.False(added.Metadata.GetEnumerator().MoveNext());

    foreach (var item in instance.Items)
    {
        Assert.Equal("i1", item.EvaluatedInclude);
        Assert.False(item.Metadata.GetEnumerator().MoveNext());
    }
}
/// <summary>
/// Adding an item with an escaped include ("%3b" = ';') unescapes it
/// in the evaluated include.
/// </summary>
[Fact]
public void AddItemWithoutMetadata_Escaped()
{
    var instance = GetEmptyProjectInstance();
    var added = instance.AddItem("i", "i%3b1");

    Assert.Equal("i", added.ItemType);
    Assert.Equal("i;1", added.EvaluatedInclude);
    Assert.False(added.Metadata.GetEnumerator().MoveNext());

    foreach (var item in instance.Items)
    {
        Assert.Equal("i;1", item.EvaluatedInclude);
        Assert.False(item.Metadata.GetEnumerator().MoveNext());
    }
}
/// <summary>
/// Adding an item with metadata stores all metadata (unescaping values)
/// and returns the same object that the item collections expose.
/// </summary>
[Fact]
public void AddItemWithMetadata()
{
    var instance = GetEmptyProjectInstance();
    var metadata = new List<KeyValuePair<string, string>>
    {
        new KeyValuePair<string, string>("m", "m1"),
        new KeyValuePair<string, string>("n", "n1"),
        new KeyValuePair<string, string>("o", "o%40"), // %40 = '@'
    };

    var returned = instance.AddItem("i", "i1", metadata);

    Assert.Same(returned, Helpers.MakeList(instance.GetItems("i"))[0]);

    foreach (var item in instance.Items)
    {
        Assert.Same(returned, item);
        Assert.Equal("i1", item.EvaluatedInclude);

        var metadataOut = Helpers.MakeList(item.Metadata);
        Assert.Equal(3, metadataOut.Count);
        Assert.Equal("m1", item.GetMetadataValue("m"));
        Assert.Equal("n1", item.GetMetadataValue("n"));
        Assert.Equal("o@", item.GetMetadataValue("o"));
    }
}
/// <summary>
/// Adding an item with a null item type is rejected.
/// </summary>
[Fact]
public void AddItemInvalidNullItemType()
{
    var instance = GetEmptyProjectInstance();

    Assert.Throws<ArgumentNullException>(() => { instance.AddItem(null, "i1"); });
}
/// <summary>
/// Adding an item with an empty item type is rejected.
/// </summary>
[Fact]
public void AddItemInvalidEmptyItemType()
{
    var instance = GetEmptyProjectInstance();

    Assert.Throws<ArgumentException>(() => { instance.AddItem(String.Empty, "i1"); });
}
/// <summary>
/// Adding an item with a null include is rejected.
/// </summary>
[Fact]
public void AddItemInvalidNullInclude()
{
    var instance = GetEmptyProjectInstance();

    Assert.Throws<ArgumentNullException>(() => { instance.AddItem("i", null); });
}
/// <summary>
/// Passing null metadata is allowed and results in an item with no metadata.
/// </summary>
[Fact]
public void AddItemNullMetadata()
{
    var instance = GetEmptyProjectInstance();
    var added = instance.AddItem("i", "i1", null);

    Assert.False(added.Metadata.GetEnumerator().MoveNext());
}
/// <summary>
/// It's okay to set properties that are also global properties, masking their value;
/// clearing such a property must not re-expose the original global value.
/// </summary>
[Fact]
public void SetGlobalPropertyOnInstance()
{
    var globalProperties = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase) { { "p", "p1" } };
    var project = new Project(ProjectRootElement.Create(), globalProperties, null);
    var instance = project.CreateProjectInstance();

    instance.SetProperty("p", "p2");
    Assert.Equal("p2", instance.GetPropertyValue("p"));

    // And clearing it should not expose the original global property value
    instance.SetProperty("p", "");
    Assert.Equal("", instance.GetPropertyValue("p"));
}
/// <summary>
/// DeepCopy produces a distinct ProjectInstance object.
/// </summary>
[Fact]
public void CloneProjectItself()
{
    var original = GetSampleProjectInstance();
    var clone = original.DeepCopy();

    Assert.NotSame(original, clone);
}
/// <summary>
/// DeepCopy clones properties: mutating the original must not affect the clone.
/// </summary>
[Fact]
public void CloneProperties()
{
    var original = GetSampleProjectInstance();
    var clone = original.DeepCopy();

    Assert.NotSame(original.GetProperty("p1"), clone.GetProperty("p1"));

    var updatedProperty = original.SetProperty("p1", "v1b");
    Assert.Same(updatedProperty, original.GetProperty("p1"));
    Assert.Equal("v1b", original.GetPropertyValue("p1"));
    Assert.Equal("v1", clone.GetPropertyValue("p1"));
}
/// <summary>
/// Passing an item list into another list should copy the metadata too
/// (including unescaping of encoded values, e.g. %3b = ';').
/// </summary>
[Fact]
public void ItemEvaluationCopiesMetadata()
{
    string content = @"
                    <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' >
                        <ItemGroup>
                            <i Include='i1'>
                                <m>m1</m>
                                <n>n%3b%3b</n>
                            </i>
                            <j Include='@(i)'/>
                        </ItemGroup>
                    </Project>";

    var project = GetProjectInstance(content);
    var jItems = Helpers.MakeList(project.GetItems("j"));

    Assert.Single(jItems);
    Assert.Equal("i1", jItems[0].EvaluatedInclude);
    Assert.Equal("m1", jItems[0].GetMetadataValue("m"));
    Assert.Equal("n;;", jItems[0].GetMetadataValue("n"));
}
/// <summary>
/// Wildcards are expanded in item groups inside targets, and the evaluatedinclude
/// is not the wildcard itself!
/// </summary>
[Fact]
[Trait("Category", "serialize")]
public void WildcardsInsideTargets()
{
    string directory = null;
    string file1 = null;
    string file2 = null;
    string file3 = null;

    try
    {
        directory = Path.Combine(Path.GetTempPath(), "WildcardsInsideTargets");
        Directory.CreateDirectory(directory);

        file1 = Path.Combine(directory, "a.exe");
        file2 = Path.Combine(directory, "b.exe");
        file3 = Path.Combine(directory, "c.bat"); // deliberately excluded by *.exe
        File.WriteAllText(file1, String.Empty);
        File.WriteAllText(file2, String.Empty);
        File.WriteAllText(file3, String.Empty);

        string path = Path.Combine(directory, "*.exe");

        string content = @"
                    <Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' >
                        <Target Name='t'>
                          <ItemGroup>
                            <i Include='" + path + @"'/>
                          </ItemGroup>
                        </Target>
                    </Project>";

        ProjectInstance projectInstance = GetProjectInstance(content);
        projectInstance.Build();

        // Only the two .exe files match; the wildcard itself must not appear.
        Assert.Equal(2, Helpers.MakeList(projectInstance.GetItems("i")).Count);
        Assert.Equal(file1, Helpers.MakeList(projectInstance.GetItems("i"))[0].EvaluatedInclude);
        Assert.Equal(file2, Helpers.MakeList(projectInstance.GetItems("i"))[1].EvaluatedInclude);
    }
    finally
    {
        // BUGFIX: guard against nulls — if setup failed before these were
        // assigned, File.Delete(null) would throw and mask the real failure.
        if (file1 != null) File.Delete(file1);
        if (file2 != null) File.Delete(file2);
        if (file3 != null) File.Delete(file3);
        if (directory != null) FileUtilities.DeleteWithoutTrailingBackslash(directory);
    }
}
/// <summary>
/// DeepCopy clones items: adding to the original must not grow the clone.
/// </summary>
[Fact]
public void CloneItems()
{
    var original = GetSampleProjectInstance();
    var clone = original.DeepCopy();

    Assert.NotSame(Helpers.MakeList(original.GetItems("i"))[0], Helpers.MakeList(clone.GetItems("i"))[0]);

    original.AddItem("i", "i3");
    Assert.Equal(4, Helpers.MakeList(original.GetItems("i")).Count);
    Assert.Equal(3, Helpers.MakeList(clone.GetItems("i")).Count);
}
/// <summary>
/// Null target in array should give ArgumentNullException
/// </summary>
[Fact]
public void BuildNullTargetInArray()
{
    var instance = new ProjectInstance(ProjectRootElement.Create());

    Assert.Throws<ArgumentNullException>(() => { instance.Build(new string[] { null }, null); });
}
/// <summary>
/// Null logger in array should give ArgumentNullException
/// </summary>
[Fact]
public void BuildNullLoggerInArray()
{
    var instance = new ProjectInstance(ProjectRootElement.Create());

    Assert.Throws<ArgumentNullException>(() => { instance.Build("t", new ILogger[] { null }); });
}
/// <summary>
/// Null remote logger in array should give ArgumentNullException
/// </summary>
[Fact]
public void BuildNullRemoteLoggerInArray()
{
    var instance = new ProjectInstance(ProjectRootElement.Create());

    Assert.Throws<ArgumentNullException>(() => { instance.Build("t", null, new ForwardingLoggerRecord[] { null }); });
}
/// <summary>
/// Null target name should imply the default target
/// </summary>
[Fact]
[Trait("Category", "serialize")]
public void BuildNullTargetNameIsDefaultTarget()
{
    var root = ProjectRootElement.Create();
    root.AddTarget("t").AddTask("Message").SetParameter("Text", "[OK]");

    var instance = new ProjectInstance(root);
    var logger = new MockLogger();

    // Typed null so the Build(string, ...) overload is selected.
    string targetName = null;
    instance.Build(targetName, new ILogger[] { logger });

    logger.AssertLogContains("[OK]");
}
/// <summary>
/// Build system should correctly reset itself between builds of
/// project instances.
/// </summary>
[Fact]
[Trait("Category", "serialize")]
public void BuildProjectInstancesConsecutively()
{
    ProjectInstance instance1 = new Project().CreateProjectInstance();

    BuildRequestData buildRequestData1 = new BuildRequestData(instance1, new string[] { });

    BuildManager.DefaultBuildManager.Build(new BuildParameters(), buildRequestData1);

    // NOTE(review): the result of this call is deliberately discarded and the
    // second request reuses instance1 — presumably to exercise the build
    // manager's reset between requests. Confirm this is intentional and not a
    // leftover from an instance2 that was meant to be built.
    new Project().CreateProjectInstance();

    BuildRequestData buildRequestData2 = new BuildRequestData(instance1, new string[] { });

    BuildManager.DefaultBuildManager.Build(new BuildParameters(), buildRequestData2);
}
/// <summary>
/// Verifies that the built-in metadata for specialized ProjectInstances is present when items are the simplest (no macros or wildcards).
/// </summary>
[Fact]
public void CreateProjectInstanceWithItemsContainingProjects()
{
    const string CapturedMetadataName = "DefiningProjectFullPath";
    var pc = new ProjectCollection();
    var projA = ProjectRootElement.Create(pc);
    var projB = ProjectRootElement.Create(pc);
    projA.FullPath = Path.Combine(Path.GetTempPath(), "a.proj");
    projB.FullPath = Path.Combine(Path.GetTempPath(), "b.proj");
    // b.proj imports a.proj, so items from both end up in b's instance.
    projB.AddImport("a.proj");
    projA.AddItem("Compile", "aItem.cs");
    projB.AddItem("Compile", "bItem.cs");
    var projBEval = new Project(projB, null, null, pc);
    var projBInstance = projBEval.CreateProjectInstance();
    var projBInstanceItem = projBInstance.GetItemsByItemTypeAndEvaluatedInclude("Compile", "bItem.cs").Single();
    var projAInstanceItem = projBInstance.GetItemsByItemTypeAndEvaluatedInclude("Compile", "aItem.cs").Single();
    // Each item reports the project file that declared it, not the one being built.
    Assert.Equal(projB.FullPath, projBInstanceItem.GetMetadataValue(CapturedMetadataName));
    Assert.Equal(projA.FullPath, projAInstanceItem.GetMetadataValue(CapturedMetadataName));

    // Although GetMetadataValue returns non-null, GetMetadata returns null...
    Assert.Null(projAInstanceItem.GetMetadata(CapturedMetadataName));

    // .. Just like built-in metadata does: (this segment just demonstrates similar functionality -- it's not meant to test built-in metadata)
    Assert.NotNull(projAInstanceItem.GetMetadataValue("Identity"));
    Assert.Null(projAInstanceItem.GetMetadata("Identity"));
    Assert.True(projAInstanceItem.HasMetadata(CapturedMetadataName));
    Assert.False(projAInstanceItem.Metadata.Any());
    Assert.Contains(CapturedMetadataName, projAInstanceItem.MetadataNames);
    Assert.Equal(projAInstanceItem.MetadataCount, projAInstanceItem.MetadataNames.Count);
}
/// <summary>
/// Verifies that the built-in metadata for specialized ProjectInstances is present when items are based on wildcards in the construction model.
/// </summary>
[Fact]
public void DefiningProjectItemBuiltInMetadataFromWildcards()
{
    const string CapturedMetadataName = "DefiningProjectFullPath";
    var pc = new ProjectCollection();
    var projA = ProjectRootElement.Create(pc);
    var projB = ProjectRootElement.Create(pc);

    // NOTE(review): GetTempFileName + Delete + CreateDirectory reserves a unique
    // directory name, but is racy and the directory is never cleaned up —
    // consider a try/finally with a delete, as WildcardsInsideTargets does.
    string tempDir = Path.GetTempFileName();
    File.Delete(tempDir);
    Directory.CreateDirectory(tempDir);
    File.Create(Path.Combine(tempDir, "aItem.cs")).Dispose();

    projA.FullPath = Path.Combine(tempDir, "a.proj");
    projB.FullPath = Path.Combine(tempDir, "b.proj");
    projB.AddImport("a.proj");
    // Compile is declared via wildcard in a.proj; CompileB references it from b.proj.
    projA.AddItem("Compile", "*.cs");
    projB.AddItem("CompileB", "@(Compile)");
    var projBEval = new Project(projB, null, null, pc);
    var projBInstance = projBEval.CreateProjectInstance();
    var projAInstanceItem = projBInstance.GetItemsByItemTypeAndEvaluatedInclude("Compile", "aItem.cs").Single();
    var projBInstanceItem = projBInstance.GetItemsByItemTypeAndEvaluatedInclude("CompileB", "aItem.cs").Single();
    // Defining project follows where the item element was declared.
    Assert.Equal(projA.FullPath, projAInstanceItem.GetMetadataValue(CapturedMetadataName));
    Assert.Equal(projB.FullPath, projBInstanceItem.GetMetadataValue(CapturedMetadataName));
    Assert.True(projAInstanceItem.HasMetadata(CapturedMetadataName));
    Assert.False(projAInstanceItem.Metadata.Any());
    Assert.Contains(CapturedMetadataName, projAInstanceItem.MetadataNames);
    Assert.Equal(projAInstanceItem.MetadataCount, projAInstanceItem.MetadataNames.Count);
}
/// <summary>
/// Validate that the DefiningProject* metadata is set to the correct project based on a variety
/// of means of item creation.
/// </summary>
// Item-type legend: A/A2/B declared in a.proj; C/C2/D declared in b.proj;
// E..L re-reference imported items from a.proj (statically and inside a target);
// M..T re-reference a.proj's items from within b.proj; U/V/W/X are wildcard
// includes evaluated in each project. The Validate target emits a warning for
// every item whose DefiningProjectName is not the expected project, and the
// assertion below requires zero warnings.
[Fact]
public void TestDefiningProjectMetadata()
{
    string projectA = Path.Combine(ObjectModelHelpers.TempProjectDir, "a.proj");
    string projectB = Path.Combine(ObjectModelHelpers.TempProjectDir, "b.proj");

    string includeFileA = Path.Combine(ObjectModelHelpers.TempProjectDir, "aaa4.cs");
    string includeFileB = Path.Combine(ObjectModelHelpers.TempProjectDir, "bbb4.cs");

    // Backtick-delimited attributes are normalized by CleanupFileContents below.
    string contentsA =
        @"<?xml version=`1.0` encoding=`utf-8`?>
<Project ToolsVersion=`msbuilddefaulttoolsversion` DefaultTargets=`Validate` xmlns=`msbuildnamespace`>
<ItemGroup>
<A Include=`aaaa.cs` />
<A2 Include=`aaa2.cs` />
<A2 Include=`aaa3.cs`>
<Foo>Bar</Foo>
</A2>
</ItemGroup>
<Import Project=`b.proj` />
<ItemGroup>
<E Include=`@(C)` />
<F Include=`@(C);@(C2)` />
<G Include=`@(C->'%(Filename)')` />
<H Include=`@(C2->WithMetadataValue('Foo', 'Bar'))` />
<U Include=`*4.cs` />
</ItemGroup>
<Target Name=`AddFromMainProject`>
<ItemGroup>
<B Include=`bbbb.cs` />
<I Include=`@(C)` />
<J Include=`@(C);@(C2)` />
<K Include=`@(C->'%(Filename)')` />
<L Include=`@(C2->WithMetadataValue('Foo', 'Bar'))` />
<V Include=`*4.cs` />
</ItemGroup>
</Target>
<Target Name=`Validate` DependsOnTargets=`AddFromMainProject;AddFromImport`>
<Warning Text=`A is wrong: EXPECTED: [a] ACTUAL: [%(A.DefiningProjectName)]` Condition=`'%(A.DefiningProjectName)' != 'a'` />
<Warning Text=`B is wrong: EXPECTED: [a] ACTUAL: [%(B.DefiningProjectName)]` Condition=`'%(B.DefiningProjectName)' != 'a'` />
<Warning Text=`C is wrong: EXPECTED: [b] ACTUAL: [%(C.DefiningProjectName)]` Condition=`'%(C.DefiningProjectName)' != 'b'` />
<Warning Text=`D is wrong: EXPECTED: [b] ACTUAL: [%(D.DefiningProjectName)]` Condition=`'%(D.DefiningProjectName)' != 'b'` />
<Warning Text=`E is wrong: EXPECTED: [a] ACTUAL: [%(E.DefiningProjectName)]` Condition=`'%(E.DefiningProjectName)' != 'a'` />
<Warning Text=`F is wrong: EXPECTED: [a] ACTUAL: [%(F.DefiningProjectName)]` Condition=`'%(F.DefiningProjectName)' != 'a'` />
<Warning Text=`G is wrong: EXPECTED: [a] ACTUAL: [%(G.DefiningProjectName)]` Condition=`'%(G.DefiningProjectName)' != 'a'` />
<Warning Text=`H is wrong: EXPECTED: [a] ACTUAL: [%(H.DefiningProjectName)]` Condition=`'%(H.DefiningProjectName)' != 'a'` />
<Warning Text=`I is wrong: EXPECTED: [a] ACTUAL: [%(I.DefiningProjectName)]` Condition=`'%(I.DefiningProjectName)' != 'a'` />
<Warning Text=`J is wrong: EXPECTED: [a] ACTUAL: [%(J.DefiningProjectName)]` Condition=`'%(J.DefiningProjectName)' != 'a'` />
<Warning Text=`K is wrong: EXPECTED: [a] ACTUAL: [%(K.DefiningProjectName)]` Condition=`'%(K.DefiningProjectName)' != 'a'` />
<Warning Text=`L is wrong: EXPECTED: [a] ACTUAL: [%(L.DefiningProjectName)]` Condition=`'%(L.DefiningProjectName)' != 'a'` />
<Warning Text=`M is wrong: EXPECTED: [b] ACTUAL: [%(M.DefiningProjectName)]` Condition=`'%(M.DefiningProjectName)' != 'b'` />
<Warning Text=`N is wrong: EXPECTED: [b] ACTUAL: [%(N.DefiningProjectName)]` Condition=`'%(N.DefiningProjectName)' != 'b'` />
<Warning Text=`O is wrong: EXPECTED: [b] ACTUAL: [%(O.DefiningProjectName)]` Condition=`'%(O.DefiningProjectName)' != 'b'` />
<Warning Text=`P is wrong: EXPECTED: [b] ACTUAL: [%(P.DefiningProjectName)]` Condition=`'%(P.DefiningProjectName)' != 'b'` />
<Warning Text=`Q is wrong: EXPECTED: [b] ACTUAL: [%(Q.DefiningProjectName)]` Condition=`'%(Q.DefiningProjectName)' != 'b'` />
<Warning Text=`R is wrong: EXPECTED: [b] ACTUAL: [%(R.DefiningProjectName)]` Condition=`'%(R.DefiningProjectName)' != 'b'` />
<Warning Text=`S is wrong: EXPECTED: [b] ACTUAL: [%(S.DefiningProjectName)]` Condition=`'%(S.DefiningProjectName)' != 'b'` />
<Warning Text=`T is wrong: EXPECTED: [b] ACTUAL: [%(T.DefiningProjectName)]` Condition=`'%(T.DefiningProjectName)' != 'b'` />
<Warning Text=`U is wrong: EXPECTED: [a] ACTUAL: [%(U.DefiningProjectName)]` Condition=`'%(U.DefiningProjectName)' != 'a'` />
<Warning Text=`V is wrong: EXPECTED: [a] ACTUAL: [%(V.DefiningProjectName)]` Condition=`'%(V.DefiningProjectName)' != 'a'` />
<Warning Text=`W is wrong: EXPECTED: [b] ACTUAL: [%(W.DefiningProjectName)]` Condition=`'%(W.DefiningProjectName)' != 'b'` />
<Warning Text=`X is wrong: EXPECTED: [b] ACTUAL: [%(X.DefiningProjectName)]` Condition=`'%(X.DefiningProjectName)' != 'b'` />
</Target>
</Project>";

    string contentsB =
        @"<?xml version=`1.0` encoding=`utf-8`?>
<Project ToolsVersion=`msbuilddefaulttoolsversion` xmlns=`msbuildnamespace`>
<ItemGroup>
<C Include=`cccc.cs` />
<C2 Include=`ccc2.cs` />
<C2 Include=`ccc3.cs`>
<Foo>Bar</Foo>
</C2>
<M Include=`@(A)` />
<N Include=`@(A);@(A2)` />
<O Include=`@(A->'%(Filename)')` />
<P Include=`@(A2->WithMetadataValue('Foo', 'Bar'))` />
<W Include=`*4.cs` />
</ItemGroup>
<Target Name=`AddFromImport`>
<ItemGroup>
<D Include=`dddd.cs` />
<Q Include=`@(A)` />
<R Include=`@(A);@(A2)` />
<S Include=`@(A->'%(Filename)')` />
<T Include=`@(A2->WithMetadataValue('Foo', 'Bar'))` />
<X Include=`*4.cs` />
</ItemGroup>
</Target>
</Project>";

    try
    {
        File.WriteAllText(projectA, ObjectModelHelpers.CleanupFileContents(contentsA));
        File.WriteAllText(projectB, ObjectModelHelpers.CleanupFileContents(contentsB));
        File.WriteAllText(includeFileA, "aaaaaaa");
        File.WriteAllText(includeFileB, "bbbbbbb");

        MockLogger logger = new MockLogger(_testOutput);
        ObjectModelHelpers.BuildTempProjectFileExpectSuccess("a.proj", logger);

        logger.AssertNoWarnings();
    }
    finally
    {
        if (File.Exists(projectA))
        {
            File.Delete(projectA);
        }

        if (File.Exists(projectB))
        {
            File.Delete(projectB);
        }

        if (File.Exists(includeFileA))
        {
            File.Delete(includeFileA);
        }

        if (File.Exists(includeFileB))
        {
            File.Delete(includeFileB);
        }
    }
}
/// <summary>
/// SetProperty on an immutable ProjectInstance must throw InvalidOperationException.
/// </summary>
[Fact]
public void ImmutableProjectInstance_SetProperty()
{
    var immutableInstance = GetSampleProjectInstance(true /* immutable */);
    Helpers.VerifyAssertThrowsInvalidOperation(() => immutableInstance.SetProperty("a", "b"));
}
/// <summary>
/// RemoveProperty on an immutable ProjectInstance must throw InvalidOperationException.
/// </summary>
[Fact]
public void ImmutableProjectInstance_RemoveProperty()
{
    var immutableInstance = GetSampleProjectInstance(true /* immutable */);
    Helpers.VerifyAssertThrowsInvalidOperation(() => immutableInstance.RemoveProperty("p1"));
}
/// <summary>
/// RemoveItem on an immutable ProjectInstance must throw InvalidOperationException.
/// </summary>
[Fact]
public void ImmutableProjectInstance_RemoveItem()
{
    var immutableInstance = GetSampleProjectInstance(true /* immutable */);
    Helpers.VerifyAssertThrowsInvalidOperation(() => immutableInstance.RemoveItem(Helpers.GetFirst(immutableInstance.Items)));
}
/// <summary>
/// AddItem on an immutable ProjectInstance must throw InvalidOperationException.
/// </summary>
[Fact]
public void ImmutableProjectInstance_AddItem()
{
    var immutableInstance = GetSampleProjectInstance(true /* immutable */);
    Helpers.VerifyAssertThrowsInvalidOperation(() => immutableInstance.AddItem("a", "b"));
}
/// <summary>
/// The AddItem overload taking metadata must also throw on an immutable ProjectInstance.
/// </summary>
[Fact]
public void ImmutableProjectInstance_AddItemWithMetadata()
{
    var immutableInstance = GetSampleProjectInstance(true /* immutable */);
    Helpers.VerifyAssertThrowsInvalidOperation(() => immutableInstance.AddItem("a", "b", new List<KeyValuePair<string, string>>()));
}
/// <summary>
/// Building an immutable ProjectInstance must throw InvalidOperationException.
/// </summary>
[Fact]
public void ImmutableProjectInstance_Build()
{
    var immutableInstance = GetSampleProjectInstance(true /* immutable */);
    Helpers.VerifyAssertThrowsInvalidOperation(() => immutableInstance.Build());
}
/// <summary>
/// Assigning EvaluatedInclude on an item of an immutable ProjectInstance must throw.
/// </summary>
[Fact]
public void ImmutableProjectInstance_SetEvaluatedInclude()
{
    var immutableInstance = GetSampleProjectInstance(true /* immutable */);
    Helpers.VerifyAssertThrowsInvalidOperation(() => { Helpers.GetFirst(immutableInstance.Items).EvaluatedInclude = "x"; });
}
/// <summary>
/// Assigning EvaluatedIncludeEscaped (via ITaskItem2) on an immutable ProjectInstance must throw.
/// </summary>
[Fact]
public void ImmutableProjectInstance_SetEvaluatedIncludeEscaped()
{
    var immutableInstance = GetSampleProjectInstance(true /* immutable */);
    Helpers.VerifyAssertThrowsInvalidOperation(() => { ((ITaskItem2)Helpers.GetFirst(immutableInstance.Items)).EvaluatedIncludeEscaped = "x"; });
}
/// <summary>
/// Assigning ItemSpec (via ITaskItem2) on an immutable ProjectInstance must throw.
/// </summary>
[Fact]
public void ImmutableProjectInstance_SetItemSpec()
{
    var immutableInstance = GetSampleProjectInstance(true /* immutable */);
    Helpers.VerifyAssertThrowsInvalidOperation(() => { ((ITaskItem2)Helpers.GetFirst(immutableInstance.Items)).ItemSpec = "x"; });
}
/// <summary>
/// SetMetadataValueLiteral (via ITaskItem2) on an immutable ProjectInstance item must throw.
/// </summary>
[Fact]
public void ImmutableProjectInstance_SetMetadataOnItem1()
{
    var immutableInstance = GetSampleProjectInstance(true /* immutable */);
    Helpers.VerifyAssertThrowsInvalidOperation(() => ((ITaskItem2)Helpers.GetFirst(immutableInstance.Items)).SetMetadataValueLiteral("a", "b"));
}
/// <summary>
/// Bulk SetMetadata on an item of an immutable ProjectInstance must throw.
/// </summary>
[Fact]
public void ImmutableProjectInstance_SetMetadataOnItem2()
{
    var immutableInstance = GetSampleProjectInstance(true /* immutable */);
    Helpers.VerifyAssertThrowsInvalidOperation(() => Helpers.GetFirst(immutableInstance.Items).SetMetadata(new List<KeyValuePair<string, string>>()));
}
/// <summary>
/// Single-value SetMetadata on an item of an immutable ProjectInstance must throw.
/// </summary>
[Fact]
public void ImmutableProjectInstance_SetMetadataOnItem3()
{
    var immutableInstance = GetSampleProjectInstance(true /* immutable */);
    Helpers.VerifyAssertThrowsInvalidOperation(() => Helpers.GetFirst(immutableInstance.Items).SetMetadata("a", "b"));
}
/// <summary>
/// RemoveMetadata on an item of an immutable ProjectInstance must throw.
/// </summary>
[Fact]
public void ImmutableProjectInstance_RemoveMetadataFromItem()
{
    var immutableInstance = GetSampleProjectInstance(true /* immutable */);
    Helpers.VerifyAssertThrowsInvalidOperation(() => Helpers.GetFirst(immutableInstance.Items).RemoveMetadata("n"));
}
/// <summary>
/// Assigning EvaluatedValue on a property of an immutable ProjectInstance must throw.
/// </summary>
[Fact]
public void ImmutableProjectInstance_SetEvaluatedValueOnProperty()
{
    var immutableInstance = GetSampleProjectInstance(true /* immutable */);
    Helpers.VerifyAssertThrowsInvalidOperation(() => { Helpers.GetFirst(immutableInstance.Properties).EvaluatedValue = "v2"; });
}
/// <summary>
/// Assigning EvaluatedValue on a property looked up via GetProperty must throw when immutable.
/// </summary>
[Fact]
public void ImmutableProjectInstance_SetEvaluatedValueOnPropertyFromProject()
{
    var immutableInstance = GetSampleProjectInstance(true /* immutable */);
    Helpers.VerifyAssertThrowsInvalidOperation(() => { immutableInstance.GetProperty("p1").EvaluatedValue = "v2"; });
}
/// <summary>
/// Creating a brand-new property on an immutable ProjectInstance must throw.
/// </summary>
[Fact]
public void ImmutableProjectInstance_SetNewProperty()
{
    var immutableInstance = GetSampleProjectInstance(true /* immutable */);
    Helpers.VerifyAssertThrowsInvalidOperation(() => immutableInstance.SetProperty("newproperty", "v2"));
}
/// <summary>
/// Setting a global property must fail on an immutable ProjectInstance even though
/// the property itself was originally created as mutable.
/// </summary>
[Fact]
public void ImmutableProjectInstance_SetGlobalProperty()
{
    var immutableInstance = GetSampleProjectInstance(true /* immutable */);
    Helpers.VerifyAssertThrowsInvalidOperation(() => immutableInstance.SetProperty("g", "gv2"));
}
/// <summary>
/// Setting an environment-originated property must fail on an immutable ProjectInstance
/// even though the property itself was originally created as mutable.
/// </summary>
[Fact]
public void ImmutableProjectInstance_SetEnvironmentProperty()
{
    var immutableInstance = GetSampleProjectInstance(true /* immutable */);
    Helpers.VerifyAssertThrowsInvalidOperation(() => immutableInstance.SetProperty("username", "someone_else_here"));
}
/// <summary>
/// A DeepCopy explicitly requested as mutable, taken from an immutable instance,
/// must itself be fully editable.
/// </summary>
[Fact]
public void ImmutableProjectInstance_CloneMutableFromImmutable()
{
    var source = GetSampleProjectInstance(true /* immutable */);
    var copy = source.DeepCopy(false /* mutable */);
    // None of these mutations should throw on the mutable copy.
    copy.SetProperty("p", "pnew");
    copy.AddItem("i", "ii");
    Helpers.GetFirst(copy.Items).EvaluatedInclude = "new";
    copy.SetProperty("g", "gnew");
    copy.SetProperty("username", "someone_else_here");
}
/// <summary>
/// A DeepCopy explicitly requested as immutable, taken from a mutable instance,
/// must reject every mutation.
/// </summary>
[Fact]
[Trait("Category", "netcore-osx-failing")]
[Trait("Category", "netcore-linux-failing")]
public void ImmutableProjectInstance_CloneImmutableFromMutable()
{
    var source = GetSampleProjectInstance(false /* mutable */);
    var copy = source.DeepCopy(true /* immutable */);
    Helpers.VerifyAssertThrowsInvalidOperation(() => { copy.GetProperty("g").EvaluatedValue = "v2"; });
    Helpers.VerifyAssertThrowsInvalidOperation(() =>
    {
        copy.GetProperty(NativeMethodsShared.IsWindows ? "username" : "USER").EvaluatedValue =
            "someone_else_here";
    });
    Helpers.VerifyAssertThrowsInvalidOperation(() => { Helpers.GetFirst(copy.Properties).EvaluatedValue = "v2"; });
    Helpers.VerifyAssertThrowsInvalidOperation(() => { Helpers.GetFirst(copy.Items).EvaluatedInclude = "new"; });
}
/// <summary>
/// DeepCopy with inherited mutability from an immutable instance returns the same
/// object (no clone needed) and stays immutable.
/// </summary>
[Fact]
[Trait("Category", "netcore-osx-failing")]
[Trait("Category", "netcore-linux-failing")]
public void ImmutableProjectInstance_CloneImmutableFromImmutable()
{
    var source = GetSampleProjectInstance(true /* immutable */);
    var copy = source.DeepCopy(/* inherit */);
    // Should not have bothered cloning: same reference comes back.
    Assert.Same(source, copy);
    Helpers.VerifyAssertThrowsInvalidOperation(() => { copy.GetProperty("g").EvaluatedValue = "v2"; });
    Helpers.VerifyAssertThrowsInvalidOperation(() =>
    {
        copy.GetProperty(NativeMethodsShared.IsWindows ? "username" : "USER").EvaluatedValue =
            "someone_else_here";
    });
    Helpers.VerifyAssertThrowsInvalidOperation(() => { Helpers.GetFirst(copy.Properties).EvaluatedValue = "v2"; });
    Helpers.VerifyAssertThrowsInvalidOperation(() => { Helpers.GetFirst(copy.Items).EvaluatedInclude = "new"; });
}
/// <summary>
/// DeepCopy explicitly requested as immutable from an already-immutable instance
/// must still reject every mutation.
/// </summary>
[Fact]
[Trait("Category", "netcore-osx-failing")]
[Trait("Category", "netcore-linux-failing")]
public void ImmutableProjectInstance_CloneImmutableFromImmutable2()
{
    var source = GetSampleProjectInstance(true /* immutable */);
    var copy = source.DeepCopy(true /* immutable */);
    Helpers.VerifyAssertThrowsInvalidOperation(() => { copy.GetProperty("g").EvaluatedValue = "v2"; });
    Helpers.VerifyAssertThrowsInvalidOperation(() =>
    {
        copy.GetProperty(NativeMethodsShared.IsWindows ? "username" : "USER").EvaluatedValue =
            "someone_else_here";
    });
    Helpers.VerifyAssertThrowsInvalidOperation(() => { Helpers.GetFirst(copy.Properties).EvaluatedValue = "v2"; });
    Helpers.VerifyAssertThrowsInvalidOperation(() => { Helpers.GetFirst(copy.Items).EvaluatedInclude = "new"; });
}
/// <summary>
/// DeepCopy with inherited mutability from a mutable instance stays mutable.
/// </summary>
[Fact]
[Trait("Category", "netcore-osx-failing")]
[Trait("Category", "netcore-linux-failing")]
public void ImmutableProjectInstance_CloneMutableFromMutable()
{
    var source = GetSampleProjectInstance(false /* mutable */);
    var copy = source.DeepCopy(/* inherit */);
    // None of these mutations should throw on the mutable copy.
    copy.SetProperty("p", "pnew");
    copy.AddItem("i", "ii");
    Helpers.GetFirst(copy.Items).EvaluatedInclude = "new";
    copy.SetProperty("g", "gnew");
    copy.SetProperty("username", "someone_else_here");
}
/// <summary>
/// DeepCopy explicitly requested as mutable from a mutable instance stays mutable.
/// </summary>
[Fact]
public void ImmutableProjectInstance_CloneMutableFromMutable2()
{
    var source = GetSampleProjectInstance(false /* mutable */);
    var copy = source.DeepCopy(false /* mutable */);
    // None of these mutations should throw on the mutable copy.
    copy.SetProperty("p", "pnew");
    copy.AddItem("i", "ii");
    Helpers.GetFirst(copy.Items).EvaluatedInclude = "new";
    copy.SetProperty("g", "gnew");
    copy.SetProperty("username", "someone_else_here");
}
/// <summary>
/// Builds a ProjectInstance from a fixed sample project containing an item
/// definition, properties, items with metadata, and two targets.
/// </summary>
private static ProjectInstance GetSampleProjectInstance(bool isImmutable = false)
{
    string content = @"
<Project xmlns='http://schemas.microsoft.com/developer/msbuild/2003' >
<ItemDefinitionGroup>
<i>
<n>n1</n>
</i>
</ItemDefinitionGroup>
<PropertyGroup>
<p1>v1</p1>
<p2>v2</p2>
<p2>$(p2)X$(p)</p2>
</PropertyGroup>
<ItemGroup>
<i Include='i0'/>
<i Include='i1'>
<m>m1</m>
</i>
<i Include='$(p1)'/>
</ItemGroup>
<Target Name='t'>
<t1 a='a1' b='b1' ContinueOnError='coe' Condition='c'/>
<t2/>
</Target>
<Target Name='tt'/>
</Project>
";
    return GetProjectInstance(content, isImmutable);
}
/// <summary>
/// Creates a ProjectInstance from the given project XML, with a single global
/// property "g"="gv", optionally marked immutable.
/// </summary>
private static ProjectInstance GetProjectInstance(string content, bool immutable = false)
{
    var globals = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase)
    {
        { "g", "gv" }
    };
    var project = new Project(XmlReader.Create(new StringReader(content)), globals, ObjectModelHelpers.MSBuildDefaultToolsVersion);
    if (immutable)
    {
        return project.CreateProjectInstance(ProjectInstanceSettings.Immutable);
    }
    return project.CreateProjectInstance();
}
/// <summary>
/// Creates a ProjectInstance from a freshly created, empty project.
/// </summary>
private static ProjectInstance GetEmptyProjectInstance()
{
    var emptyXml = ProjectRootElement.Create();
    var project = new Project(emptyXml);
    return project.CreateProjectInstance();
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Text.RegularExpressions;
namespace MongoDB
{
/// <summary>
/// Builds and parses MongoDB connection strings. Accepts both the
/// "mongodb://[user:pass@]host[:port][,host2...][/database][?option=value&amp;...]"
/// URI form and the classic "key=value;key=value" pair form.
/// </summary>
[Serializable]
public class MongoConnectionStringBuilder
{
    /// <summary>
    /// Database used when the connection string does not name one.
    /// </summary>
    public const string DefaultDatabase = "admin";
    /// <summary>
    /// Default upper bound of the connection pool.
    /// </summary>
    public const int DefaultMaximumPoolSize = 100;
    /// <summary>
    /// Default lower bound of the connection pool.
    /// </summary>
    public const int DefaultMinimumPoolSize = 0;
    /// <summary>
    /// Pooling is enabled by default.
    /// </summary>
    public const bool DefaultPooled = true;
    /// <summary>
    /// Default connection lifetime (TimeSpan.Zero).
    /// </summary>
    public static readonly TimeSpan DefaultConnectionLifeTime = TimeSpan.Zero;
    /// <summary>
    /// Default connection timeout (15 seconds).
    /// </summary>
    public static readonly TimeSpan DefaultConnectionTimeout = TimeSpan.FromSeconds(15);
    // "key = value", tolerating surrounding whitespace.
    private static readonly Regex PairRegex = new Regex(@"^\s*(.*)\s*=\s*(.*)\s*$");
    // "host" with an optional ":port" suffix.
    private static readonly Regex ServerRegex = new Regex(@"^\s*([^:]+)(?::(\d+))?\s*$");
    // mongodb://[user:pass@]servers[/database][?options]
    private static readonly Regex UriRegex = new Regex(@"^mongodb://(?:([^:]*):([^@]*)@)?([^/]*)(?:/([^?]*))?(?:\?(.*))?$");
    private readonly List<MongoServerEndPoint> _servers = new List<MongoServerEndPoint>();
    /// <summary>
    /// Initializes a new instance of the
    /// <see cref = "MongoConnectionStringBuilder" />
    /// class with all defaults. The default server connection is used when
    /// no server is added.
    /// </summary>
    public MongoConnectionStringBuilder()
    {
        ConnectionLifetime = DefaultConnectionLifeTime;
        ConnectionTimeout = DefaultConnectionTimeout;
        MaximumPoolSize = DefaultMaximumPoolSize;
        MinimumPoolSize = DefaultMinimumPoolSize;
        Pooled = DefaultPooled;
        Database = DefaultDatabase;
    }
    /// <summary>
    /// Initializes a new instance of the
    /// <see cref = "MongoConnectionStringBuilder" />
    /// class from a connection string in either URI or key/value form.
    /// Uses the default server connection when no server is added.
    /// </summary>
    /// <param name = "connectionString">The connection string.</param>
    public MongoConnectionStringBuilder(string connectionString)
        : this()
    {
        if(!string.IsNullOrEmpty(connectionString))
        {
            if(connectionString.StartsWith("mongodb://"))
                ParseUri(connectionString);
            else
                Parse(connectionString);
        }
    }
    /// <summary>
    /// Gets the configured servers, or the default endpoint when none were added.
    /// </summary>
    /// <value>The servers.</value>
    public MongoServerEndPoint[] Servers
    {
        get { return _servers.Count == 0 ? new[] {MongoServerEndPoint.Default} : _servers.ToArray(); }
    }
    /// <summary>
    /// Gets or sets the password.
    /// </summary>
    /// <value>The password.</value>
    public string Password { get; set; }
    /// <summary>
    /// Gets or sets the username.
    /// </summary>
    /// <value>The username.</value>
    public string Username { get; set; }
    /// <summary>
    /// Gets or sets the maximum size of the connection pool.
    /// </summary>
    /// <value>The maximum size of the pool.</value>
    public int MaximumPoolSize { get; set; }
    /// <summary>
    /// Gets or sets the size of the minimum connection pool.
    /// </summary>
    /// <value>The size of the minimal pool.</value>
    public int MinimumPoolSize { get; set; }
    /// <summary>
    /// Gets or sets the connection lifetime in connection pool.
    /// </summary>
    /// <value>The connection lifetime.</value>
    public TimeSpan ConnectionLifetime { get; set; }
    /// <summary>
    /// Gets or sets the connection timeout.
    /// </summary>
    /// <value>The connection timeout.</value>
    public TimeSpan ConnectionTimeout { get; set; }
    /// <summary>
    /// Gets or sets a value indicating whether connection is pooled.
    /// </summary>
    /// <value><c>true</c> if pooled; otherwise, <c>false</c>.</value>
    public bool Pooled { get; set; }
    /// <summary>
    /// Gets or sets the database.
    /// </summary>
    /// <remarks>
    /// Is only used when passing directly constructing MongoDatabase instance.
    /// </remarks>
    /// <value>The database.</value>
    public string Database { get; set; }
    /// <summary>
    /// Parses a "mongodb://" URI into the builder's properties.
    /// </summary>
    /// <param name = "connectionString">The connection string.</param>
    /// <exception cref = "FormatException">The string does not match the URI grammar.</exception>
    private void ParseUri(string connectionString)
    {
        if(connectionString == null)
            throw new ArgumentNullException("connectionString");
        var uriMatch = UriRegex.Match(connectionString);
        if(!uriMatch.Success)
            throw new FormatException(string.Format("Invalid connection string: {0}", connectionString));
        var username = uriMatch.Groups[1].Value;
        if(!string.IsNullOrEmpty(username))
            Username = username;
        var password = uriMatch.Groups[2].Value;
        if(!string.IsNullOrEmpty(password))
            Password = password;
        var servers = uriMatch.Groups[3].Value;
        if(!string.IsNullOrEmpty(servers))
            ParseServers(servers);
        var database = uriMatch.Groups[4].Value;
        if(!string.IsNullOrEmpty(database))
            Database = database;
        var values = uriMatch.Groups[5].Value;
        if(!string.IsNullOrEmpty(values))
            foreach(var pair in values.Split('&'))
                if(pair.Trim().Length > 0) // tolerate blank pairs, e.g. a trailing '&'
                    ParseValuePair(pair);
    }
    /// <summary>
    /// Parses a "key=value;key=value" connection string into the builder's properties.
    /// </summary>
    /// <param name = "connectionString">The connection string.</param>
    private void Parse(string connectionString)
    {
        if(connectionString == null)
            throw new ArgumentNullException("connectionString");
        var segments = connectionString.Split(';');
        foreach(var segment in segments)
        {
            // Skip blank segments so the conventional trailing ';' does not
            // trigger a FormatException from the pair regex.
            if(segment.Trim().Length == 0)
                continue;
            ParseValuePair(segment);
        }
    }
    /// <summary>
    /// Splits a single "key=value" segment and dispatches it.
    /// </summary>
    /// <param name = "pair">The pair.</param>
    /// <exception cref = "FormatException">The segment is not of the form key=value.</exception>
    private void ParseValuePair(string pair)
    {
        var pairMatch = PairRegex.Match(pair);
        if(!pairMatch.Success)
            throw new FormatException(string.Format("Invalid connection string on: {0}", pairMatch.Value));
        var key = pairMatch.Groups[1].Value;
        var value = pairMatch.Groups[2].Value;
        ParseValuePair(key, value);
    }
    /// <summary>
    /// Applies one recognized option (case-insensitive key) to the builder.
    /// </summary>
    /// <param name = "key">The key.</param>
    /// <param name = "value">The value.</param>
    /// <exception cref = "FormatException">The key is unknown or the value is malformed.</exception>
    private void ParseValuePair(string key, string value)
    {
        // NOTE(review): numeric values below parse with the current culture;
        // invariant culture would be safer for machine-readable strings — confirm.
        switch(key.ToLower())
        {
            case "username":
            case "user id":
            case "user":
            {
                Username = value;
                break;
            }
            case "password":
            {
                Password = value;
                break;
            }
            case "pooled":
            {
                try
                {
                    Pooled = bool.Parse(value);
                }
                catch(FormatException exception)
                {
                    throw new FormatException("Invalid string for Pooled in connection string", exception);
                }
                break;
            }
            case "database":
            case "data source":
            {
                Database = value;
                break;
            }
            case "maximumpoolsize":
            case "max pool size":
            {
                try
                {
                    MaximumPoolSize = int.Parse(value);
                }
                catch(FormatException exception)
                {
                    throw new FormatException("Invalid number for MaximumPoolSize in connection string", exception);
                }
                break;
            }
            case "minimumpoolsize":
            case "min pool size":
            {
                try
                {
                    MinimumPoolSize = int.Parse(value);
                }
                catch(FormatException exception)
                {
                    throw new FormatException("Invalid number for MinimumPoolSize in connection string", exception);
                }
                break;
            }
            case "connectionlifetime":
            case "connection lifetime":
            {
                try
                {
                    var seconds = double.Parse(value);
                    // Non-positive values fall back to the default lifetime.
                    ConnectionLifetime = seconds > 0 ? TimeSpan.FromSeconds(seconds) : DefaultConnectionLifeTime;
                }
                catch(FormatException exception)
                {
                    throw new FormatException("Invalid number for ConnectionLifetime in connection string", exception);
                }
                break;
            }
            case "connectiontimeout":
            case "connecttimeout":
            {
                try
                {
                    var seconds = double.Parse(value);
                    // Non-positive values fall back to the default timeout.
                    ConnectionTimeout = seconds > 0 ? TimeSpan.FromSeconds(seconds) : DefaultConnectionTimeout;
                }
                catch(FormatException exception)
                {
                    throw new FormatException("Invalid number for ConnectionTimeout in connection string", exception);
                }
                break;
            }
            case "server":
            case "servers":
            {
                ParseServers(value);
                break;
            }
            default:
                throw new FormatException(string.Format("Unknown connection string option: {0}", key));
        }
    }
    /// <summary>
    /// Parses a comma-separated list of "host[:port]" entries and adds each server.
    /// </summary>
    /// <param name = "value">The value.</param>
    /// <exception cref = "FormatException">An entry does not match host[:port].</exception>
    private void ParseServers(string value)
    {
        var servers = value.Split(',');
        foreach(var serverMatch in servers.Select(server => ServerRegex.Match(server)))
        {
            if(!serverMatch.Success)
                throw new FormatException(string.Format("Invalid server in connection string: {0}", serverMatch.Value));
            var serverHost = serverMatch.Groups[1].Value;
            int port;
            // Group 2 is empty when no port was supplied; TryParse then fails
            // and the host-only overload is used.
            if(int.TryParse(serverMatch.Groups[2].Value, out port))
                AddServer(serverHost, port);
            else
                AddServer(serverHost);
        }
    }
    /// <summary>
    /// Adds the server.
    /// </summary>
    /// <param name = "endPoint">The end point.</param>
    public void AddServer(MongoServerEndPoint endPoint)
    {
        if(endPoint == null)
            throw new ArgumentNullException("endPoint");
        _servers.Add(endPoint);
    }
    /// <summary>
    /// Clears the servers.
    /// </summary>
    public void ClearServers()
    {
        _servers.Clear();
    }
    /// <summary>
    /// Adds the server with the given host and default port.
    /// </summary>
    /// <param name = "host">The host.</param>
    public void AddServer(string host)
    {
        AddServer(new MongoServerEndPoint(host));
    }
    /// <summary>
    /// Adds the server with the given host and port.
    /// </summary>
    /// <param name = "host">The host.</param>
    /// <param name = "port">The port.</param>
    public void AddServer(string host, int port)
    {
        AddServer(new MongoServerEndPoint(host, port));
    }
    /// <summary>
    /// Serializes the builder back to key/value form, emitting only values
    /// that differ from their defaults.
    /// </summary>
    /// <returns>A
    /// <see cref = "System.String" />
    /// that represents this instance.</returns>
    public override string ToString()
    {
        // NOTE(review): Database is deliberately (?) not emitted here, so a
        // round trip through ToString loses it — confirm intended.
        var builder = new StringBuilder();
        if(!string.IsNullOrEmpty(Username))
            builder.AppendFormat("Username={0};", Username);
        if(!string.IsNullOrEmpty(Password))
            builder.AppendFormat("Password={0};", Password);
        if(_servers.Count > 0)
        {
            builder.Append("Server=");
            foreach(var server in _servers)
            {
                builder.Append(server.Host);
                if(server.Port != MongoServerEndPoint.DefaultPort)
                    builder.AppendFormat(":{0}", server.Port);
                builder.Append(',');
            }
            // remove last ,
            builder.Remove(builder.Length - 1, 1);
            builder.Append(';');
        }
        if(Pooled != true)
            builder.AppendFormat("Pooled={0};", Pooled);
        if(MaximumPoolSize != DefaultMaximumPoolSize)
            builder.AppendFormat("MaximumPoolSize={0};", MaximumPoolSize);
        if(MinimumPoolSize != DefaultMinimumPoolSize)
            builder.AppendFormat("MinimumPoolSize={0};", MinimumPoolSize);
        if(ConnectionTimeout != DefaultConnectionTimeout)
            builder.AppendFormat("ConnectionTimeout={0};", ConnectionTimeout.TotalSeconds);
        if(ConnectionLifetime != DefaultConnectionLifeTime)
            builder.AppendFormat("ConnectionLifetime={0};", ConnectionLifetime.TotalSeconds);
        // remove last ;
        if(builder.Length > 0)
            builder.Remove(builder.Length - 1, 1);
        return builder.ToString();
    }
}
}
| |
#region License
/*
The MIT License
Copyright (c) 2008 Sky Morey
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
*/
#endregion
using System.Linq;
using System.Collections.Generic;
using System.Linq.Expressions;
using System.Collections.ObjectModel;
namespace System.Abstract.EventSourcing
{
/// <summary>
/// Repository contract for loading and persisting event-sourced aggregate roots,
/// including optional snapshot support.
/// </summary>
public interface IAggregateRootRepository
{
/// <summary>
/// Gets a single aggregate root by its ID.
/// </summary>
/// <typeparam name="TAggregateRoot">The type of the aggregate root.</typeparam>
/// <param name="aggregateID">The aggregate ID.</param>
/// <param name="queryOptions">The query options.</param>
/// <returns>The aggregate root; behavior for unknown IDs depends on <paramref name="queryOptions"/>.</returns>
TAggregateRoot GetByID<TAggregateRoot>(object aggregateID, AggregateRootQueryOptions queryOptions)
where TAggregateRoot : AggregateRoot;
/// <summary>
/// Gets multiple aggregate roots by their IDs.
/// </summary>
/// <typeparam name="TAggregateRoot">The type of the aggregate root.</typeparam>
/// <param name="aggregateIDs">The aggregate IDs.</param>
/// <param name="queryOptions">The query options.</param>
/// <returns>The aggregate roots, one per ID.</returns>
IEnumerable<TAggregateRoot> GetManyByIDs<TAggregateRoot>(IEnumerable<object> aggregateIDs, AggregateRootQueryOptions queryOptions)
where TAggregateRoot : AggregateRoot;
/// <summary>
/// Gets the full event stream for the aggregate with the given ID.
/// </summary>
/// <param name="aggregateID">The aggregate ID.</param>
/// <returns>The stored events.</returns>
IEnumerable<Event> GetEventsByID(object aggregateID);
/// <summary>
/// Saves the specified aggregate.
/// </summary>
/// <param name="aggregate">The aggregate.</param>
void Save(AggregateRoot aggregate);
/// <summary>
/// Saves each of the specified aggregates.
/// </summary>
/// <param name="aggregate">The aggregates. NOTE(review): parameter name is singular
/// here but plural in the implementation — consider aligning.</param>
void Save(IEnumerable<AggregateRoot> aggregate);
/// <summary>
/// Makes a snapshot of the aggregate, subject to the predicate's approval.
/// </summary>
/// <param name="aggregate">The aggregate.</param>
/// <param name="predicate">Decides whether a snapshot should be taken.</param>
void MakeSnapshot(AggregateRoot aggregate, Func<IAggregateRootRepository, AggregateRoot, bool> predicate);
}
/// <summary>
/// Default <see cref="IAggregateRootRepository"/> built on an event store plus an
/// optional snapshot store: loads aggregates by replaying events (optionally from
/// a snapshot), and persists uncommitted changes back to the event store.
/// </summary>
public class AggregateRootRepository : IAggregateRootRepository
{
private readonly IEventStore _eventStore;
// Batched variants are captured from the same instances when supported;
// they are not used in the code visible in this file.
private readonly IBatchedEventStore _batchedEventStore;
private readonly IAggregateRootSnapshotStore _snapshotStore;
private readonly IBatchedAggregateRootSnapshotStore _batchedSnapshotStore;
// Optional callback invoked with newly saved events (e.g. to publish them).
private readonly Action<IEnumerable<Event>> _eventDispatcher;
// Creates empty aggregate instances by type before state is loaded.
private readonly Func<Type, AggregateRoot> _factory;
/// <summary>
/// Initializes a new instance of the <see cref="AggregateRootRepository"/> class.
/// </summary>
/// <param name="eventStore">The event store (required).</param>
/// <param name="snapshotStore">The snapshot store; may be null to disable snapshots.</param>
/// <param name="eventDispatcher">Optional dispatcher called with events after they are saved.</param>
/// <param name="factory">Optional aggregate factory; defaults to <c>EventSource.DefaultFactory.Factory</c>.</param>
/// <exception cref="System.ArgumentNullException">eventStore</exception>
public AggregateRootRepository(IEventStore eventStore, IAggregateRootSnapshotStore snapshotStore, Action<IEnumerable<Event>> eventDispatcher = null, Func<Type, AggregateRoot> factory = null)
{
if (eventStore == null)
throw new ArgumentNullException("eventStore");
_eventStore = eventStore;
_batchedEventStore = (eventStore as IBatchedEventStore);
_snapshotStore = snapshotStore;
_batchedSnapshotStore = (snapshotStore as IBatchedAggregateRootSnapshotStore);
_eventDispatcher = eventDispatcher;
_factory = (factory ?? EventSource.DefaultFactory.Factory);
}
/// <summary>
/// Gets the full event stream (from sequence 0) for the aggregate with the given ID.
/// </summary>
/// <param name="aggregateID">The aggregate ID.</param>
/// <returns>The stored events.</returns>
public IEnumerable<Event> GetEventsByID(object aggregateID)
{
return _eventStore.GetEventsByID(aggregateID, 0);
}
/// <summary>
/// Loads one aggregate: creates an empty instance via the factory, restores the
/// latest snapshot if available, then replays events recorded after it.
/// </summary>
/// <typeparam name="TAggregateRoot">The type of the aggregate root.</typeparam>
/// <param name="aggregateID">The aggregate ID.</param>
/// <param name="queryOptions">With UseNullAggregates set, returns null when
/// neither snapshot nor events were found for the ID.</param>
/// <returns>The aggregate, or null (see <paramref name="queryOptions"/>).</returns>
public TAggregateRoot GetByID<TAggregateRoot>(object aggregateID, AggregateRootQueryOptions queryOptions)
where TAggregateRoot : AggregateRoot
{
if (aggregateID == null)
throw new ArgumentNullException("aggregateID");
var aggregate = (_factory(typeof(TAggregateRoot)) as TAggregateRoot);
if (aggregate == null)
throw new InvalidOperationException("aggregate");
// find snapshot
var loaded = false;
AggregateRootSnapshot snapshot = null;
if (_snapshotStore != null)
{
// Snapshots only apply when the aggregate opts in via ICanAggregateRootSnapshot.
var snapshoter = (aggregate as ICanAggregateRootSnapshot);
if (snapshoter != null && (snapshot = _snapshotStore.GetLatestSnapshot<TAggregateRoot>(aggregateID)) != null)
{
loaded = true;
snapshoter.LoadSnapshot(snapshot);
}
}
// load events recorded after the snapshot (or all events when there is none)
var events = _eventStore.GetEventsByID(aggregateID, (snapshot != null ? snapshot.LastEventSequence : 0));
loaded |= ((IAggregateRootStateAccessor)aggregate).LoadFromHistory(events);
return ((queryOptions & AggregateRootQueryOptions.UseNullAggregates) == 0 ? aggregate : (loaded ? aggregate : null));
}
/// <summary>
/// Loads multiple aggregates by delegating to <see cref="GetByID{TAggregateRoot}"/>
/// for each ID; the result is materialized eagerly.
/// </summary>
/// <typeparam name="TAggregateRoot">The type of the aggregate root.</typeparam>
/// <param name="aggregateIDs">The aggregate IDs.</param>
/// <param name="queryOptions">The query options.</param>
/// <returns>One aggregate (possibly null, per options) per ID.</returns>
public IEnumerable<TAggregateRoot> GetManyByIDs<TAggregateRoot>(IEnumerable<object> aggregateIDs, AggregateRootQueryOptions queryOptions)
where TAggregateRoot : AggregateRoot
{
if (aggregateIDs == null)
throw new ArgumentNullException("aggregateIDs");
return aggregateIDs.Select(x => GetByID<TAggregateRoot>(x, queryOptions)).ToList();
}
/// <summary>
/// Saves one aggregate: persists its uncommitted events, dispatches them,
/// marks them committed, then optionally takes an inline snapshot.
/// </summary>
/// <param name="aggregate">The aggregate.</param>
public void Save(AggregateRoot aggregate)
{
if (aggregate == null)
throw new ArgumentNullException("aggregate");
var accessAggregateState = (IAggregateRootStateAccessor)aggregate;
var events = accessAggregateState.GetUncommittedChanges();
_eventStore.SaveEvents(aggregate.AggregateID, events);
// Dispatch after persisting so subscribers only ever see stored events.
if (_eventDispatcher != null)
_eventDispatcher(events);
accessAggregateState.MarkChangesAsCommitted();
// Inline snapshotting: only when a snapshot store with a predicate exists
// and the aggregate supports snapshots.
Func<IAggregateRootRepository, AggregateRoot, bool> inlineSnapshotPredicate;
if (_snapshotStore != null && (inlineSnapshotPredicate = _snapshotStore.InlineSnapshotPredicate) != null && aggregate is ICanAggregateRootSnapshot)
MakeSnapshot(aggregate, inlineSnapshotPredicate);
}
/// <summary>
/// Saves each of the specified aggregates in sequence.
/// </summary>
/// <param name="aggregates">The aggregates.</param>
public void Save(IEnumerable<AggregateRoot> aggregates)
{
if (aggregates == null)
throw new ArgumentNullException("aggregates");
foreach (var aggregate in aggregates)
Save(aggregate);
}
/// <summary>
/// Persists a snapshot of the aggregate when it supports snapshotting and the
/// predicate (if any) approves; otherwise does nothing.
/// </summary>
/// <param name="aggregate">The aggregate.</param>
/// <param name="predicate">May be null, in which case the snapshot is always taken.</param>
public void MakeSnapshot(AggregateRoot aggregate, Func<IAggregateRootRepository, AggregateRoot, bool> predicate)
{
if (aggregate == null)
throw new ArgumentNullException("aggregate");
ICanAggregateRootSnapshot snapshoter;
if (_snapshotStore != null && (snapshoter = (aggregate as ICanAggregateRootSnapshot)) != null)
if (predicate == null || predicate(this, aggregate))
_snapshotStore.SaveSnapshot(aggregate.GetType(), snapshoter.GetSnapshot());
}
}
}
| |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gax = Google.Api.Gax;
using gaxgrpc = Google.Api.Gax.Grpc;
using gaxgrpccore = Google.Api.Gax.Grpc.GrpcCore;
using proto = Google.Protobuf;
using grpccore = Grpc.Core;
using grpcinter = Grpc.Core.Interceptors;
using sys = System;
using scg = System.Collections.Generic;
using sco = System.Collections.ObjectModel;
using st = System.Threading;
using stt = System.Threading.Tasks;
namespace Google.Ads.GoogleAds.V10.Services
{
    /// <summary>Settings for <see cref="CustomerFeedServiceClient"/> instances.</summary>
    public sealed partial class CustomerFeedServiceSettings : gaxgrpc::ServiceSettingsBase
    {
        /// <summary>Get a new instance of the default <see cref="CustomerFeedServiceSettings"/>.</summary>
        /// <returns>A new instance of the default <see cref="CustomerFeedServiceSettings"/>.</returns>
        public static CustomerFeedServiceSettings GetDefault() => new CustomerFeedServiceSettings();
        /// <summary>Constructs a new <see cref="CustomerFeedServiceSettings"/> object with default settings.</summary>
        public CustomerFeedServiceSettings()
        {
        }
        // Copy constructor used by Clone(); OnCopy is a partial-method hook for hand-written extensions.
        private CustomerFeedServiceSettings(CustomerFeedServiceSettings existing) : base(existing)
        {
            gax::GaxPreconditions.CheckNotNull(existing, nameof(existing));
            MutateCustomerFeedsSettings = existing.MutateCustomerFeedsSettings;
            OnCopy(existing);
        }
        partial void OnCopy(CustomerFeedServiceSettings existing);
        /// <summary>
        /// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
        /// <c>CustomerFeedServiceClient.MutateCustomerFeeds</c> and
        /// <c>CustomerFeedServiceClient.MutateCustomerFeedsAsync</c>.
        /// </summary>
        /// <remarks>
        /// <list type="bullet">
        /// <item><description>Initial retry delay: 5000 milliseconds.</description></item>
        /// <item><description>Retry delay multiplier: 1.3</description></item>
        /// <item><description>Retry maximum delay: 60000 milliseconds.</description></item>
        /// <item><description>Maximum attempts: Unlimited</description></item>
        /// <item>
        /// <description>
        /// Retriable status codes: <see cref="grpccore::StatusCode.Unavailable"/>,
        /// <see cref="grpccore::StatusCode.DeadlineExceeded"/>.
        /// </description>
        /// </item>
        /// <item><description>Timeout: 3600 seconds.</description></item>
        /// </list>
        /// </remarks>
        // Generated initializer: the retry/timeout values below must stay in sync with the <remarks> list above.
        public gaxgrpc::CallSettings MutateCustomerFeedsSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(3600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(5000), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.Unavailable, grpccore::StatusCode.DeadlineExceeded)));
        /// <summary>Creates a deep clone of this object, with all the same property values.</summary>
        /// <returns>A deep clone of this <see cref="CustomerFeedServiceSettings"/> object.</returns>
        public CustomerFeedServiceSettings Clone() => new CustomerFeedServiceSettings(this);
    }
    /// <summary>
    /// Builder class for <see cref="CustomerFeedServiceClient"/> to provide simple configuration of credentials,
    /// endpoint etc.
    /// </summary>
    internal sealed partial class CustomerFeedServiceClientBuilder : gaxgrpc::ClientBuilderBase<CustomerFeedServiceClient>
    {
        /// <summary>The settings to use for RPCs, or <c>null</c> for the default settings.</summary>
        public CustomerFeedServiceSettings Settings { get; set; }
        /// <summary>Creates a new builder with default settings.</summary>
        public CustomerFeedServiceClientBuilder()
        {
            UseJwtAccessWithScopes = CustomerFeedServiceClient.UseJwtAccessWithScopes;
        }
        // Partial-method hooks allowing hand-written code to replace the built client.
        partial void InterceptBuild(ref CustomerFeedServiceClient client);
        partial void InterceptBuildAsync(st::CancellationToken cancellationToken, ref stt::Task<CustomerFeedServiceClient> task);
        /// <summary>Builds the resulting client.</summary>
        public override CustomerFeedServiceClient Build()
        {
            CustomerFeedServiceClient client = null;
            InterceptBuild(ref client);
            return client ?? BuildImpl();
        }
        /// <summary>Builds the resulting client asynchronously.</summary>
        public override stt::Task<CustomerFeedServiceClient> BuildAsync(st::CancellationToken cancellationToken = default)
        {
            stt::Task<CustomerFeedServiceClient> task = null;
            InterceptBuildAsync(cancellationToken, ref task);
            return task ?? BuildAsyncImpl(cancellationToken);
        }
        private CustomerFeedServiceClient BuildImpl()
        {
            Validate();
            grpccore::CallInvoker callInvoker = CreateCallInvoker();
            return CustomerFeedServiceClient.Create(callInvoker, Settings);
        }
        private async stt::Task<CustomerFeedServiceClient> BuildAsyncImpl(st::CancellationToken cancellationToken)
        {
            Validate();
            grpccore::CallInvoker callInvoker = await CreateCallInvokerAsync(cancellationToken).ConfigureAwait(false);
            return CustomerFeedServiceClient.Create(callInvoker, Settings);
        }
        /// <summary>Returns the endpoint for this builder type, used if no endpoint is otherwise specified.</summary>
        protected override string GetDefaultEndpoint() => CustomerFeedServiceClient.DefaultEndpoint;
        /// <summary>
        /// Returns the default scopes for this builder type, used if no scopes are otherwise specified.
        /// </summary>
        protected override scg::IReadOnlyList<string> GetDefaultScopes() => CustomerFeedServiceClient.DefaultScopes;
        /// <summary>Returns the channel pool to use when no other options are specified.</summary>
        protected override gaxgrpc::ChannelPool GetChannelPool() => CustomerFeedServiceClient.ChannelPool;
        /// <summary>Returns the default <see cref="gaxgrpc::GrpcAdapter"/> to use if not otherwise specified.</summary>
        protected override gaxgrpc::GrpcAdapter DefaultGrpcAdapter => gaxgrpccore::GrpcCoreAdapter.Instance;
    }
    /// <summary>CustomerFeedService client wrapper, for convenient use.</summary>
    /// <remarks>
    /// Service to manage customer feeds.
    /// </remarks>
    public abstract partial class CustomerFeedServiceClient
    {
        /// <summary>
        /// The default endpoint for the CustomerFeedService service, which is a host of "googleads.googleapis.com" and
        /// a port of 443.
        /// </summary>
        public static string DefaultEndpoint { get; } = "googleads.googleapis.com:443";
        /// <summary>The default CustomerFeedService scopes.</summary>
        /// <remarks>
        /// The default CustomerFeedService scopes are:
        /// <list type="bullet"><item><description>https://www.googleapis.com/auth/adwords</description></item></list>
        /// </remarks>
        public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[]
        {
            "https://www.googleapis.com/auth/adwords",
        });
        // Channel pool shared by clients built via the parameterless Create/CreateAsync overloads.
        internal static gaxgrpc::ChannelPool ChannelPool { get; } = new gaxgrpc::ChannelPool(DefaultScopes, UseJwtAccessWithScopes);
        // Defaults to true; hand-written code may override via the MaybeUseJwtAccessWithScopes partial method.
        internal static bool UseJwtAccessWithScopes
        {
            get
            {
                bool useJwtAccessWithScopes = true;
                MaybeUseJwtAccessWithScopes(ref useJwtAccessWithScopes);
                return useJwtAccessWithScopes;
            }
        }
        static partial void MaybeUseJwtAccessWithScopes(ref bool useJwtAccessWithScopes);
        /// <summary>
        /// Asynchronously creates a <see cref="CustomerFeedServiceClient"/> using the default credentials, endpoint and
        /// settings. To specify custom credentials or other settings, use
        /// <see cref="CustomerFeedServiceClientBuilder"/>.
        /// </summary>
        /// <param name="cancellationToken">
        /// The <see cref="st::CancellationToken"/> to use while creating the client.
        /// </param>
        /// <returns>The task representing the created <see cref="CustomerFeedServiceClient"/>.</returns>
        public static stt::Task<CustomerFeedServiceClient> CreateAsync(st::CancellationToken cancellationToken = default) =>
            new CustomerFeedServiceClientBuilder().BuildAsync(cancellationToken);
        /// <summary>
        /// Synchronously creates a <see cref="CustomerFeedServiceClient"/> using the default credentials, endpoint and
        /// settings. To specify custom credentials or other settings, use
        /// <see cref="CustomerFeedServiceClientBuilder"/>.
        /// </summary>
        /// <returns>The created <see cref="CustomerFeedServiceClient"/>.</returns>
        public static CustomerFeedServiceClient Create() => new CustomerFeedServiceClientBuilder().Build();
        /// <summary>
        /// Creates a <see cref="CustomerFeedServiceClient"/> which uses the specified call invoker for remote
        /// operations.
        /// </summary>
        /// <param name="callInvoker">
        /// The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null.
        /// </param>
        /// <param name="settings">Optional <see cref="CustomerFeedServiceSettings"/>.</param>
        /// <returns>The created <see cref="CustomerFeedServiceClient"/>.</returns>
        internal static CustomerFeedServiceClient Create(grpccore::CallInvoker callInvoker, CustomerFeedServiceSettings settings = null)
        {
            gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker));
            // Wrap the invoker with any interceptor configured in the settings before handing it to gRPC.
            grpcinter::Interceptor interceptor = settings?.Interceptor;
            if (interceptor != null)
            {
                callInvoker = grpcinter::CallInvokerExtensions.Intercept(callInvoker, interceptor);
            }
            CustomerFeedService.CustomerFeedServiceClient grpcClient = new CustomerFeedService.CustomerFeedServiceClient(callInvoker);
            return new CustomerFeedServiceClientImpl(grpcClient, settings);
        }
        /// <summary>
        /// Shuts down any channels automatically created by <see cref="Create()"/> and
        /// <see cref="CreateAsync(st::CancellationToken)"/>. Channels which weren't automatically created are not
        /// affected.
        /// </summary>
        /// <remarks>
        /// After calling this method, further calls to <see cref="Create()"/> and
        /// <see cref="CreateAsync(st::CancellationToken)"/> will create new channels, which could in turn be shut down
        /// by another call to this method.
        /// </remarks>
        /// <returns>A task representing the asynchronous shutdown operation.</returns>
        public static stt::Task ShutdownDefaultChannelsAsync() => ChannelPool.ShutdownChannelsAsync();
        /// <summary>The underlying gRPC CustomerFeedService client</summary>
        public virtual CustomerFeedService.CustomerFeedServiceClient GrpcClient => throw new sys::NotImplementedException();
        /// <summary>
        /// Creates, updates, or removes customer feeds. Operation statuses are
        /// returned.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [CollectionSizeError]()
        /// [CustomerFeedError]()
        /// [DatabaseError]()
        /// [DistinctError]()
        /// [FieldError]()
        /// [FieldMaskError]()
        /// [FunctionError]()
        /// [FunctionParsingError]()
        /// [HeaderError]()
        /// [IdError]()
        /// [InternalError]()
        /// [MutateError]()
        /// [NotEmptyError]()
        /// [OperatorError]()
        /// [QuotaError]()
        /// [RangeError]()
        /// [RequestError]()
        /// [SizeLimitError]()
        /// [StringFormatError]()
        /// [StringLengthError]()
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>The RPC response.</returns>
        public virtual MutateCustomerFeedsResponse MutateCustomerFeeds(MutateCustomerFeedsRequest request, gaxgrpc::CallSettings callSettings = null) =>
            throw new sys::NotImplementedException();
        /// <summary>
        /// Creates, updates, or removes customer feeds. Operation statuses are
        /// returned.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [CollectionSizeError]()
        /// [CustomerFeedError]()
        /// [DatabaseError]()
        /// [DistinctError]()
        /// [FieldError]()
        /// [FieldMaskError]()
        /// [FunctionError]()
        /// [FunctionParsingError]()
        /// [HeaderError]()
        /// [IdError]()
        /// [InternalError]()
        /// [MutateError]()
        /// [NotEmptyError]()
        /// [OperatorError]()
        /// [QuotaError]()
        /// [RangeError]()
        /// [RequestError]()
        /// [SizeLimitError]()
        /// [StringFormatError]()
        /// [StringLengthError]()
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public virtual stt::Task<MutateCustomerFeedsResponse> MutateCustomerFeedsAsync(MutateCustomerFeedsRequest request, gaxgrpc::CallSettings callSettings = null) =>
            throw new sys::NotImplementedException();
        /// <summary>
        /// Creates, updates, or removes customer feeds. Operation statuses are
        /// returned.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [CollectionSizeError]()
        /// [CustomerFeedError]()
        /// [DatabaseError]()
        /// [DistinctError]()
        /// [FieldError]()
        /// [FieldMaskError]()
        /// [FunctionError]()
        /// [FunctionParsingError]()
        /// [HeaderError]()
        /// [IdError]()
        /// [InternalError]()
        /// [MutateError]()
        /// [NotEmptyError]()
        /// [OperatorError]()
        /// [QuotaError]()
        /// [RangeError]()
        /// [RequestError]()
        /// [SizeLimitError]()
        /// [StringFormatError]()
        /// [StringLengthError]()
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public virtual stt::Task<MutateCustomerFeedsResponse> MutateCustomerFeedsAsync(MutateCustomerFeedsRequest request, st::CancellationToken cancellationToken) =>
            MutateCustomerFeedsAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
        /// <summary>
        /// Creates, updates, or removes customer feeds. Operation statuses are
        /// returned.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [CollectionSizeError]()
        /// [CustomerFeedError]()
        /// [DatabaseError]()
        /// [DistinctError]()
        /// [FieldError]()
        /// [FieldMaskError]()
        /// [FunctionError]()
        /// [FunctionParsingError]()
        /// [HeaderError]()
        /// [IdError]()
        /// [InternalError]()
        /// [MutateError]()
        /// [NotEmptyError]()
        /// [OperatorError]()
        /// [QuotaError]()
        /// [RangeError]()
        /// [RequestError]()
        /// [SizeLimitError]()
        /// [StringFormatError]()
        /// [StringLengthError]()
        /// </summary>
        /// <param name="customerId">
        /// Required. The ID of the customer whose customer feeds are being modified.
        /// </param>
        /// <param name="operations">
        /// Required. The list of operations to perform on individual customer feeds.
        /// </param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>The RPC response.</returns>
        public virtual MutateCustomerFeedsResponse MutateCustomerFeeds(string customerId, scg::IEnumerable<CustomerFeedOperation> operations, gaxgrpc::CallSettings callSettings = null) =>
            MutateCustomerFeeds(new MutateCustomerFeedsRequest
            {
                CustomerId = gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)),
                Operations =
                {
                    gax::GaxPreconditions.CheckNotNull(operations, nameof(operations)),
                },
            }, callSettings);
        /// <summary>
        /// Creates, updates, or removes customer feeds. Operation statuses are
        /// returned.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [CollectionSizeError]()
        /// [CustomerFeedError]()
        /// [DatabaseError]()
        /// [DistinctError]()
        /// [FieldError]()
        /// [FieldMaskError]()
        /// [FunctionError]()
        /// [FunctionParsingError]()
        /// [HeaderError]()
        /// [IdError]()
        /// [InternalError]()
        /// [MutateError]()
        /// [NotEmptyError]()
        /// [OperatorError]()
        /// [QuotaError]()
        /// [RangeError]()
        /// [RequestError]()
        /// [SizeLimitError]()
        /// [StringFormatError]()
        /// [StringLengthError]()
        /// </summary>
        /// <param name="customerId">
        /// Required. The ID of the customer whose customer feeds are being modified.
        /// </param>
        /// <param name="operations">
        /// Required. The list of operations to perform on individual customer feeds.
        /// </param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public virtual stt::Task<MutateCustomerFeedsResponse> MutateCustomerFeedsAsync(string customerId, scg::IEnumerable<CustomerFeedOperation> operations, gaxgrpc::CallSettings callSettings = null) =>
            MutateCustomerFeedsAsync(new MutateCustomerFeedsRequest
            {
                CustomerId = gax::GaxPreconditions.CheckNotNullOrEmpty(customerId, nameof(customerId)),
                Operations =
                {
                    gax::GaxPreconditions.CheckNotNull(operations, nameof(operations)),
                },
            }, callSettings);
        /// <summary>
        /// Creates, updates, or removes customer feeds. Operation statuses are
        /// returned.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [CollectionSizeError]()
        /// [CustomerFeedError]()
        /// [DatabaseError]()
        /// [DistinctError]()
        /// [FieldError]()
        /// [FieldMaskError]()
        /// [FunctionError]()
        /// [FunctionParsingError]()
        /// [HeaderError]()
        /// [IdError]()
        /// [InternalError]()
        /// [MutateError]()
        /// [NotEmptyError]()
        /// [OperatorError]()
        /// [QuotaError]()
        /// [RangeError]()
        /// [RequestError]()
        /// [SizeLimitError]()
        /// [StringFormatError]()
        /// [StringLengthError]()
        /// </summary>
        /// <param name="customerId">
        /// Required. The ID of the customer whose customer feeds are being modified.
        /// </param>
        /// <param name="operations">
        /// Required. The list of operations to perform on individual customer feeds.
        /// </param>
        /// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public virtual stt::Task<MutateCustomerFeedsResponse> MutateCustomerFeedsAsync(string customerId, scg::IEnumerable<CustomerFeedOperation> operations, st::CancellationToken cancellationToken) =>
            MutateCustomerFeedsAsync(customerId, operations, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
    }
    /// <summary>CustomerFeedService client wrapper implementation, for convenient use.</summary>
    /// <remarks>
    /// Service to manage customer feeds.
    /// </remarks>
    public sealed partial class CustomerFeedServiceClientImpl : CustomerFeedServiceClient
    {
        private readonly gaxgrpc::ApiCall<MutateCustomerFeedsRequest, MutateCustomerFeedsResponse> _callMutateCustomerFeeds;
        /// <summary>
        /// Constructs a client wrapper for the CustomerFeedService service, with the specified gRPC client and
        /// settings.
        /// </summary>
        /// <param name="grpcClient">The underlying gRPC client.</param>
        /// <param name="settings">The base <see cref="CustomerFeedServiceSettings"/> used within this client.</param>
        public CustomerFeedServiceClientImpl(CustomerFeedService.CustomerFeedServiceClient grpcClient, CustomerFeedServiceSettings settings)
        {
            GrpcClient = grpcClient;
            CustomerFeedServiceSettings effectiveSettings = settings ?? CustomerFeedServiceSettings.GetDefault();
            gaxgrpc::ClientHelper clientHelper = new gaxgrpc::ClientHelper(effectiveSettings);
            // The request's customer_id field is sent as the "customer_id" routing parameter.
            _callMutateCustomerFeeds = clientHelper.BuildApiCall<MutateCustomerFeedsRequest, MutateCustomerFeedsResponse>(grpcClient.MutateCustomerFeedsAsync, grpcClient.MutateCustomerFeeds, effectiveSettings.MutateCustomerFeedsSettings).WithGoogleRequestParam("customer_id", request => request.CustomerId);
            Modify_ApiCall(ref _callMutateCustomerFeeds);
            Modify_MutateCustomerFeedsApiCall(ref _callMutateCustomerFeeds);
            OnConstruction(grpcClient, effectiveSettings, clientHelper);
        }
        // Partial-method hooks for hand-written customization of the generated API calls and construction.
        partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call) where TRequest : class, proto::IMessage<TRequest> where TResponse : class, proto::IMessage<TResponse>;
        partial void Modify_MutateCustomerFeedsApiCall(ref gaxgrpc::ApiCall<MutateCustomerFeedsRequest, MutateCustomerFeedsResponse> call);
        partial void OnConstruction(CustomerFeedService.CustomerFeedServiceClient grpcClient, CustomerFeedServiceSettings effectiveSettings, gaxgrpc::ClientHelper clientHelper);
        /// <summary>The underlying gRPC CustomerFeedService client</summary>
        public override CustomerFeedService.CustomerFeedServiceClient GrpcClient { get; }
        partial void Modify_MutateCustomerFeedsRequest(ref MutateCustomerFeedsRequest request, ref gaxgrpc::CallSettings settings);
        /// <summary>
        /// Creates, updates, or removes customer feeds. Operation statuses are
        /// returned.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [CollectionSizeError]()
        /// [CustomerFeedError]()
        /// [DatabaseError]()
        /// [DistinctError]()
        /// [FieldError]()
        /// [FieldMaskError]()
        /// [FunctionError]()
        /// [FunctionParsingError]()
        /// [HeaderError]()
        /// [IdError]()
        /// [InternalError]()
        /// [MutateError]()
        /// [NotEmptyError]()
        /// [OperatorError]()
        /// [QuotaError]()
        /// [RangeError]()
        /// [RequestError]()
        /// [SizeLimitError]()
        /// [StringFormatError]()
        /// [StringLengthError]()
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>The RPC response.</returns>
        public override MutateCustomerFeedsResponse MutateCustomerFeeds(MutateCustomerFeedsRequest request, gaxgrpc::CallSettings callSettings = null)
        {
            Modify_MutateCustomerFeedsRequest(ref request, ref callSettings);
            return _callMutateCustomerFeeds.Sync(request, callSettings);
        }
        /// <summary>
        /// Creates, updates, or removes customer feeds. Operation statuses are
        /// returned.
        ///
        /// List of thrown errors:
        /// [AuthenticationError]()
        /// [AuthorizationError]()
        /// [CollectionSizeError]()
        /// [CustomerFeedError]()
        /// [DatabaseError]()
        /// [DistinctError]()
        /// [FieldError]()
        /// [FieldMaskError]()
        /// [FunctionError]()
        /// [FunctionParsingError]()
        /// [HeaderError]()
        /// [IdError]()
        /// [InternalError]()
        /// [MutateError]()
        /// [NotEmptyError]()
        /// [OperatorError]()
        /// [QuotaError]()
        /// [RangeError]()
        /// [RequestError]()
        /// [SizeLimitError]()
        /// [StringFormatError]()
        /// [StringLengthError]()
        /// </summary>
        /// <param name="request">The request object containing all of the parameters for the API call.</param>
        /// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
        /// <returns>A Task containing the RPC response.</returns>
        public override stt::Task<MutateCustomerFeedsResponse> MutateCustomerFeedsAsync(MutateCustomerFeedsRequest request, gaxgrpc::CallSettings callSettings = null)
        {
            Modify_MutateCustomerFeedsRequest(ref request, ref callSettings);
            return _callMutateCustomerFeeds.Async(request, callSettings);
        }
    }
}
| |
//
// $Id: MruStripMenu.cs 1593 2009-12-03 17:21:14Z chambm $
//
using System;
using System.Collections;
using System.ComponentModel;
using System.IO;
using System.Text;
using System.Windows.Forms;
using System.Threading;
using Microsoft.Win32;
namespace JWC
{
/// <summary>
/// Represents a most recently used (MRU) menu.
/// </summary>
/// <remarks>This class shows the MRU list in a popup menu. To display
/// the MRU list "inline" use <see labelName="MruMenuInline" />.
/// <para>The class will optionally load the last set of files from the registry
/// on construction and store them when instructed by the main program.</para>
/// <para>Internally, this class uses zero-based numbering for the items.
/// The displayed numbers, however, will start with one.</para></remarks>
public class MruStripMenu
{
private ClickedHandler clickedHandler;
protected ToolStripMenuItem recentFileMenuItem;
protected string registryKeyName;
protected int numEntries = 0;
protected int maxEntries = 4;
protected int maxShortenPathLength = 96;
protected Mutex mruStripMutex;
#region MruMenuItem
/// <summary>
/// The menu item which will contain the MRU entry.
/// </summary>
/// <remarks>The menu may display a shortened or otherwise invalid pathname.
/// This class stores the actual filename, preferably as a fully
/// resolved labelName, that will be returned in the event handler.</remarks>
public class MruMenuItem : ToolStripMenuItem
{
/// <summary>
/// Initializes a new instance of the MruMenuItem class.
/// </summary>
public MruMenuItem()
{
Tag = "";
}
/// <summary>
/// Initializes an MruMenuItem object.
/// </summary>
/// <param labelName="filename">The string to actually return in the <paramref labelName="eventHandler">eventHandler</paramref>.</param>
/// <param labelName="entryname">The string that will be displayed in the menu.</param>
/// <param labelName="eventHandler">The <see cref="EventHandler">EventHandler</see> that
/// handles the <see cref="MenuItem.Click">Click</see> event for this menu item.</param>
public MruMenuItem(string filename, string entryname, EventHandler eventHandler)
{
Tag = filename;
Text = entryname;
Click += eventHandler;
}
/// <summary>
/// Gets the filename.
/// </summary>
/// <value>Gets the filename.</value>
public string Filename
{
get
{
return (string) Tag;
}
set
{
Tag = value;
}
}
}
#endregion
		#region Construction
		/// <summary>Parameterless constructor for derived classes; the derived class must call Init itself.</summary>
		protected MruStripMenu() {}
		/// <summary>
		/// Initializes a new instance of the MruMenu class.
		/// </summary>
		/// <param name="recentFileMenuItem">The temporary menu item which will be replaced with the MRU list.</param>
		/// <param name="clickedHandler">The delegate to handle the item selection (click) event.</param>
		public MruStripMenu(ToolStripMenuItem recentFileMenuItem, ClickedHandler clickedHandler)
			: this(recentFileMenuItem, clickedHandler, null, false, 4)
		{
		}
		/// <summary>
		/// Initializes a new instance of the MruMenu class.
		/// </summary>
		/// <param name="recentFileMenuItem">The temporary menu item which will be replaced with the MRU list.</param>
		/// <param name="clickedHandler">The delegate to handle the item selection (click) event.</param>
		/// <param name="maxEntries">The maximum number of items on the MRU list.</param>
		public MruStripMenu(ToolStripMenuItem recentFileMenuItem, ClickedHandler clickedHandler, int maxEntries)
			: this(recentFileMenuItem, clickedHandler, null, false, maxEntries)
		{
		}
		/// <summary>
		/// Initializes a new instance of the MruMenu class.
		/// </summary>
		/// <param name="recentFileMenuItem">The temporary menu item which will be replaced with the MRU list.</param>
		/// <param name="clickedHandler">The delegate to handle the item selection (click) event.</param>
		/// <param name="registryKeyName">The name or path of the registry key to use to store the MRU list and settings.</param>
		public MruStripMenu(ToolStripMenuItem recentFileMenuItem, ClickedHandler clickedHandler, string registryKeyName)
			: this(recentFileMenuItem, clickedHandler, registryKeyName, true, 4)
		{
		}
		/// <summary>
		/// Initializes a new instance of the MruMenu class.
		/// </summary>
		/// <param name="recentFileMenuItem">The temporary menu item which will be replaced with the MRU list.</param>
		/// <param name="clickedHandler">The delegate to handle the item selection (click) event.</param>
		/// <param name="registryKeyName">The name or path of the registry key to use to store the MRU list and settings.</param>
		/// <param name="maxEntries">The maximum number of items on the MRU list.</param>
		public MruStripMenu(ToolStripMenuItem recentFileMenuItem, ClickedHandler clickedHandler, string registryKeyName, int maxEntries)
			: this(recentFileMenuItem, clickedHandler, registryKeyName, true, maxEntries)
		{
		}
		/// <summary>
		/// Initializes a new instance of the MruMenu class.
		/// </summary>
		/// <param name="recentFileMenuItem">The temporary menu item which will be replaced with the MRU list.</param>
		/// <param name="clickedHandler">The delegate to handle the item selection (click) event.</param>
		/// <param name="registryKeyName">The name or path of the registry key to use to store the MRU list and settings.</param>
		/// <param name="loadFromRegistry">Loads the MRU settings from the registry immediately.</param>
		public MruStripMenu(ToolStripMenuItem recentFileMenuItem, ClickedHandler clickedHandler, string registryKeyName, bool loadFromRegistry)
			: this(recentFileMenuItem, clickedHandler, registryKeyName, loadFromRegistry, 4)
		{
		}
		/// <summary>
		/// Initializes a new instance of the MruMenu class.
		/// </summary>
		/// <param name="recentFileMenuItem">The temporary menu item which will be replaced with the MRU list.</param>
		/// <param name="clickedHandler">The delegate to handle the item selection (click) event.</param>
		/// <param name="registryKeyName">The name or path of the registry key to use to store the MRU list and settings.</param>
		/// <param name="loadFromRegistry">Loads the MRU settings from the registry immediately.</param>
		/// <param name="maxEntries">The maximum number of items on the MRU list.</param>
		public MruStripMenu(ToolStripMenuItem recentFileMenuItem, ClickedHandler clickedHandler, string registryKeyName, bool loadFromRegistry, int maxEntries)
		{
			Init(recentFileMenuItem, clickedHandler, registryKeyName, loadFromRegistry, maxEntries);
		}
		/// <summary>
		/// Common initialization used by all constructors.
		/// </summary>
		/// <param name="recentFileMenuItem">The temporary menu item which will be replaced with the MRU list. Must not be null.</param>
		/// <param name="clickedHandler">The delegate to handle the item selection (click) event.</param>
		/// <param name="registryKeyName">The registry key name/path used to persist the MRU list, or null to disable registry persistence.</param>
		/// <param name="loadFromRegistry">When true (and a key name is given), loads the MRU entries from the registry immediately.</param>
		/// <param name="maxEntries">The maximum number of items on the MRU list.</param>
		protected void Init(ToolStripMenuItem recentFileMenuItem, ClickedHandler clickedHandler, string registryKeyName, bool loadFromRegistry, int maxEntries)
		{
			if (recentFileMenuItem == null)
				throw new ArgumentNullException("recentFileMenuItem");
			this.recentFileMenuItem = recentFileMenuItem;
			// The placeholder item starts unchecked and disabled; Enable() turns it back on.
			this.recentFileMenuItem.Checked = false;
			this.recentFileMenuItem.Enabled = false;
			// Assign via the property so the [4, 16] clamping in MaxEntries applies.
			MaxEntries = maxEntries;
			this.clickedHandler = clickedHandler;
			if (registryKeyName != null)
			{
				RegistryKeyName = registryKeyName;
				if (loadFromRegistry)
					LoadFromRegistry();
			}
		}
#endregion
		#region Event Handling
		/// <summary>Callback invoked when an MRU entry is clicked; receives the zero-based entry number and the stored filename.</summary>
		public delegate void ClickedHandler(int number, string filename);
		// Click handler attached to every MruMenuItem; translates the item's menu position
		// back into a zero-based MRU entry number before invoking the user callback.
		protected void OnClick(object sender, System.EventArgs e)
		{
			MruMenuItem menuItem = (MruMenuItem) sender;
			clickedHandler(MenuItems.IndexOf(menuItem) - StartIndex, menuItem.Filename);
		}
		#endregion
		#region Properties
		// The collection that actually holds the MRU items; this popup variant uses the
		// drop-down of the placeholder menu item (inline variants override this).
		public virtual ToolStripItemCollection MenuItems
		{
			get
			{
				return recentFileMenuItem.DropDownItems;
			}
		}
		// Index of the first MRU item within MenuItems (overridden by inline variants).
		public virtual int StartIndex
		{
			get
			{
				return 0;
			}
		}
		// Index one past the last MRU item within MenuItems.
		public virtual int EndIndex
		{
			get
			{
				return numEntries;
			}
		}
		/// <summary>Gets the current number of MRU entries.</summary>
		public int NumEntries
		{
			get
			{
				return numEntries;
			}
		}
		/// <summary>Gets or sets the maximum number of MRU entries, clamped to the range [4, 16].</summary>
		public int MaxEntries
		{
			get
			{
				return maxEntries;
			}
			set
			{
				if (value > 16)
				{
					// NOTE(review): this branch caps the value but does not trim excess entries;
					// that appears safe only because maxEntries can never have exceeded 16, so
					// numEntries cannot exceed the new cap — confirm before relying on it.
					maxEntries = 16;
				}
				else
				{
					maxEntries = value < 4 ? 4 : value;
					// Remove entries beyond the new cap; repeatedly removing at the fixed index
					// shifts each remaining excess entry into place.
					int index = StartIndex + maxEntries;
					while (numEntries > maxEntries)
					{
						MenuItems.RemoveAt(index);
						numEntries--;
					}
				}
			}
		}
		/// <summary>Gets or sets the maximum displayed path length; values below 16 are raised to 16.</summary>
		public int MaxShortenPathLength
		{
			get
			{
				return maxShortenPathLength;
			}
			set
			{
				maxShortenPathLength = value < 16 ? 16 : value;
			}
		}
		// Whether the MRU list is rendered inline; this popup implementation always returns false.
		public virtual bool IsInline
		{
			get
			{
				return false;
			}
		}
		#endregion
		#region Helper Methods
		// Enables the placeholder menu item so the MRU drop-down can be opened.
		protected virtual void Enable()
		{
			recentFileMenuItem.Enabled = true;
		}
		// Disables the placeholder menu item (used when the MRU list has no entries).
		protected virtual void Disable()
		{
			recentFileMenuItem.Enabled = false;
			//recentFileMenuItem.MenuItems.RemoveAt(0);
		}
protected virtual void SetFirstFile(MruMenuItem menuItem)
{
}
public void SetFirstFile(int number)
{
if (number > 0 && numEntries > 1 && number < numEntries)
{
MruMenuItem menuItem = (MruMenuItem)MenuItems[StartIndex + number];
MenuItems.RemoveAt(StartIndex + number);
MenuItems.Insert(StartIndex, menuItem);
SetFirstFile(menuItem);
FixupPrefixes(0);
}
}
public static string FixupEntryname(int number, string entryname)
{
if (number < 9)
return "&" + (number + 1) + " " + entryname;
else if (number == 9)
return "1&0" + " " + entryname;
else
return (number + 1) + " " + entryname;
}
		// Renumbers the visible numeric prefixes ("&1", "1&0", "11", ...) from startNumber
		// onwards after entries have been inserted, removed or reordered.
		protected void FixupPrefixes(int startNumber)
		{
			if (startNumber < 0)
				startNumber = 0;
			if (startNumber < maxEntries)
			{
				for (int i = StartIndex + startNumber; i < EndIndex; i++, startNumber++)
				{
					// NOTE(review): the strip offsets below (5 for a "1&0" prefix, 4 otherwise)
					// assume a two-character separator after the numeric prefix, but
					// FixupEntryname as written emits a single space — if that is correct,
					// these offsets strip one character of the entry name. Confirm the
					// intended separator width.
					int offset = MenuItems[i].Text.Substring(0, 3) == "1&0" ? 5 : 4;
					MenuItems[i].Text = FixupEntryname(startNumber, MenuItems[i].Text.Substring(offset));
					// MenuItems[i].Text = FixupEntryname(startNumber, MenuItems[i].Text.Substring(startNumber == 10 ? 5 : 4));
				}
			}
		}
/// <summary>
/// Shortens a pathname for display purposes.
/// </summary>
/// <param name="pathname">The pathname to shorten.</param>
/// <param name="maxLength">The maximum number of characters to be displayed.</param>
/// <remarks>Shortens a pathname by either removing consecutive components of a path
/// and/or by removing characters from the end of the filename and replacing
/// them with an ellipsis (...)
/// <para>In all cases, the root of the passed path will be preserved in its entirety.</para>
/// <para>If a UNC path is used or the pathname and maxLength are particularly short,
/// the resulting path may be longer than maxLength.</para>
/// <para>This method expects fully resolved pathnames to be passed to it.
/// (Use Path.GetFullPath() to obtain this.)</para>
/// </remarks>
/// <returns>The shortened pathname, or the original if it already fits.</returns>
static public string ShortenPathname(string pathname, int maxLength)
{
    if (pathname.Length <= maxLength)
        return pathname;
    string root = Path.GetPathRoot(pathname);
    if (root.Length > 3)
        root += Path.DirectorySeparatorChar;
    // Split the non-root remainder into directory components plus the filename.
    string[] elements = pathname.Substring(root.Length).Split(Path.DirectorySeparatorChar, Path.AltDirectorySeparatorChar);
    int filenameIndex = elements.GetLength(0) - 1;
    if (elements.GetLength(0) == 1) // pathname is just a root and filename
    {
        if (elements[0].Length > 5) // long enough to shorten
        {
            // if path is a UNC path, root may be rather long
            if (root.Length + 6 >= maxLength)
            {
                return root + elements[0].Substring(0, 3) + "...";
            }
            else
            {
                return pathname.Substring(0, maxLength - 3) + "...";
            }
        }
    }
    else if ((root.Length + 4 + elements[filenameIndex].Length) > maxLength) // root + "...\" + filename does not fit
    {
        root += "...\\";
        int len = elements[filenameIndex].Length;
        if (len < 6)
            return root + elements[filenameIndex];
        // Truncate the filename itself, keeping at least three characters.
        if ((root.Length + 6) >= maxLength)
        {
            len = 3;
        }
        else
        {
            len = maxLength - root.Length - 3;
        }
        return root + elements[filenameIndex].Substring(0, len) + "...";
    }
    else if (elements.GetLength(0) == 2)
    {
        // Only one directory between root and filename: elide that directory.
        return root + "...\\" + elements[1];
    }
    else
    {
        // General case: elide a run of middle directories. Start from the
        // longest single component, then widen the elided range until the
        // estimated length fits (or nothing more can be removed).
        int len = 0;
        int begin = 0;
        for (int i = 0; i < filenameIndex; i++)
        {
            if (elements[i].Length > len)
            {
                begin = i;
                len = elements[i].Length;
            }
        }
        // totalLength approximates the result: original minus the longest
        // component plus the 3-character ellipsis.
        int totalLength = pathname.Length - len + 3;
        int end = begin + 1;
        while (totalLength > maxLength)
        {
            if (begin > 0)
                totalLength -= elements[--begin].Length - 1;
            if (totalLength <= maxLength)
                break;
            if (end < filenameIndex)
                totalLength -= elements[++end].Length - 1;
            if (begin == 0 && end == filenameIndex)
                break;
        }
        // assemble final string: kept head components, ellipsis, kept tail
        // components, then the filename.
        for (int i = 0; i < begin; i++)
        {
            root += elements[i] + '\\';
        }
        root += "...\\";
        for (int i = end; i < filenameIndex; i++)
        {
            root += elements[i] + '\\';
        }
        return root + elements[filenameIndex];
    }
    return pathname;
}
#endregion
#region Get Methods
/// <summary>
/// Returns the entry number matching the passed filename.
/// </summary>
/// <param name="filename">The filename to search for.</param>
/// <returns>The entry number of the matching filename or -1 if not found.</returns>
/// <exception cref="ArgumentNullException">filename is null.</exception>
/// <exception cref="ArgumentException">filename is empty.</exception>
public int FindFilenameNumber(string filename)
{
    if (filename == null)
        throw new ArgumentNullException("filename");
    if (filename.Length == 0)
        throw new ArgumentException("filename");
    if (numEntries > 0)
    {
        int number = 0;
        for (int i = StartIndex; i < EndIndex; i++, number++)
        {
            // Use an ordinal case-insensitive comparison: file paths are not
            // linguistic text, and the previous culture-sensitive comparison
            // could fail to match under some locales (e.g. 'i'/'I' in tr-TR).
            if (string.Equals(((MruMenuItem)MenuItems[i]).Filename, filename, StringComparison.OrdinalIgnoreCase))
            {
                return number;
            }
        }
    }
    return -1;
}
/// <summary>
/// Returns the menu index of the passed filename.
/// </summary>
/// <param name="filename">The filename to search for.</param>
/// <returns>The menu index of the matching filename or -1 if not found.</returns>
public int FindFilenameMenuIndex(string filename)
{
    int number = FindFilenameNumber(filename);
    if (number < 0)
        return -1;
    return StartIndex + number;
}
/// <summary>
/// Returns the menu index for a specified MRU item number.
/// </summary>
/// <param name="number">The MRU item number.</param>
/// <exception cref="ArgumentOutOfRangeException">number is outside [0, entry count).</exception>
/// <returns>The menu index of the passed MRU number.</returns>
public int GetMenuIndex(int number)
{
    if (number >= 0 && number < numEntries)
        return StartIndex + number;
    throw new ArgumentOutOfRangeException("number");
}
/// <summary>
/// Returns the full filename stored at the given MRU entry number.
/// </summary>
/// <param name="number">The MRU item number.</param>
/// <exception cref="ArgumentOutOfRangeException">number is outside [0, entry count).</exception>
public string GetFileAt(int number)
{
    if (number >= 0 && number < numEntries)
        return ((MruMenuItem)MenuItems[StartIndex + number]).Filename;
    throw new ArgumentOutOfRangeException("number");
}
/// <summary>
/// Returns the full filenames of all MRU entries, most recent first.
/// </summary>
public string[] GetFiles()
{
    string[] result = new string[numEntries];
    int start = StartIndex;
    for (int i = 0; i < result.Length; i++)
    {
        result[i] = ((MruMenuItem)MenuItems[start + i]).Filename;
    }
    return result;
}
// This is used for testing: returns the displayed menu text (prefix
// included) rather than the stored filenames.
public string[] GetFilesFullEntrystring()
{
    string[] result = new string[numEntries];
    int start = StartIndex;
    for (int i = 0; i < result.Length; i++)
    {
        result[i] = MenuItems[start + i].Text;
    }
    return result;
}
#endregion
#region Add Methods
/// <summary>
/// Replaces the current MRU list with the given filenames. The array is
/// added in reverse so that filenames[0] ends up as the most recent entry.
/// </summary>
public void SetFiles(string[] filenames)
{
    RemoveAll();
    AddFiles(filenames);
}
/// <summary>
/// Adds the given filenames, iterating in reverse so that filenames[0]
/// becomes the most recent entry.
/// </summary>
public void AddFiles(string[] filenames)
{
    for (int i = filenames.Length - 1; i >= 0; i--)
    {
        AddFile(filenames[i]);
    }
}
/// <summary>
/// Adds (or promotes) a file, using a display name shortened to
/// <see cref="MaxShortenPathLength"/> characters.
/// </summary>
public void AddFile(string filename)
{
    string fullPath = Path.GetFullPath(filename);
    AddFile(fullPath, ShortenPathname(fullPath, MaxShortenPathLength));
}
/// <summary>
/// Adds a file with an explicit display name. If the file is already in the
/// list it is promoted to the top; if the list is full, the oldest entry's
/// menu item is recycled for the new file.
/// </summary>
/// <param name="filename">Full path of the file.</param>
/// <param name="entryname">Display name (without the numeric prefix).</param>
/// <exception cref="ArgumentNullException">filename is null.</exception>
/// <exception cref="ArgumentException">filename is empty.</exception>
public void AddFile(string filename, string entryname)
{
    if (filename == null)
        throw new ArgumentNullException("filename");
    if (filename.Length == 0)
        throw new ArgumentException("filename");
    if (numEntries > 0)
    {
        // Already present: just promote it to the top.
        int index = FindFilenameMenuIndex(filename);
        if (index >= 0)
        {
            SetFirstFile(index - StartIndex);
            return;
        }
    }
    if (numEntries < maxEntries)
    {
        // Room left: insert a brand-new item at the top with the "#1" prefix.
        MruMenuItem menuItem = new MruMenuItem(filename, FixupEntryname(0, entryname), new System.EventHandler(OnClick));
        MenuItems.Insert(StartIndex, menuItem);
        SetFirstFile(menuItem);
        // Post-increment: numEntries was 0 iff this is the very first entry.
        if (numEntries++ == 0)
        {
            Enable();
        }
        else
        {
            // Entries 1..n-1 shifted down one slot; renumber them.
            FixupPrefixes(1);
        }
    }
    else if (numEntries > 1)
    {
        // List full: recycle the last item's menu entry for the new file and
        // move it to the top.
        MruMenuItem menuItem = (MruMenuItem) MenuItems[StartIndex + numEntries - 1];
        MenuItems.RemoveAt(StartIndex + numEntries - 1);
        menuItem.Text = FixupEntryname(0, entryname);
        menuItem.Filename = filename;
        MenuItems.Insert(StartIndex, menuItem);
        SetFirstFile(menuItem);
        FixupPrefixes(1);
    }
}
#endregion
#region Remove Methods
/// <summary>
/// Removes the MRU entry at the given position; out-of-range values
/// (including the -1 returned by FindFilenameNumber) are silently ignored.
/// </summary>
/// <param name="number">Zero-based entry number to remove.</param>
public void RemoveFile(int number)
{
    if (number >= 0 && number < numEntries)
    {
        // Pre-decrement: if this was the only entry, disable the menu instead
        // of removing the item (Disable handles the placeholder swap).
        if (--numEntries == 0)
        {
            Disable();
        }
        else
        {
            int startIndex = StartIndex;
            if (number == 0)
            {
                // Removing the first entry: the next one becomes "first" before
                // the removal so the inline variant keeps a valid anchor item.
                SetFirstFile((MruMenuItem)MenuItems[startIndex + 1]);
            }
            MenuItems.RemoveAt(startIndex + number);
            // Renumber the entries that shifted up (none if the last was removed).
            if (number < numEntries)
            {
                FixupPrefixes(number);
            }
        }
    }
}
/// <summary>
/// Removes the entry matching the given filename, if present.
/// </summary>
public void RemoveFile(string filename)
{
    if (numEntries > 0)
    {
        int number = FindFilenameNumber(filename);
        if (number >= 0)
        {
            RemoveFile(number);
        }
    }
}
/// <summary>
/// Removes all MRU entries and disables the parent menu item.
/// </summary>
public void RemoveAll()
{
    if (numEntries > 0)
    {
        // Remove every entry except the one at StartIndex (the loop condition
        // is index > StartIndex). NOTE(review): the first entry appears to be
        // left for Disable() — the inline subclass swaps it for the placeholder
        // item there; confirm the popup variant also cleans it up.
        for (int index = EndIndex - 1; index > StartIndex; index--)
        {
            MenuItems.RemoveAt(index);
        }
        Disable();
        numEntries = 0;
    }
}
#endregion
#region Rename Methods
public void RenameFile(string oldFilename, string newFilename)
{
string newPathname = Path.GetFullPath(newFilename);
RenameFile(Path.GetFullPath(oldFilename), newPathname, ShortenPathname(newPathname, MaxShortenPathLength));
}
/// <summary>
/// Renames an existing entry in place; if the old filename is not in the
/// list, the new file is simply added.
/// </summary>
/// <exception cref="ArgumentNullException">newFilename is null.</exception>
/// <exception cref="ArgumentException">newFilename is empty.</exception>
public void RenameFile(string oldFilename, string newFilename, string newEntryname)
{
    if (newFilename == null)
        throw new ArgumentNullException("newFilename");
    if (newFilename.Length == 0)
        throw new ArgumentException("newFilename");
    if (numEntries > 0)
    {
        int index = FindFilenameMenuIndex(oldFilename);
        if (index >= 0)
        {
            MruMenuItem item = (MruMenuItem)MenuItems[index];
            // NOTE(review): the prefix is rebuilt as entry #1 regardless of the
            // item's actual position — verify this is intended.
            item.Text = FixupEntryname(0, newEntryname);
            item.Filename = newFilename;
            return;
        }
    }
    AddFile(newFilename, newEntryname);
}
#endregion
#region Registry Methods
/// <summary>
/// Gets or sets the registry key name under HKCU used to persist the MRU
/// list. Setting it (re)creates the cross-process mutex that guards
/// Load/SaveToRegistry; a blank value clears both.
/// </summary>
public string RegistryKeyName
{
    get { return registryKeyName; }
    set
    {
        // Release any mutex tied to the previous key name.
        if (mruStripMutex != null)
            mruStripMutex.Close();
        registryKeyName = value.Trim();
        if (registryKeyName.Length == 0)
        {
            registryKeyName = null;
            mruStripMutex = null;
        }
        else
        {
            // Mutex names may not contain path separators.
            string mutexName = registryKeyName.Replace('\\', '_').Replace('/', '_') + "Mutex";
            mruStripMutex = new Mutex(false, mutexName);
        }
    }
}
/// <summary>
/// Sets the registry key name and immediately loads the MRU list from it.
/// </summary>
public void LoadFromRegistry(string keyName)
{
    RegistryKeyName = keyName;
    LoadFromRegistry();
}
/// <summary>
/// Loads the MRU list from the configured registry key (no-op when no key
/// name is set). Entries are stored as values "File1".."FileN"; they are
/// added in reverse so that "File1" ends up as the most recent entry.
/// </summary>
public void LoadFromRegistry()
{
    if (registryKeyName == null)
        return;
    mruStripMutex.WaitOne();
    try
    {
        RemoveAll();
        // using ensures the key handle is closed even if AddFile throws.
        using (RegistryKey regKey = Registry.CurrentUser.OpenSubKey(registryKeyName))
        {
            if (regKey != null)
            {
                maxEntries = (int)regKey.GetValue("max", maxEntries);
                for (int number = maxEntries; number > 0; number--)
                {
                    string filename = (string)regKey.GetValue("File" + number.ToString());
                    if (filename != null)
                        AddFile(filename);
                }
            }
        }
    }
    finally
    {
        // Previously the mutex leaked on any exception, deadlocking every
        // subsequent Load/Save across processes.
        mruStripMutex.ReleaseMutex();
    }
}
/// <summary>
/// Sets the registry key name and immediately saves the MRU list to it.
/// </summary>
public void SaveToRegistry(string keyName)
{
    RegistryKeyName = keyName;
    SaveToRegistry();
}
/// <summary>
/// Saves the MRU list to the configured registry key (no-op when no key
/// name is set) as values "max" and "File1".."FileN", clearing any stale
/// slots up to the historical maximum of 16.
/// </summary>
public void SaveToRegistry()
{
    if (registryKeyName == null)
        return;
    mruStripMutex.WaitOne();
    try
    {
        // using ensures the key handle is closed even on exception.
        using (RegistryKey regKey = Registry.CurrentUser.CreateSubKey(registryKeyName))
        {
            if (regKey != null)
            {
                regKey.SetValue("max", maxEntries);
                int number = 1;
                for (int i = StartIndex; i < EndIndex; i++, number++)
                {
                    regKey.SetValue("File" + number.ToString(), ((MruMenuItem)MenuItems[i]).Filename);
                }
                // Delete leftover slots from a previously larger list.
                for (; number <= 16; number++)
                {
                    regKey.DeleteValue("File" + number.ToString(), false);
                }
            }
        }
    }
    finally
    {
        // Previously the mutex leaked on any exception, deadlocking every
        // subsequent Load/Save across processes.
        mruStripMutex.ReleaseMutex();
    }
}
#endregion
}
/// <summary>
/// Represents an inline most recently used (MRU) menu.
/// </summary>
/// <remarks>
/// The MRU entries are shown directly inside the owning menu rather than in
/// a popup submenu. The designated "recent file" item acts as a placeholder
/// that is swapped out for the first real MRU entry while the list is
/// non-empty.
/// </remarks>
public class MruStripMenuInline : MruStripMenu
{
    protected ToolStripMenuItem owningMenu;
    // The menu item currently anchoring the MRU block: either the placeholder
    // (empty list) or the most recent entry.
    protected ToolStripMenuItem firstMenuItem;

    #region Construction
    public MruStripMenuInline(ToolStripMenuItem owningMenu, ToolStripMenuItem recentFileMenuItem, ClickedHandler clickedHandler)
        : this(owningMenu, recentFileMenuItem, clickedHandler, null, false, 4)
    {
    }

    public MruStripMenuInline(ToolStripMenuItem owningMenu, ToolStripMenuItem recentFileMenuItem, ClickedHandler clickedHandler, int maxEntries)
        : this(owningMenu, recentFileMenuItem, clickedHandler, null, false, maxEntries)
    {
    }

    public MruStripMenuInline(ToolStripMenuItem owningMenu, ToolStripMenuItem recentFileMenuItem, ClickedHandler clickedHandler, string registryKeyName)
        : this(owningMenu, recentFileMenuItem, clickedHandler, registryKeyName, true, 4)
    {
    }

    public MruStripMenuInline(ToolStripMenuItem owningMenu, ToolStripMenuItem recentFileMenuItem, ClickedHandler clickedHandler, string registryKeyName, int maxEntries)
        : this(owningMenu, recentFileMenuItem, clickedHandler, registryKeyName, true, maxEntries)
    {
    }

    public MruStripMenuInline(ToolStripMenuItem owningMenu, ToolStripMenuItem recentFileMenuItem, ClickedHandler clickedHandler, string registryKeyName, bool loadFromRegistry)
        : this(owningMenu, recentFileMenuItem, clickedHandler, registryKeyName, loadFromRegistry, 4)
    {
    }

    public MruStripMenuInline(ToolStripMenuItem owningMenu, ToolStripMenuItem recentFileMenuItem, ClickedHandler clickedHandler, string registryKeyName, bool loadFromRegistry, int maxEntries)
    {
        maxShortenPathLength = 48;
        this.owningMenu = owningMenu;
        this.firstMenuItem = recentFileMenuItem;
        Init(recentFileMenuItem, clickedHandler, registryKeyName, loadFromRegistry, maxEntries);
    }
    #endregion

    #region Overridden Properties
    // The MRU entries live directly in the owning menu's drop-down.
    public override ToolStripItemCollection MenuItems
    {
        get { return owningMenu.DropDownItems; }
    }

    // The anchor item's position within the owning menu.
    public override int StartIndex
    {
        get { return MenuItems.IndexOf(firstMenuItem); }
    }

    public override int EndIndex
    {
        get { return StartIndex + numEntries; }
    }

    public override bool IsInline
    {
        get { return true; }
    }
    #endregion

    #region Overridden Methods
    protected override void Enable()
    {
        // The placeholder is replaced by the first real MRU entry.
        MenuItems.Remove(recentFileMenuItem);
    }

    protected override void SetFirstFile(MruMenuItem menuItem)
    {
        firstMenuItem = menuItem;
    }

    protected override void Disable()
    {
        // Swap the last remaining MRU entry back out for the placeholder.
        int index = MenuItems.IndexOf(firstMenuItem);
        MenuItems.RemoveAt(index);
        MenuItems.Insert(index, recentFileMenuItem);
        firstMenuItem = recentFileMenuItem;
    }
    #endregion
}
}
| |
/*
Copyright 2015 Shane Lillie
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading.Tasks;
using Android.Content.PM;
using Android.OS;
using EnergonSoftware.BackpackPlanner.Core.Logging;
using EnergonSoftware.BackpackPlanner.Droid.Logging;
using EnergonSoftware.BackpackPlanner.Droid.Permissions;
namespace EnergonSoftware.BackpackPlanner.Droid.Activities
{
/// <summary>
/// Base class for the app's activities: owns the process-wide application
/// state, permission plumbing, preference loading and HockeyApp lifecycle
/// forwarding.
/// </summary>
public abstract class BaseActivity : Android.Support.V7.App.AppCompatActivity
{
    private static readonly ILogger Logger = CustomLogger.GetLogger(typeof(BaseActivity));
    // this needs to be static for the managers it contains
    private static BackpackPlannerState _backpackPlannerStateInstance;
    // need a separate reference to this because it needs to
    // track the current activity
    private static DroidPermissionRequestFactory _permissionRequestFactory;
    // guards against reloading preferences every time an activity is created
    private static bool _preferencesLoaded;
    // Wire up platform logging once per process.
    static BaseActivity()
    {
        CustomLogger.ReverseLogBufferDirection = true;
        CustomLogger.PlatformLogger = new DroidLogger();
    }
    public BackpackPlannerState BackpackPlannerState => _backpackPlannerStateInstance;
    public AdManager AdManager { get; } = new AdManager();
    // Theme resource applied in OnCreate; override to re-skin an activity.
    public virtual int AppTheme => Resource.Style.AppTheme;
#if DEBUG_LIFECYCLE
    private readonly Stopwatch _startupStopwatch = new Stopwatch();
#endif
    public Android.Support.V7.Widget.Toolbar Toolbar { get; private set; }
    // Pending permission requests keyed by request code; all requests sharing
    // a code are notified together from OnRequestPermissionsResult().
    private readonly Dictionary<DroidPermissionRequest.DroidPermissionRequestCode, List<DroidPermissionRequest>> _permissionRequests = new Dictionary<DroidPermissionRequest.DroidPermissionRequestCode, List<DroidPermissionRequest>>();
    #region Activity Lifecycle
    protected override void OnCreate(Bundle savedInstanceState)
    {
#if DEBUG_LIFECYCLE
        Logger.Debug($"OnCreate - {GetType()}");
        _startupStopwatch.Start();
#endif
        base.OnCreate(savedInstanceState);
        SetTheme(AppTheme);
#if DEBUG
        Logger.Debug($"Android ID: {Android.Provider.Settings.Secure.GetString(ContentResolver, Android.Provider.Settings.Secure.AndroidId)}");
#endif
        AdManager.Initialize(this);
        InitPermissions();
        // NOTE(review): synchronously blocks the UI thread on async
        // initialization; InitBackpackPlannerState uses ConfigureAwait(false)
        // which avoids the classic sync-context deadlock, but confirm this
        // cannot cause an ANR on slow devices.
        InitBackpackPlannerState().Wait();
        BackpackPlannerState.DatabaseState.Init(System.Environment.GetFolderPath(System.Environment.SpecialFolder.Personal));
        LoadPreferences(this);
        ((HockeyAppManager)_backpackPlannerStateInstance.PlatformHockeyAppManager).OnCreate(this);
    }
    protected override void OnDestroy()
    {
#if DEBUG_LIFECYCLE
        Logger.Debug($"OnDestroy - {GetType()}");
#endif
        // remove waiting permission requests
        foreach(var kvp in _permissionRequests) {
            kvp.Value.RemoveAll(x => this == x.Activity);
        }
        // this is bad, we only want to do this if the
        // full application is closed, not the activity
        //_backpackPlannerState.Destroy();
        base.OnDestroy();
    }
    protected override void OnStart()
    {
#if DEBUG_LIFECYCLE
        Logger.Debug($"OnStart - {GetType()}");
        if(_startupStopwatch.IsRunning) {
            Logger.Debug($"Time to Activity.Start(): {_startupStopwatch.ElapsedMilliseconds}ms");
        }
#endif
        base.OnStart();
    }
    protected override void OnStop()
    {
#if DEBUG_LIFECYCLE
        Logger.Debug($"OnStop - {GetType()}");
#endif
        base.OnStop();
    }
    protected override void OnResume()
    {
#if DEBUG_LIFECYCLE
        Logger.Debug($"OnResume - {GetType()}");
        if(_startupStopwatch.IsRunning) {
            Logger.Debug($"Time to Activity.OnResume() finish: {_startupStopwatch.ElapsedMilliseconds}ms");
        }
        _startupStopwatch.Stop();
#endif
        base.OnResume();
        ((HockeyAppManager)_backpackPlannerStateInstance.PlatformHockeyAppManager).OnResume(this);
    }
    protected override void OnPause()
    {
#if DEBUG_LIFECYCLE
        Logger.Debug($"OnPause - {GetType()}");
#endif
        ((HockeyAppManager)_backpackPlannerStateInstance.PlatformHockeyAppManager).OnPause(this);
        base.OnPause();
    }
    #endregion
    #region Permissions
    // https://developer.android.com/training/permissions/index.html
    // https://developer.android.com/training/permissions/requesting.html
    /// <summary>
    /// Permission request callback
    /// </summary>
    /// <param name="requestCode">The request code.</param>
    /// <param name="permissions">The permissions.</param>
    /// <param name="grantResults">The grant results.</param>
    public override void OnRequestPermissionsResult(int requestCode, string[] permissions, Permission[] grantResults)
    {
        base.OnRequestPermissionsResult(requestCode, permissions, grantResults);
        DroidPermissionRequest.DroidPermissionRequestCode droidRequestCode = (DroidPermissionRequest.DroidPermissionRequestCode)requestCode;
        // Only the first result is inspected; each request code maps to a
        // single permission in CheckPermission below.
        bool granted = grantResults.Length > 0 && grantResults[0] == Permission.Granted;
        Logger.Info($"Got permission result for request code {droidRequestCode}: {granted}");
        List<DroidPermissionRequest> requests;
        if(!_permissionRequests.TryGetValue(droidRequestCode, out requests)) {
            Logger.Warn($"Attempt to notify for request code {droidRequestCode}, which does not exist!");
            return;
        }
        // Notify every caller waiting on this request code, then reset.
        foreach(DroidPermissionRequest request in requests) {
            request.Notify(granted);
        }
        requests.Clear();
    }
    /// <summary>
    /// Checks for the given permission.
    /// </summary>
    /// <param name="permissionRequest">The permission request.</param>
    /// <param name="showExplanation">Callback to show explanation.</param>
    /// <returns>The permission request</returns>
    /// <remarks>
    /// Caller must Notify() on the returned request
    /// </remarks>
    public async Task CheckPermission(DroidPermissionRequest permissionRequest, Func<Task> showExplanation=null)
    {
        // TODO: does this need to be forced onto the UI thread?
        Logger.Info($"Checking permission {permissionRequest.Permission} (DroidPermission={permissionRequest.DroidPermission}, RequestCode={permissionRequest.RequestCode})...");
        // permission already granted
        if(Permission.Granted == Android.Support.V4.Content.ContextCompat.CheckSelfPermission(this, permissionRequest.DroidPermission)) {
            Logger.Info("Permission already granted, notifying...");
            permissionRequest.Notify(true);
            return;
        }
        // need to show rationale first? only happens if permission is denied
        if(Android.Support.V4.App.ActivityCompat.ShouldShowRequestPermissionRationale(this, permissionRequest.DroidPermission)) {
            Logger.Info("Permission rationale required...");
            if(null == showExplanation) {
                Logger.Info("No rationale specified, notifying denied...");
                // no rationale to show, so the request is denied
                permissionRequest.Notify(false);
                return;
            }
            // wait for the user to get on board
            Logger.Info("Showing rationale...");
            await showExplanation().ConfigureAwait(false);
            // re-check the permission (with no explanation this time)
            Logger.Info("Re-checking permission...");
            await CheckPermission(permissionRequest, null).ConfigureAwait(false);
            return;
        }
        List<DroidPermissionRequest> requests;
        if(!_permissionRequests.TryGetValue(permissionRequest.RequestCode, out requests)) {
            requests = new List<DroidPermissionRequest>();
            _permissionRequests.Add(permissionRequest.RequestCode, requests);
        }
        // Only the first pending request for a code triggers the OS prompt;
        // later requests piggy-back on its result.
        if(!requests.Any()) {
            Logger.Info($"Requesting permission {permissionRequest.Permission} ({permissionRequest.DroidPermission}) using request code {(int)permissionRequest.RequestCode}...");
            Android.Support.V4.App.ActivityCompat.RequestPermissions(this, new[] { permissionRequest.DroidPermission }, (int)permissionRequest.RequestCode);
        }
        requests.Add(permissionRequest);
    }
    #endregion
    // (Re)point the shared request factory at the current activity.
    private void InitPermissions()
    {
        if(null == _permissionRequestFactory) {
            _permissionRequestFactory = new DroidPermissionRequestFactory();
        }
        _permissionRequestFactory.Activity = this;
    }
    // Creates the process-wide state on first activity creation only.
    private async Task InitBackpackPlannerState()
    {
        if(null != _backpackPlannerStateInstance) {
            return;
        }
        _backpackPlannerStateInstance = new BackpackPlannerState(
            new HockeyAppManager(),
            new DroidSettingsManager(Android.Support.V7.Preferences.PreferenceManager.GetDefaultSharedPreferences(this)),
            new DroidPlayServicesManager(),
            _permissionRequestFactory
        );
        // have to do this on the main thread
        // NOTE(review): ConfigureAwait(false) lets the continuation run off the
        // main thread, which seems to contradict the comment above — verify.
        await _backpackPlannerStateInstance.InitAsync().ConfigureAwait(false);
    }
    // Applies default preference values and loads settings, once per process.
    private void LoadPreferences(BaseActivity activity)
    {
        if(_preferencesLoaded) {
            return;
        }
        Logger.Debug("Setting default preferences...");
        Android.Support.V7.Preferences.PreferenceManager.SetDefaultValues(activity, Resource.Xml.settings, false);
        Logger.Debug("Loading preferences...");
        _backpackPlannerStateInstance.PlatformSettingsManager.Load(_backpackPlannerStateInstance.Settings, _backpackPlannerStateInstance.PersonalInformation);
        _preferencesLoaded = true;
    }
    // Call from a subclass after SetContentView to hook up the action bar.
    protected void InitToolbar()
    {
        Toolbar = FindViewById<Android.Support.V7.Widget.Toolbar>(Resource.Id.toolbar);
        SetSupportActionBar(Toolbar);
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.ComponentModel.DataAnnotations;
using System.Linq;
using Signum.Entities;
using Signum.Utilities;
using Signum.React.ApiControllers;
using Signum.Engine.Basics;
using Signum.Entities.UserAssets;
using Signum.Entities.DynamicQuery;
using Signum.Engine;
using Signum.React.Files;
using Signum.Engine.UserAssets;
using System.IO;
using Microsoft.AspNetCore.Mvc;
using Signum.React.Filters;
using Signum.Entities.UserQueries;
using System.Collections.ObjectModel;
using Signum.React.Facades;
using System.Text.Json;
namespace Signum.React.UserAssets
{
[ValidateModelFilter]
/// <summary>
/// API endpoints for user assets: parsing/stringifying query filters and
/// exporting/importing user-asset entities as XML.
/// </summary>
public class UserAssetController : ControllerBase
{
    // Request DTO for ParseFilters. Public camelCase fields mirror the
    // TypeScript client's JSON contract.
    public class ParseFiltersRequest
    {
        public string queryKey;
        public bool canAggregate;
        public List<QueryFilterItem> filters;
        // Optional entity used to resolve [CurrentEntity] tokens in values.
        public Lite<Entity> entity;
    }
    // Converts a flat, indentation-encoded filter list into a tree of
    // FilterNodes for the given query.
    [HttpPost("api/userAssets/parseFilters")]
    public List<FilterNode> ParseFilters([Required, FromBody]ParseFiltersRequest request)
    {
        var queryName = QueryLogic.ToQueryName(request.queryKey);
        var qd = QueryLogic.Queries.QueryDescription(queryName);
        var options = SubTokensOptions.CanAnyAll | SubTokensOptions.CanElement | (request.canAggregate ? SubTokensOptions.CanAggregate : 0);
        // Scope [CurrentEntity] resolution to the supplied entity, if any.
        using (request.entity != null ? CurrentEntityConverter.SetCurrentEntity(request.entity.RetrieveAndRemember()) : null)
        {
            var result = ParseFilterInternal(request.filters, qd, options, 0).ToList();
            return result;
        }
    }
    // Recursively rebuilds the filter tree: items at the current indentation
    // become nodes; deeper-indented items become their children.
    static List<FilterNode> ParseFilterInternal(IEnumerable<QueryFilterItem> filters, QueryDescription qd, SubTokensOptions options, int indent)
    {
        return filters.GroupWhen(filter => filter.indentation == indent).Select(gr =>
        {
            if (!gr.Key.isGroup)
            {
                // A plain condition must not have nested children.
                if (gr.Count() != 0)
                    throw new InvalidOperationException("Unexpected childrens of condition");
                var filter = gr.Key;
                var token = QueryUtils.Parse(filter.tokenString!, qd, options);
                var value = FilterValueConverter.Parse(filter.valueString, token.Type, filter.operation!.Value.IsList());
                return new FilterNode
                {
                    token = new QueryTokenTS(token, true),
                    operation = filter.operation.Value,
                    value = value,
                    pinned = filter.pinned,
                };
            }
            else
            {
                // Group node (AND/OR): token is optional; children are parsed
                // one indentation level deeper.
                var group = gr.Key;
                var token = group.tokenString == null ? null : QueryUtils.Parse(group.tokenString!, qd, options);
                var value = FilterValueConverter.Parse(group.valueString, typeof(string), false);
                return new FilterNode
                {
                    groupOperation = group.groupOperation!.Value,
                    token = token == null ? null : new QueryTokenTS(token, true),
                    pinned = gr.Key.pinned,
                    filters = ParseFilterInternal(gr, qd, options, indent + 1).ToList()
                };
            }
        }).ToList();
    }
    // Request DTO for StringifyFilters; same JSON contract conventions as above.
    public class StringifyFiltersRequest
    {
        public string queryKey;
        public bool canAggregate;
        public List<FilterNode> filters;
    }
    // Flattens a FilterNode tree back into the indentation-encoded list form.
    [HttpPost("api/userAssets/stringifyFilters")]
    public List<QueryFilterItem> StringifyFilters([Required, FromBody]StringifyFiltersRequest request)
    {
        var queryName = QueryLogic.ToQueryName(request.queryKey);
        var qd = QueryLogic.Queries.QueryDescription(queryName);
        var options = SubTokensOptions.CanAnyAll | SubTokensOptions.CanElement | (request.canAggregate ? SubTokensOptions.CanAggregate : 0);
        List<QueryFilterItem> result = new List<QueryFilterItem>();
        foreach (var f in request.filters)
        {
            result.AddRange(ToQueryFiltersEmbedded(f, qd, options, 0));
        }
        return result;
    }
    // Emits a node (and, for groups, its descendants) as flat items whose
    // nesting depth is recorded in 'indentation'.
    public static IEnumerable<QueryFilterItem> ToQueryFiltersEmbedded(FilterNode filter, QueryDescription qd, SubTokensOptions options, int ident = 0)
    {
        if (filter.groupOperation == null)
        {
            var token = QueryUtils.Parse(filter.tokenString!, qd, options);
            // List operations (IsIn...) expect a collection of the token type.
            var expectedValueType = filter.operation!.Value.IsList() ? typeof(ObservableCollection<>).MakeGenericType(token.Type.Nullify()) : token.Type;
            // Values arriving from JSON are still JsonElements; materialize them.
            var val = filter.value is JsonElement jtok ?
                jtok.ToObject(expectedValueType, SignumServer.JsonSerializerOptions) :
                filter.value;
            yield return new QueryFilterItem
            {
                token = new QueryTokenTS(token, true),
                operation = filter.operation,
                valueString = FilterValueConverter.ToString(val, token.Type),
                indentation = ident,
                pinned = filter.pinned,
            };
        }
        else
        {
            var token = filter.tokenString == null ? null : QueryUtils.Parse(filter.tokenString, qd, options);
            yield return new QueryFilterItem
            {
                isGroup = true,
                groupOperation = filter.groupOperation,
                token = token == null ? null : new QueryTokenTS(token, true),
                indentation = ident,
                valueString = filter.value != null ? FilterValueConverter.ToString(filter.value, typeof(string)) : null,
                pinned = filter.pinned,
            };
            // Children follow their group at one extra indentation level.
            foreach (var f in filter.filters)
            {
                foreach (var fe in ToQueryFiltersEmbedded(f, qd, options, ident + 1))
                {
                    yield return fe;
                }
            }
        }
    }
    // Wire DTO for one flattened filter row (condition or group header).
    public class QueryFilterItem
    {
        public QueryTokenTS? token;
        public string? tokenString;
        public bool isGroup;
        public FilterGroupOperation? groupOperation;
        public FilterOperation? operation;
        public string? valueString;
        public PinnedFilter pinned;
        public int indentation;
    }
    // Wire DTO describing how a filter is pinned in the UI.
    public class PinnedFilter
    {
        public string label;
        public int? row;
        public int? column;
        public PinnedFilterActive? active;
        public bool? splitText;
    }
    // Tree-shaped filter DTO; tokenString is used on requests, token on responses.
    public class FilterNode
    {
        public FilterGroupOperation? groupOperation;
        public string? tokenString; //For Request
        public QueryTokenTS? token; //For response
        public FilterOperation? operation;
        public object? value;
        public List<FilterNode> filters;
        public PinnedFilter pinned;
    }
    public class FilterElement
    {
    }
    // Exports the selected user assets (all of one entity type) as an XML file.
    [HttpPost("api/userAssets/export")]
    public FileStreamResult Export([Required, FromBody]Lite<IUserAssetEntity>[] lites)
    {
        var bytes = UserAssetsExporter.ToXml(lites.RetrieveFromListOfLite().ToArray());
        string typeName = lites.Select(a => a.EntityType).Distinct().SingleEx().Name;
        var fileName = "{0}{1}.xml".FormatWith(typeName, lites.ToString(a => a.Id.ToString(), "_"));
        return FilesController.GetFileStreamResult(new MemoryStream(bytes), fileName);
    }
    // Dry-run of an import: returns what would be created/overwritten.
    [HttpPost("api/userAssets/importPreview")]
    public UserAssetPreviewModel ImportPreview([Required, FromBody]FileUpload file)
    {
        return UserAssetsImporter.Preview(file.content);
    }
    // Applies an import using the choices made on the preview model.
    [HttpPost("api/userAssets/import")]
    public void Import([Required, FromBody]FileUploadWithModel file)
    {
        UserAssetsImporter.Import(file.file.content, file.model);
    }
    public class FileUpload
    {
        public string fileName;
        public byte[] content;
    }
    public class FileUploadWithModel
    {
        public FileUpload file;
        public UserAssetPreviewModel model;
    }
}
}
| |
using J2N;
using System;
using System.Runtime.CompilerServices;
namespace Lucene.Net.Util
{
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* some code derived from jodk: http://code.google.com/p/jodk/ (apache 2.0)
* asin() derived from fdlibm: http://www.netlib.org/fdlibm/e_asin.c (public domain):
* =============================================================================
* Copyright (C) 1993 by Sun Microsystems, Inc. All rights reserved.
*
* Developed at SunSoft, a Sun Microsystems, Inc. business.
* Permission to use, copy, modify, and distribute this
* software is freely granted, provided that this notice
* is preserved.
* =============================================================================
*/
/// <summary>
/// Math functions that trade off accuracy for speed. </summary>
public static class SloppyMath // LUCENENET: Changed to static
{
/// <summary>
/// Returns the distance in kilometers between two points
/// specified in decimal degrees (latitude/longitude). </summary>
/// <param name="lat1"> Latitude of the first point. </param>
/// <param name="lon1"> Longitude of the first point. </param>
/// <param name="lat2"> Latitude of the second point. </param>
/// <param name="lon2"> Longitude of the second point. </param>
/// <returns> distance in kilometers. </returns>
public static double Haversin(double lat1, double lon1, double lat2, double lon2)
{
    double phi1 = lat1 * TO_RADIANS;
    double phi2 = lat2 * TO_RADIANS;
    // Haversine terms for the latitude and longitude deltas; operation order
    // is kept identical to preserve bit-for-bit results.
    double havLat = 1 - Cos(phi1 - phi2);
    double havLon = 1 - Cos((lon1 - lon2) * TO_RADIANS);
    double h = (havLat + Cos(phi1) * Cos(phi2) * havLon) / 2;
    // Use the earth diameter at the mean latitude of the two points.
    double meanLat = (phi1 + phi2) / 2d;
    double diameter = EarthDiameter(meanLat);
    return diameter * Asin(Math.Min(1, Math.Sqrt(h)));
}
/// <summary>
/// Returns the trigonometric cosine of an angle.
/// <para/>
/// Error is around 1E-15.
/// <para/>
/// Special cases:
/// <list type="bullet">
/// <item><description>If the argument is <see cref="double.NaN"/> or an infinity, then the result is <see cref="double.NaN"/>.</description></item>
/// </list>
/// </summary>
/// <param name="a"> An angle, in radians. </param>
/// <returns> The cosine of the argument. </returns>
/// <seealso cref="Math.Cos(double)"/>
public static double Cos(double a)
{
    if (a < 0.0)
    {
        // cosine is even: cos(-a) == cos(a)
        a = -a;
    }
    if (a > SIN_COS_MAX_VALUE_FOR_INT_MODULO)
    {
        // Argument too large for the fast integer-modulo table lookup;
        // fall back to the exact library routine.
        return Math.Cos(a);
    }
    // index: possibly outside tables range.
    int index = (int)(a * SIN_COS_INDEXER + 0.5);
    // Distance from the table node, computed against the hi/lo split of the
    // table step to preserve precision.
    double delta = (a - index * SIN_COS_DELTA_HI) - index * SIN_COS_DELTA_LO;
    // Making sure index is within tables range.
    // Last value of each table is the same than first, so we ignore it (tabs size minus one) for modulo.
    index &= (SIN_COS_TABS_SIZE - 2); // index % (SIN_COS_TABS_SIZE-1), valid because SIN_COS_TABS_SIZE-1 is a power of two
    double indexCos = cosTab[index];
    double indexSin = sinTab[index];
    // Fourth-order Taylor correction around the table node using the
    // reciprocal factorials 1/2!, 1/3!, 1/4!.
    return indexCos + delta * (-indexSin + delta * (-indexCos * ONE_DIV_F2 + delta * (indexSin * ONE_DIV_F3 + delta * indexCos * ONE_DIV_F4)));
}
/// <summary>
/// Returns the arc sine of a value.
/// <para/>
/// The returned angle is in the range <i>-pi</i>/2 through <i>pi</i>/2.
/// Error is around 1E-7.
/// <para/>
/// Special cases:
/// <list type="bullet">
/// <item><description>If the argument is <see cref="double.NaN"/> or its absolute value is greater than 1, then the result is <see cref="double.NaN"/>.</description></item>
/// </list>
/// </summary>
/// <param name="a"> the value whose arc sine is to be returned. </param>
/// <returns> arc sine of the argument </returns>
/// <seealso cref="Math.Asin(double)"/>
// because asin(-x) = -asin(x), asin(x) only needs to be computed on [0,1].
// ---> we only have to compute asin(x) on [0,1].
// For values not close to +-1, we use look-up tables;
// for values near +-1, we use code derived from fdlibm.
public static double Asin(double a)
{
    // Reduce to [0, +inf) and remember the sign (asin is odd).
    bool negateResult;
    if (a < 0.0)
    {
        a = -a;
        negateResult = true;
    }
    else
    {
        negateResult = false;
    }
    if (a <= ASIN_MAX_VALUE_FOR_TABS)
    {
        // Table range: fourth-order Taylor expansion around the nearest node
        // using precomputed derivative tables.
        int index = (int)(a * ASIN_INDEXER + 0.5);
        double delta = a - index * ASIN_DELTA;
        double result = asinTab[index] + delta * (asinDer1DivF1Tab[index] + delta * (asinDer2DivF2Tab[index] + delta * (asinDer3DivF3Tab[index] + delta * asinDer4DivF4Tab[index])));
        return negateResult ? -result : result;
    } // value > ASIN_MAX_VALUE_FOR_TABS, or value is NaN
    else
    {
        // this part is derived from fdlibm.
        if (a < 1.0)
        {
            // Rational approximation near 1: asin(x) = pi/2 - 2*asin(sqrt((1-x)/2)).
            double t = (1.0 - a) * 0.5;
            double p = t * (ASIN_PS0 + t * (ASIN_PS1 + t * (ASIN_PS2 + t * (ASIN_PS3 + t * (ASIN_PS4 + t * ASIN_PS5)))));
            double q = 1.0 + t * (ASIN_QS1 + t * (ASIN_QS2 + t * ASIN_QS3 + t * ASIN_QS4)));
            double s = Math.Sqrt(t);
            double z = s + s * (p / q);
            double result = ASIN_PIO2_HI - ((z + z) - ASIN_PIO2_LO);
            return negateResult ? -result : result;
        } // value >= 1.0, or value is NaN
        else
        {
            if (a == 1.0)
            {
                // asin(+/-1) = +/-pi/2 exactly.
                return negateResult ? -Math.PI / 2 : Math.PI / 2;
            }
            else
            {
                // |a| > 1 (or NaN): outside the domain of asin.
                return double.NaN;
            }
        }
    }
}
/// <summary>
/// Return an approximate value of the diameter of the earth at the given latitude, in kilometers. </summary>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static double EarthDiameter(double latitude)
{
    // NaN cannot be quantized to a table slot; propagate it explicitly.
    if (double.IsNaN(latitude))
    {
        return double.NaN;
    }
    // Round |latitude| to the nearest precomputed slot, wrapping with modulo.
    int slot = (int)(Math.Abs(latitude) * RADIUS_INDEXER + 0.5) % earthDiameterPerLatitude.Length;
    return earthDiameterPerLatitude[slot];
}
// haversin
// Degrees-to-radians conversion factor (PI / 180).
private static readonly double TO_RADIANS = Math.PI / 180D;
// cos/asin
// Reciprocal factorials used by the Taylor-series evaluations (1/2!, 1/3!, 1/4!).
private const double ONE_DIV_F2 = 1 / 2.0;
private const double ONE_DIV_F3 = 1 / 6.0;
private const double ONE_DIV_F4 = 1 / 24.0;
// Split (hi/lo) representation of pi/2: HI carries the first 33 bits, LO the remainder,
// so HI + LO reconstructs pi/2 with extra effective precision.
private static readonly double PIO2_HI = J2N.BitConversion.Int64BitsToDouble(0x3FF921FB54400000L); // 1.57079632673412561417e+00 first 33 bits of pi/2
private static readonly double PIO2_LO = J2N.BitConversion.Int64BitsToDouble(0x3DD0B4611A626331L); // 6.07710050650619224932e-11 pi/2 - PIO2_HI
// Split representation of 2*pi, derived from the pi/2 split above.
private static readonly double TWOPI_HI = 4 * PIO2_HI;
private static readonly double TWOPI_LO = 4 * PIO2_LO;
// Sine/cosine tables cover [0, 2*pi] in 2^11 steps; the extra entry makes the last
// abscissa coincide with the first (the tables wrap).
private static readonly int SIN_COS_TABS_SIZE = (1 << 11) + 1;
private static readonly double SIN_COS_DELTA_HI = TWOPI_HI / (SIN_COS_TABS_SIZE - 1);
private static readonly double SIN_COS_DELTA_LO = TWOPI_LO / (SIN_COS_TABS_SIZE - 1);
// Multiplier mapping an angle to its table index (inverse of the table step).
private static readonly double SIN_COS_INDEXER = 1 / (SIN_COS_DELTA_HI + SIN_COS_DELTA_LO);
private static readonly double[] sinTab = new double[SIN_COS_TABS_SIZE];
private static readonly double[] cosTab = new double[SIN_COS_TABS_SIZE];
// Max abs value for fast modulo, above which we use regular angle normalization.
// this value must be < (Integer.MAX_VALUE / SIN_COS_INDEXER), to stay in range of int type.
// The higher it is, the higher the error, but also the faster it is for lower values.
// If you set it to ((Integer.MAX_VALUE / SIN_COS_INDEXER) * 0.99), worse accuracy on double range is about 1e-10.
internal static readonly double SIN_COS_MAX_VALUE_FOR_INT_MODULO = ((int.MaxValue >> 9) / SIN_COS_INDEXER) * 0.99;
// Supposed to be >= sin(77.2deg), as fdlibm code is supposed to work with values > 0.975,
// but seems to work well enough as long as value >= sin(25deg).
private static readonly double ASIN_MAX_VALUE_FOR_TABS = Math.Sin(73.0.ToRadians());
// Arc-sine tables cover [0, ASIN_MAX_VALUE_FOR_TABS] in 2^13 steps.
private static readonly int ASIN_TABS_SIZE = (1 << 13) + 1;
private static readonly double ASIN_DELTA = ASIN_MAX_VALUE_FOR_TABS / (ASIN_TABS_SIZE - 1);
private static readonly double ASIN_INDEXER = 1 / ASIN_DELTA;
// asin(x) samples plus its 1st..4th derivatives, each pre-divided by the matching
// factorial, for the 4th-order Taylor interpolation in Asin(). Filled by the static ctor.
private static readonly double[] asinTab = new double[ASIN_TABS_SIZE];
private static readonly double[] asinDer1DivF1Tab = new double[ASIN_TABS_SIZE];
private static readonly double[] asinDer2DivF2Tab = new double[ASIN_TABS_SIZE];
private static readonly double[] asinDer3DivF3Tab = new double[ASIN_TABS_SIZE];
private static readonly double[] asinDer4DivF4Tab = new double[ASIN_TABS_SIZE];
// fdlibm-derived constants for asin near 1: split pi/2 plus the numerator (PS*) and
// denominator (QS*) coefficients of the rational approximation.
private static readonly double ASIN_PIO2_HI = J2N.BitConversion.Int64BitsToDouble(0x3FF921FB54442D18L); // 1.57079632679489655800e+00
private static readonly double ASIN_PIO2_LO = J2N.BitConversion.Int64BitsToDouble(0x3C91A62633145C07L); // 6.12323399573676603587e-17
private static readonly double ASIN_PS0 = J2N.BitConversion.Int64BitsToDouble(0x3fc5555555555555L); // 1.66666666666666657415e-01
private static readonly double ASIN_PS1 = J2N.BitConversion.Int64BitsToDouble(unchecked((long)0xbfd4d61203eb6f7dL)); // -3.25565818622400915405e-01
private static readonly double ASIN_PS2 = J2N.BitConversion.Int64BitsToDouble(0x3fc9c1550e884455L); // 2.01212532134862925881e-01
private static readonly double ASIN_PS3 = J2N.BitConversion.Int64BitsToDouble(unchecked((long)0xbfa48228b5688f3bL)); // -4.00555345006794114027e-02
private static readonly double ASIN_PS4 = J2N.BitConversion.Int64BitsToDouble(0x3f49efe07501b288L); // 7.91534994289814532176e-04
private static readonly double ASIN_PS5 = J2N.BitConversion.Int64BitsToDouble(0x3f023de10dfdf709L); // 3.47933107596021167570e-05
private static readonly double ASIN_QS1 = J2N.BitConversion.Int64BitsToDouble(unchecked((long)0xc0033a271c8a2d4bL)); // -2.40339491173441421878e+00
private static readonly double ASIN_QS2 = J2N.BitConversion.Int64BitsToDouble(0x40002ae59c598ac8L); // 2.02094576023350569471e+00
private static readonly double ASIN_QS3 = J2N.BitConversion.Int64BitsToDouble(unchecked((long)0xbfe6066c1b8d0159L)); // -6.88283971605453293030e-01
private static readonly double ASIN_QS4 = J2N.BitConversion.Int64BitsToDouble(0x3fb3b8c5b12e9282L); // 7.70381505559019352791e-02
// Earth-diameter table: one entry per latitude step over [0, pi/2]; filled by the static ctor.
private static readonly int RADIUS_TABS_SIZE = (1 << 10) + 1;
private static readonly double RADIUS_DELTA = (Math.PI / 2d) / (RADIUS_TABS_SIZE - 1);
private static readonly double RADIUS_INDEXER = 1d / RADIUS_DELTA;
private static readonly double[] earthDiameterPerLatitude = new double[RADIUS_TABS_SIZE];
/// <summary>
/// Initializes look-up tables. </summary>
// Runs once per type initialization; fills the sin/cos, asin and earth-diameter
// tables that the fast math methods above index into.
static SloppyMath()
{
// sin and cos
// Table indices corresponding to pi, 2*pi, pi/2 and 3*pi/2 respectively.
int SIN_COS_PI_INDEX = (SIN_COS_TABS_SIZE - 1) / 2;
int SIN_COS_PI_MUL_2_INDEX = 2 * SIN_COS_PI_INDEX;
int SIN_COS_PI_MUL_0_5_INDEX = SIN_COS_PI_INDEX / 2;
int SIN_COS_PI_MUL_1_5_INDEX = 3 * SIN_COS_PI_INDEX / 2;
for (int i = 0; i < SIN_COS_TABS_SIZE; i++)
{
// angle: in [0,2*PI].
double angle = i * SIN_COS_DELTA_HI + i * SIN_COS_DELTA_LO;
double sinAngle = Math.Sin(angle);
double cosAngle = Math.Cos(angle);
// For indexes corresponding to null cosine or sine, we make sure the value is zero
// and not an epsilon. this allows for a much better accuracy for results close to zero.
if (i == SIN_COS_PI_INDEX)
{
sinAngle = 0.0;
}
else if (i == SIN_COS_PI_MUL_2_INDEX)
{
sinAngle = 0.0;
}
else if (i == SIN_COS_PI_MUL_0_5_INDEX)
{
cosAngle = 0.0;
}
else if (i == SIN_COS_PI_MUL_1_5_INDEX)
{
cosAngle = 0.0;
}
sinTab[i] = sinAngle;
cosTab[i] = cosAngle;
}
// asin
// Precompute asin(x) and its first four derivatives (each pre-divided by the
// matching factorial) at every table abscissa, for 4th-order Taylor interpolation.
for (int i = 0; i < ASIN_TABS_SIZE; i++)
{
// x: in [0,ASIN_MAX_VALUE_FOR_TABS].
double x = i * ASIN_DELTA;
asinTab[i] = Math.Asin(x);
// Powers of 1/sqrt(1 - x^2), which appear in the derivatives of asin.
double oneMinusXSqInv = 1.0 / (1 - x * x);
double oneMinusXSqInv0_5 = Math.Sqrt(oneMinusXSqInv);
double oneMinusXSqInv1_5 = oneMinusXSqInv0_5 * oneMinusXSqInv;
double oneMinusXSqInv2_5 = oneMinusXSqInv1_5 * oneMinusXSqInv;
double oneMinusXSqInv3_5 = oneMinusXSqInv2_5 * oneMinusXSqInv;
asinDer1DivF1Tab[i] = oneMinusXSqInv0_5;
asinDer2DivF2Tab[i] = (x * oneMinusXSqInv1_5) * ONE_DIV_F2;
asinDer3DivF3Tab[i] = ((1 + 2 * x * x) * oneMinusXSqInv2_5) * ONE_DIV_F3;
asinDer4DivF4Tab[i] = ((5 + 2 * x * (2 + x * (5 - 2 * x))) * oneMinusXSqInv3_5) * ONE_DIV_F4;
}
// WGS84 earth-ellipsoid major (a) and minor (b) radius
const double a = 6378137; // [m]
const double b = 6356752.31420; // [m]
double a2 = a * a;
double b2 = b * b;
// Endpoints pinned to the exact equatorial (2a) and polar (2b) diameters, in km.
earthDiameterPerLatitude[0] = 2 * a / 1000d;
earthDiameterPerLatitude[RADIUS_TABS_SIZE - 1] = 2 * b / 1000d;
// earth radius
for (int i = 1; i < RADIUS_TABS_SIZE - 1; i++)
{
// NOTE(review): the denominator here is (2*RADIUS_TABS_SIZE - 1) = 2049, while the
// RADIUS_INDEXER step assumes 2*(RADIUS_TABS_SIZE - 1) = 2048 intervals over [0, pi/2];
// this looks like a slight sampling offset inherited from upstream — confirm before changing.
double lat = Math.PI * i / (2d * RADIUS_TABS_SIZE - 1);
double one = Math.Pow(a2 * Math.Cos(lat), 2);
double two = Math.Pow(b2 * Math.Sin(lat), 2);
double three = Math.Pow(a * Math.Cos(lat), 2);
double four = Math.Pow(b * Math.Sin(lat), 2);
// Geocentric radius of the WGS84 ellipsoid at this latitude.
double radius = Math.Sqrt((one + two) / (three + four));
earthDiameterPerLatitude[i] = 2 * radius / 1000d;
}
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
//
// This file was autogenerated by a tool.
// Do not modify it.
//
namespace Microsoft.Azure.Batch
{
using Models = Microsoft.Azure.Batch.Protocol.Models;
using System;
using System.Collections.Generic;
using System.Linq;
/// <summary>
/// A job schedule that allows recurring jobs by specifying when to run jobs and a specification used to create each
/// job.
/// </summary>
public partial class CloudJobSchedule : ITransportObjectProvider<Models.JobScheduleAddParameter>, IInheritedBehaviors, IPropertyMetadata
{
// Backing store for all CloudJobSchedule properties. Each property is held in a
// PropertyAccessor that records its name and allowed access (read/write/none),
// and the container as a whole tracks bound/unbound state and modification.
private class PropertyContainer : PropertyCollection
{
public readonly PropertyAccessor<DateTime?> CreationTimeProperty;
public readonly PropertyAccessor<string> DisplayNameProperty;
public readonly PropertyAccessor<string> ETagProperty;
public readonly PropertyAccessor<JobScheduleExecutionInformation> ExecutionInformationProperty;
public readonly PropertyAccessor<string> IdProperty;
public readonly PropertyAccessor<JobSpecification> JobSpecificationProperty;
public readonly PropertyAccessor<DateTime?> LastModifiedProperty;
public readonly PropertyAccessor<IList<MetadataItem>> MetadataProperty;
public readonly PropertyAccessor<Common.JobScheduleState?> PreviousStateProperty;
public readonly PropertyAccessor<DateTime?> PreviousStateTransitionTimeProperty;
public readonly PropertyAccessor<Schedule> ScheduleProperty;
public readonly PropertyAccessor<Common.JobScheduleState?> StateProperty;
public readonly PropertyAccessor<DateTime?> StateTransitionTimeProperty;
public readonly PropertyAccessor<JobScheduleStatistics> StatisticsProperty;
public readonly PropertyAccessor<string> UrlProperty;
// Unbound container: used for a locally created schedule. User-settable
// properties allow Read | Write; service-owned properties allow no access.
public PropertyContainer() : base(BindingState.Unbound)
{
this.CreationTimeProperty = this.CreatePropertyAccessor<DateTime?>(nameof(CreationTime), BindingAccess.None);
this.DisplayNameProperty = this.CreatePropertyAccessor<string>(nameof(DisplayName), BindingAccess.Read | BindingAccess.Write);
this.ETagProperty = this.CreatePropertyAccessor<string>(nameof(ETag), BindingAccess.None);
this.ExecutionInformationProperty = this.CreatePropertyAccessor<JobScheduleExecutionInformation>(nameof(ExecutionInformation), BindingAccess.None);
this.IdProperty = this.CreatePropertyAccessor<string>(nameof(Id), BindingAccess.Read | BindingAccess.Write);
this.JobSpecificationProperty = this.CreatePropertyAccessor<JobSpecification>(nameof(JobSpecification), BindingAccess.Read | BindingAccess.Write);
this.LastModifiedProperty = this.CreatePropertyAccessor<DateTime?>(nameof(LastModified), BindingAccess.None);
this.MetadataProperty = this.CreatePropertyAccessor<IList<MetadataItem>>(nameof(Metadata), BindingAccess.Read | BindingAccess.Write);
this.PreviousStateProperty = this.CreatePropertyAccessor<Common.JobScheduleState?>(nameof(PreviousState), BindingAccess.None);
this.PreviousStateTransitionTimeProperty = this.CreatePropertyAccessor<DateTime?>(nameof(PreviousStateTransitionTime), BindingAccess.None);
this.ScheduleProperty = this.CreatePropertyAccessor<Schedule>(nameof(Schedule), BindingAccess.Read | BindingAccess.Write);
this.StateProperty = this.CreatePropertyAccessor<Common.JobScheduleState?>(nameof(State), BindingAccess.None);
this.StateTransitionTimeProperty = this.CreatePropertyAccessor<DateTime?>(nameof(StateTransitionTime), BindingAccess.None);
this.StatisticsProperty = this.CreatePropertyAccessor<JobScheduleStatistics>(nameof(Statistics), BindingAccess.None);
this.UrlProperty = this.CreatePropertyAccessor<string>(nameof(Url), BindingAccess.None);
}
// Bound container: populated from a protocol-layer object. Service-owned
// properties become read-only; frozen wrappers are created for nested
// read-only objects (ExecutionInfo, Stats).
public PropertyContainer(Models.CloudJobSchedule protocolObject) : base(BindingState.Bound)
{
this.CreationTimeProperty = this.CreatePropertyAccessor(
protocolObject.CreationTime,
nameof(CreationTime),
BindingAccess.Read);
this.DisplayNameProperty = this.CreatePropertyAccessor(
protocolObject.DisplayName,
nameof(DisplayName),
BindingAccess.Read);
this.ETagProperty = this.CreatePropertyAccessor(
protocolObject.ETag,
nameof(ETag),
BindingAccess.Read);
this.ExecutionInformationProperty = this.CreatePropertyAccessor(
UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.ExecutionInfo, o => new JobScheduleExecutionInformation(o).Freeze()),
nameof(ExecutionInformation),
BindingAccess.Read);
this.IdProperty = this.CreatePropertyAccessor(
protocolObject.Id,
nameof(Id),
BindingAccess.Read);
this.JobSpecificationProperty = this.CreatePropertyAccessor(
UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.JobSpecification, o => new JobSpecification(o)),
nameof(JobSpecification),
BindingAccess.Read | BindingAccess.Write);
this.LastModifiedProperty = this.CreatePropertyAccessor(
protocolObject.LastModified,
nameof(LastModified),
BindingAccess.Read);
this.MetadataProperty = this.CreatePropertyAccessor(
MetadataItem.ConvertFromProtocolCollection(protocolObject.Metadata),
nameof(Metadata),
BindingAccess.Read | BindingAccess.Write);
this.PreviousStateProperty = this.CreatePropertyAccessor(
UtilitiesInternal.MapNullableEnum<Models.JobScheduleState, Common.JobScheduleState>(protocolObject.PreviousState),
nameof(PreviousState),
BindingAccess.Read);
this.PreviousStateTransitionTimeProperty = this.CreatePropertyAccessor(
protocolObject.PreviousStateTransitionTime,
nameof(PreviousStateTransitionTime),
BindingAccess.Read);
this.ScheduleProperty = this.CreatePropertyAccessor(
UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.Schedule, o => new Schedule(o)),
nameof(Schedule),
BindingAccess.Read | BindingAccess.Write);
this.StateProperty = this.CreatePropertyAccessor(
UtilitiesInternal.MapNullableEnum<Models.JobScheduleState, Common.JobScheduleState>(protocolObject.State),
nameof(State),
BindingAccess.Read);
this.StateTransitionTimeProperty = this.CreatePropertyAccessor(
protocolObject.StateTransitionTime,
nameof(StateTransitionTime),
BindingAccess.Read);
this.StatisticsProperty = this.CreatePropertyAccessor(
UtilitiesInternal.CreateObjectWithNullCheck(protocolObject.Stats, o => new JobScheduleStatistics(o).Freeze()),
nameof(Statistics),
BindingAccess.Read);
this.UrlProperty = this.CreatePropertyAccessor(
protocolObject.Url,
nameof(Url),
BindingAccess.Read);
}
}
// Property storage for this instance (unbound or bound, see PropertyContainer ctors).
private PropertyContainer propertyContainer;
// Parent client supplied at construction; source of inherited behaviors.
private readonly BatchClient parentBatchClient;
#region Constructors
/// <summary>
/// Initializes a new instance of the <see cref="CloudJobSchedule"/> class.
/// </summary>
/// <param name='parentBatchClient'>The parent <see cref="BatchClient"/> to use.</param>
/// <param name='baseBehaviors'>The base behaviors to use.</param>
internal CloudJobSchedule(
BatchClient parentBatchClient,
IEnumerable<BatchClientBehavior> baseBehaviors)
{
this.propertyContainer = new PropertyContainer();
this.parentBatchClient = parentBatchClient;
InheritUtil.InheritClientBehaviorsAndSetPublicProperty(this, baseBehaviors);
}
// Bound constructor: wraps a protocol-layer CloudJobSchedule object.
internal CloudJobSchedule(
BatchClient parentBatchClient,
Models.CloudJobSchedule protocolObject,
IEnumerable<BatchClientBehavior> baseBehaviors)
{
this.parentBatchClient = parentBatchClient;
InheritUtil.InheritClientBehaviorsAndSetPublicProperty(this, baseBehaviors);
this.propertyContainer = new PropertyContainer(protocolObject);
}
#endregion Constructors
#region IInheritedBehaviors
/// <summary>
/// Gets or sets a list of behaviors that modify or customize requests to the Batch service
/// made via this <see cref="CloudJobSchedule"/>.
/// </summary>
/// <remarks>
/// <para>These behaviors are inherited by child objects.</para>
/// <para>Modifications are applied in the order of the collection. The last write wins.</para>
/// </remarks>
public IList<BatchClientBehavior> CustomBehaviors { get; set; }
#endregion IInheritedBehaviors
#region CloudJobSchedule
/// <summary>
/// Gets the creation time of the job schedule.
/// </summary>
public DateTime? CreationTime
{
get { return this.propertyContainer.CreationTimeProperty.Value; }
}
/// <summary>
/// Gets or sets the display name of the job schedule.
/// </summary>
public string DisplayName
{
get { return this.propertyContainer.DisplayNameProperty.Value; }
set { this.propertyContainer.DisplayNameProperty.Value = value; }
}
/// <summary>
/// Gets the ETag of the job schedule.
/// </summary>
public string ETag
{
get { return this.propertyContainer.ETagProperty.Value; }
}
/// <summary>
/// Gets the execution information for the job schedule.
/// </summary>
public JobScheduleExecutionInformation ExecutionInformation
{
get { return this.propertyContainer.ExecutionInformationProperty.Value; }
}
/// <summary>
/// Gets or sets the id of the job schedule.
/// </summary>
public string Id
{
get { return this.propertyContainer.IdProperty.Value; }
set { this.propertyContainer.IdProperty.Value = value; }
}
/// <summary>
/// Gets or sets a <see cref="JobSpecification" /> containing details of the jobs to be created according to the
/// <see cref="Schedule"/>.
/// </summary>
public JobSpecification JobSpecification
{
get { return this.propertyContainer.JobSpecificationProperty.Value; }
set { this.propertyContainer.JobSpecificationProperty.Value = value; }
}
/// <summary>
/// Gets the last modified time of the job schedule.
/// </summary>
public DateTime? LastModified
{
get { return this.propertyContainer.LastModifiedProperty.Value; }
}
/// <summary>
/// Gets or sets a list of name-value pairs associated with the schedule as metadata.
/// </summary>
public IList<MetadataItem> Metadata
{
get { return this.propertyContainer.MetadataProperty.Value; }
set
{
// The incoming enumerable is copied into a change-tracked list so modifications are observable.
this.propertyContainer.MetadataProperty.Value = ConcurrentChangeTrackedModifiableList<MetadataItem>.TransformEnumerableToConcurrentModifiableList(value);
}
}
/// <summary>
/// Gets the previous state of the job schedule.
/// </summary>
/// <remarks>
/// If the schedule is in its initial <see cref="Common.JobScheduleState.Active"/> state, the PreviousState property
/// is not defined.
/// </remarks>
public Common.JobScheduleState? PreviousState
{
get { return this.propertyContainer.PreviousStateProperty.Value; }
}
/// <summary>
/// Gets the time at which the job schedule entered its previous state.
/// </summary>
/// <remarks>
/// If the schedule is in its initial <see cref="Common.JobScheduleState.Active"/> state, the PreviousStateTransitionTime
/// property is not defined.
/// </remarks>
public DateTime? PreviousStateTransitionTime
{
get { return this.propertyContainer.PreviousStateTransitionTimeProperty.Value; }
}
/// <summary>
/// Gets or sets the schedule that determines when jobs will be created.
/// </summary>
public Schedule Schedule
{
get { return this.propertyContainer.ScheduleProperty.Value; }
set { this.propertyContainer.ScheduleProperty.Value = value; }
}
/// <summary>
/// Gets the current state of the job schedule.
/// </summary>
public Common.JobScheduleState? State
{
get { return this.propertyContainer.StateProperty.Value; }
}
/// <summary>
/// Gets the time at which the <see cref="CloudJobSchedule"/> entered its current state.
/// </summary>
public DateTime? StateTransitionTime
{
get { return this.propertyContainer.StateTransitionTimeProperty.Value; }
}
/// <summary>
/// Gets a <see cref="JobScheduleStatistics" /> containing resource usage statistics for the entire lifetime of the
/// job schedule.
/// </summary>
/// <remarks>
/// This property is populated only if the <see cref="CloudJobSchedule"/> was retrieved with an <see cref="ODATADetailLevel.ExpandClause"/>
/// including the 'stats' attribute; otherwise it is null. The statistics may not be immediately available. The Batch
/// service performs periodic roll-up of statistics. The typical delay is about 30 minutes.
/// </remarks>
public JobScheduleStatistics Statistics
{
get { return this.propertyContainer.StatisticsProperty.Value; }
}
/// <summary>
/// Gets the URL of the job schedule.
/// </summary>
public string Url
{
get { return this.propertyContainer.UrlProperty.Value; }
}
#endregion // CloudJobSchedule
#region IPropertyMetadata
bool IModifiable.HasBeenModified
{
get { return this.propertyContainer.HasBeenModified; }
}
bool IReadOnly.IsReadOnly
{
get { return this.propertyContainer.IsReadOnly; }
set { this.propertyContainer.IsReadOnly = value; }
}
#endregion //IPropertyMetadata
#region Internal/private methods
/// <summary>
/// Return a protocol object of the requested type.
/// </summary>
/// <returns>The protocol object of the requested type.</returns>
Models.JobScheduleAddParameter ITransportObjectProvider<Models.JobScheduleAddParameter>.GetTransportObject()
{
// Only the user-settable properties are mapped; service-owned properties are not part of an add request.
Models.JobScheduleAddParameter result = new Models.JobScheduleAddParameter()
{
DisplayName = this.DisplayName,
Id = this.Id,
JobSpecification = UtilitiesInternal.CreateObjectWithNullCheck(this.JobSpecification, (o) => o.GetTransportObject()),
Metadata = UtilitiesInternal.ConvertToProtocolCollection(this.Metadata),
Schedule = UtilitiesInternal.CreateObjectWithNullCheck(this.Schedule, (o) => o.GetTransportObject()),
};
return result;
}
#endregion // Internal/private methods
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// ------------------------------------------------------------------------------
// Changes to this file must follow the http://aka.ms/api-review process.
// ------------------------------------------------------------------------------
namespace System.Reflection.Emit
{
/// <summary>
/// Reference-assembly declaration of the <c>AssemblyBuilder</c> public API surface.
/// Member bodies are placeholders (<c>throw null</c> / empty) — only the signatures matter here.
/// </summary>
public sealed partial class AssemblyBuilder : System.Reflection.Assembly
{
internal AssemblyBuilder() { }
public override string FullName { get { throw null; } }
public override bool IsDynamic { get { throw null; } }
public override System.Reflection.Module ManifestModule { get { throw null; } }
public static System.Reflection.Emit.AssemblyBuilder DefineDynamicAssembly(System.Reflection.AssemblyName name, System.Reflection.Emit.AssemblyBuilderAccess access) { throw null; }
public static System.Reflection.Emit.AssemblyBuilder DefineDynamicAssembly(System.Reflection.AssemblyName name, System.Reflection.Emit.AssemblyBuilderAccess access, System.Collections.Generic.IEnumerable<System.Reflection.Emit.CustomAttributeBuilder> assemblyAttributes) { throw null; }
public System.Reflection.Emit.ModuleBuilder DefineDynamicModule(string name) { throw null; }
public override bool Equals(object obj) { throw null; }
public System.Reflection.Emit.ModuleBuilder GetDynamicModule(string name) { throw null; }
public override int GetHashCode() { throw null; }
public override System.Reflection.ManifestResourceInfo GetManifestResourceInfo(string resourceName) { throw null; }
public override string[] GetManifestResourceNames() { throw null; }
public override System.IO.Stream GetManifestResourceStream(string name) { throw null; }
public void SetCustomAttribute(System.Reflection.ConstructorInfo con, byte[] binaryAttribute) { }
public void SetCustomAttribute(System.Reflection.Emit.CustomAttributeBuilder customBuilder) { }
}
/// <summary>
/// Reference-assembly declaration of the access modes for a dynamic assembly.
/// Flags enum: <c>RunAndCollect</c> (9) includes the <c>Run</c> (1) bit.
/// </summary>
[System.FlagsAttribute]
public enum AssemblyBuilderAccess
{
Run = 1,
RunAndCollect = 9,
}
/// <summary>
/// Reference-assembly declaration of the <c>ConstructorBuilder</c> public API surface.
/// Member bodies are placeholders (<c>throw null</c> / empty) — only the signatures matter here.
/// </summary>
public sealed partial class ConstructorBuilder : System.Reflection.ConstructorInfo
{
internal ConstructorBuilder() { }
public override System.Reflection.MethodAttributes Attributes { get { throw null; } }
public override System.Reflection.CallingConventions CallingConvention { get { throw null; } }
public override System.Type DeclaringType { get { throw null; } }
public bool InitLocals { get { throw null; } set { } }
public override System.RuntimeMethodHandle MethodHandle { get { throw null; } }
public override System.Reflection.Module Module { get { throw null; } }
public override string Name { get { throw null; } }
public override System.Type ReflectedType { get { throw null; } }
public System.Reflection.Emit.ParameterBuilder DefineParameter(int iSequence, System.Reflection.ParameterAttributes attributes, string strParamName) { throw null; }
public override object[] GetCustomAttributes(bool inherit) { throw null; }
public override object[] GetCustomAttributes(System.Type attributeType, bool inherit) { throw null; }
public System.Reflection.Emit.ILGenerator GetILGenerator() { throw null; }
public System.Reflection.Emit.ILGenerator GetILGenerator(int streamSize) { throw null; }
public override System.Reflection.MethodImplAttributes GetMethodImplementationFlags() { throw null; }
public override System.Reflection.ParameterInfo[] GetParameters() { throw null; }
public override object Invoke(object obj, System.Reflection.BindingFlags invokeAttr, System.Reflection.Binder binder, object[] parameters, System.Globalization.CultureInfo culture) { throw null; }
public override object Invoke(System.Reflection.BindingFlags invokeAttr, System.Reflection.Binder binder, object[] parameters, System.Globalization.CultureInfo culture) { throw null; }
public override bool IsDefined(System.Type attributeType, bool inherit) { throw null; }
public void SetCustomAttribute(System.Reflection.ConstructorInfo con, byte[] binaryAttribute) { }
public void SetCustomAttribute(System.Reflection.Emit.CustomAttributeBuilder customBuilder) { }
public void SetImplementationFlags(System.Reflection.MethodImplAttributes attributes) { }
public override string ToString() { throw null; }
}
/// <summary>
/// Reference-assembly declaration of the <c>EnumBuilder</c> public API surface.
/// Member bodies are placeholders (<c>throw null</c> / empty) — only the signatures matter here.
/// </summary>
public sealed partial class EnumBuilder : System.Type //TYPEINFO: System.Reflection.TypeInfo doesn't have a public ctor
{
internal EnumBuilder() { }
public override System.Reflection.Assembly Assembly { get { throw null; } }
public override string AssemblyQualifiedName { get { throw null; } }
public override System.Type BaseType { get { throw null; } }
public override System.Type DeclaringType { get { throw null; } }
public override string FullName { get { throw null; } }
public override System.Guid GUID { get { throw null; } }
public override bool IsConstructedGenericType { get { throw null; } }
public override System.Reflection.Module Module { get { throw null; } }
public override string Name { get { throw null; } }
public override string Namespace { get { throw null; } }
public override System.Type ReflectedType { get { throw null; } }
public override System.RuntimeTypeHandle TypeHandle { get { throw null; } }
public System.Reflection.Emit.FieldBuilder UnderlyingField { get { throw null; } }
public override System.Type UnderlyingSystemType { get { throw null; } }
public System.Reflection.TypeInfo CreateTypeInfo() { throw null; }
public System.Reflection.Emit.FieldBuilder DefineLiteral(string literalName, object literalValue) { throw null; }
protected override System.Reflection.TypeAttributes GetAttributeFlagsImpl() { throw null; }
protected override System.Reflection.ConstructorInfo GetConstructorImpl(System.Reflection.BindingFlags bindingAttr, System.Reflection.Binder binder, System.Reflection.CallingConventions callConvention, System.Type[] types, System.Reflection.ParameterModifier[] modifiers) { throw null; }
public override System.Reflection.ConstructorInfo[] GetConstructors(System.Reflection.BindingFlags bindingAttr) { throw null; }
public override object[] GetCustomAttributes(bool inherit) { throw null; }
public override object[] GetCustomAttributes(System.Type attributeType, bool inherit) { throw null; }
public override System.Type GetElementType() { throw null; }
public override System.Type GetEnumUnderlyingType() { throw null; }
public override System.Reflection.EventInfo GetEvent(string name, System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Reflection.EventInfo[] GetEvents() { throw null; }
public override System.Reflection.EventInfo[] GetEvents(System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Reflection.FieldInfo GetField(string name, System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Reflection.FieldInfo[] GetFields(System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Type GetInterface(string name, bool ignoreCase) { throw null; }
public override System.Reflection.InterfaceMapping GetInterfaceMap(System.Type interfaceType) { throw null; }
public override System.Type[] GetInterfaces() { throw null; }
public override System.Reflection.MemberInfo[] GetMember(string name, System.Reflection.MemberTypes type, System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Reflection.MemberInfo[] GetMembers(System.Reflection.BindingFlags bindingAttr) { throw null; }
protected override System.Reflection.MethodInfo GetMethodImpl(string name, System.Reflection.BindingFlags bindingAttr, System.Reflection.Binder binder, System.Reflection.CallingConventions callConvention, System.Type[] types, System.Reflection.ParameterModifier[] modifiers) { throw null; }
public override System.Reflection.MethodInfo[] GetMethods(System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Type GetNestedType(string name, System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Type[] GetNestedTypes(System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Reflection.PropertyInfo[] GetProperties(System.Reflection.BindingFlags bindingAttr) { throw null; }
protected override System.Reflection.PropertyInfo GetPropertyImpl(string name, System.Reflection.BindingFlags bindingAttr, System.Reflection.Binder binder, System.Type returnType, System.Type[] types, System.Reflection.ParameterModifier[] modifiers) { throw null; }
protected override bool HasElementTypeImpl() { throw null; }
public override object InvokeMember(string name, System.Reflection.BindingFlags invokeAttr, System.Reflection.Binder binder, object target, object[] args, System.Reflection.ParameterModifier[] modifiers, System.Globalization.CultureInfo culture, string[] namedParameters) { throw null; }
protected override bool IsArrayImpl() { throw null; }
//TYPEINFO public override bool IsAssignableFrom(System.Reflection.TypeInfo typeInfo) { throw null; }
protected override bool IsByRefImpl() { throw null; }
protected override bool IsCOMObjectImpl() { throw null; }
public override bool IsDefined(System.Type attributeType, bool inherit) { throw null; }
protected override bool IsPointerImpl() { throw null; }
protected override bool IsPrimitiveImpl() { throw null; }
protected override bool IsValueTypeImpl() { throw null; }
public override System.Type MakeArrayType() { throw null; }
public override System.Type MakeArrayType(int rank) { throw null; }
public override System.Type MakeByRefType() { throw null; }
public override System.Type MakePointerType() { throw null; }
public void SetCustomAttribute(System.Reflection.ConstructorInfo con, byte[] binaryAttribute) { }
public void SetCustomAttribute(System.Reflection.Emit.CustomAttributeBuilder customBuilder) { }
}
/// <summary>
/// Reference-assembly declaration of the <c>EventBuilder</c> public API surface.
/// Member bodies are placeholders — only the signatures matter here.
/// </summary>
public sealed partial class EventBuilder
{
internal EventBuilder() { }
public void AddOtherMethod(System.Reflection.Emit.MethodBuilder mdBuilder) { }
public void SetAddOnMethod(System.Reflection.Emit.MethodBuilder mdBuilder) { }
public void SetCustomAttribute(System.Reflection.ConstructorInfo con, byte[] binaryAttribute) { }
public void SetCustomAttribute(System.Reflection.Emit.CustomAttributeBuilder customBuilder) { }
public void SetRaiseMethod(System.Reflection.Emit.MethodBuilder mdBuilder) { }
public void SetRemoveOnMethod(System.Reflection.Emit.MethodBuilder mdBuilder) { }
}
/// <summary>
/// Reference-assembly declaration of the <c>FieldBuilder</c> public API surface.
/// Member bodies are placeholders (<c>throw null</c> / empty) — only the signatures matter here.
/// </summary>
public sealed partial class FieldBuilder : System.Reflection.FieldInfo
{
internal FieldBuilder() { }
public override System.Reflection.FieldAttributes Attributes { get { throw null; } }
public override System.Type DeclaringType { get { throw null; } }
public override System.RuntimeFieldHandle FieldHandle { get { throw null; } }
public override System.Type FieldType { get { throw null; } }
public override string Name { get { throw null; } }
public override System.Type ReflectedType { get { throw null; } }
public override object[] GetCustomAttributes(bool inherit) { throw null; }
public override object[] GetCustomAttributes(System.Type attributeType, bool inherit) { throw null; }
public override object GetValue(object obj) { throw null; }
public override bool IsDefined(System.Type attributeType, bool inherit) { throw null; }
public void SetConstant(object defaultValue) { }
public void SetCustomAttribute(System.Reflection.ConstructorInfo con, byte[] binaryAttribute) { }
public void SetCustomAttribute(System.Reflection.Emit.CustomAttributeBuilder customBuilder) { }
public void SetOffset(int iOffset) { }
public override void SetValue(object obj, object val, System.Reflection.BindingFlags invokeAttr, System.Reflection.Binder binder, System.Globalization.CultureInfo culture) { }
}
/// <summary>
/// Reference-assembly stub for System.Reflection.Emit.GenericTypeParameterBuilder,
/// which represents a generic type parameter of a dynamically built type or method.
/// All bodies are throw placeholders; only the API shape matters here.
/// </summary>
public sealed partial class GenericTypeParameterBuilder : System.Type //TYPEINFO: System.Reflection.TypeInfo
{
// Not publicly constructible; instances come from DefineGenericParameters
// on TypeBuilder/MethodBuilder.
internal GenericTypeParameterBuilder() { }
public override System.Reflection.Assembly Assembly { get { throw null; } }
public override string AssemblyQualifiedName { get { throw null; } }
public override System.Type BaseType { get { throw null; } }
public override bool ContainsGenericParameters { get { throw null; } }
public override System.Reflection.MethodBase DeclaringMethod { get { throw null; } }
public override System.Type DeclaringType { get { throw null; } }
public override string FullName { get { throw null; } }
public override System.Reflection.GenericParameterAttributes GenericParameterAttributes { get { throw null; } }
public override int GenericParameterPosition { get { throw null; } }
public override System.Guid GUID { get { throw null; } }
public override bool IsConstructedGenericType { get { throw null; } }
public override bool IsGenericParameter { get { throw null; } }
public override bool IsGenericType { get { throw null; } }
public override bool IsGenericTypeDefinition { get { throw null; } }
public override System.Reflection.Module Module { get { throw null; } }
public override string Name { get { throw null; } }
public override string Namespace { get { throw null; } }
public override System.Type ReflectedType { get { throw null; } }
public override System.RuntimeTypeHandle TypeHandle { get { throw null; } }
public override System.Type UnderlyingSystemType { get { throw null; } }
public override bool Equals(object o) { throw null; }
public override System.Reflection.TypeAttributes GetAttributeFlagsImpl() { throw null; }
protected override System.Reflection.ConstructorInfo GetConstructorImpl(System.Reflection.BindingFlags bindingAttr, System.Reflection.Binder binder, System.Reflection.CallingConventions callConvention, System.Type[] types, System.Reflection.ParameterModifier[] modifiers) { throw null; }
public override System.Reflection.ConstructorInfo[] GetConstructors(System.Reflection.BindingFlags bindingAttr) { throw null; }
public override object[] GetCustomAttributes(bool inherit) { throw null; }
public override object[] GetCustomAttributes(System.Type attributeType, bool inherit) { throw null; }
public override System.Type GetElementType() { throw null; }
public override System.Reflection.EventInfo GetEvent(string name, System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Reflection.EventInfo[] GetEvents() { throw null; }
public override System.Reflection.EventInfo[] GetEvents(System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Reflection.FieldInfo GetField(string name, System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Reflection.FieldInfo[] GetFields(System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Type[] GetGenericArguments() { throw null; }
public override System.Type GetGenericTypeDefinition() { throw null; }
public override int GetHashCode() { throw null; }
public override System.Type GetInterface(string name, bool ignoreCase) { throw null; }
public override System.Reflection.InterfaceMapping GetInterfaceMap(System.Type interfaceType) { throw null; }
public override System.Type[] GetInterfaces() { throw null; }
public override System.Reflection.MemberInfo[] GetMember(string name, System.Reflection.MemberTypes type, System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Reflection.MemberInfo[] GetMembers(System.Reflection.BindingFlags bindingAttr) { throw null; }
protected override System.Reflection.MethodInfo GetMethodImpl(string name, System.Reflection.BindingFlags bindingAttr, System.Reflection.Binder binder, System.Reflection.CallingConventions callConvention, System.Type[] types, System.Reflection.ParameterModifier[] modifiers) { throw null; }
public override System.Reflection.MethodInfo[] GetMethods(System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Type GetNestedType(string name, System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Type[] GetNestedTypes(System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Reflection.PropertyInfo[] GetProperties(System.Reflection.BindingFlags bindingAttr) { throw null; }
protected override System.Reflection.PropertyInfo GetPropertyImpl(string name, System.Reflection.BindingFlags bindingAttr, System.Reflection.Binder binder, System.Type returnType, System.Type[] types, System.Reflection.ParameterModifier[] modifiers) { throw null; }
protected override bool HasElementTypeImpl() { throw null; }
public override object InvokeMember(string name, System.Reflection.BindingFlags invokeAttr, System.Reflection.Binder binder, object target, object[] args, System.Reflection.ParameterModifier[] modifiers, System.Globalization.CultureInfo culture, string[] namedParameters) { throw null; }
protected override bool IsArrayImpl() { throw null; }
//TYPEINFO: public override bool IsAssignableFrom(System.Reflection.TypeInfo typeInfo) { throw null; }
public override bool IsAssignableFrom(System.Type c) { throw null; }
protected override bool IsByRefImpl() { throw null; }
protected override bool IsCOMObjectImpl() { throw null; }
public override bool IsDefined(System.Type attributeType, bool inherit) { throw null; }
protected override bool IsPointerImpl() { throw null; }
protected override bool IsPrimitiveImpl() { throw null; }
public override bool IsSubclassOf(System.Type c) { throw null; }
protected override bool IsValueTypeImpl() { throw null; }
public override System.Type MakeArrayType() { throw null; }
public override System.Type MakeArrayType(int rank) { throw null; }
public override System.Type MakeByRefType() { throw null; }
public override System.Type MakeGenericType(params System.Type[] typeArguments) { throw null; }
public override System.Type MakePointerType() { throw null; }
public void SetBaseTypeConstraint(System.Type baseTypeConstraint) { }
public void SetCustomAttribute(System.Reflection.ConstructorInfo con, byte[] binaryAttribute) { }
public void SetCustomAttribute(System.Reflection.Emit.CustomAttributeBuilder customBuilder) { }
public void SetGenericParameterAttributes(System.Reflection.GenericParameterAttributes genericParameterAttributes) { }
public void SetInterfaceConstraints(params System.Type[] interfaceConstraints) { }
public override string ToString() { throw null; }
}
/// <summary>
/// Reference-assembly stub for System.Reflection.Emit.MethodBuilder, which defines
/// (and can emit IL for) a method on a dynamically built type. All bodies are
/// throw/empty placeholders.
/// </summary>
public sealed partial class MethodBuilder : System.Reflection.MethodInfo
{
// Not publicly constructible; instances are produced by TypeBuilder.DefineMethod.
internal MethodBuilder() { }
public override System.Reflection.MethodAttributes Attributes { get { throw null; } }
public override System.Reflection.CallingConventions CallingConvention { get { throw null; } }
public override bool ContainsGenericParameters { get { throw null; } }
public override System.Type DeclaringType { get { throw null; } }
public bool InitLocals { get { throw null; } set { } }
public override bool IsGenericMethod { get { throw null; } }
public override bool IsGenericMethodDefinition { get { throw null; } }
public override System.RuntimeMethodHandle MethodHandle { get { throw null; } }
public override System.Reflection.Module Module { get { throw null; } }
public override string Name { get { throw null; } }
public override System.Type ReflectedType { get { throw null; } }
public override System.Reflection.ParameterInfo ReturnParameter { get { throw null; } }
public override System.Type ReturnType { get { throw null; } }
public override System.Reflection.ICustomAttributeProvider ReturnTypeCustomAttributes { get { throw null; } }
public System.Reflection.Emit.GenericTypeParameterBuilder[] DefineGenericParameters(params string[] names) { throw null; }
public System.Reflection.Emit.ParameterBuilder DefineParameter(int position, System.Reflection.ParameterAttributes attributes, string strParamName) { throw null; }
public override bool Equals(object obj) { throw null; }
public override System.Reflection.MethodInfo GetBaseDefinition() { throw null; }
public override object[] GetCustomAttributes(bool inherit) { throw null; }
public override object[] GetCustomAttributes(System.Type attributeType, bool inherit) { throw null; }
public override System.Type[] GetGenericArguments() { throw null; }
public override System.Reflection.MethodInfo GetGenericMethodDefinition() { throw null; }
public override int GetHashCode() { throw null; }
public System.Reflection.Emit.ILGenerator GetILGenerator() { throw null; }
public System.Reflection.Emit.ILGenerator GetILGenerator(int size) { throw null; }
public override System.Reflection.MethodImplAttributes GetMethodImplementationFlags() { throw null; }
public override System.Reflection.ParameterInfo[] GetParameters() { throw null; }
public override object Invoke(object obj, System.Reflection.BindingFlags invokeAttr, System.Reflection.Binder binder, object[] parameters, System.Globalization.CultureInfo culture) { throw null; }
public override bool IsDefined(System.Type attributeType, bool inherit) { throw null; }
public override System.Reflection.MethodInfo MakeGenericMethod(params System.Type[] typeArguments) { throw null; }
public void SetCustomAttribute(System.Reflection.ConstructorInfo con, byte[] binaryAttribute) { }
public void SetCustomAttribute(System.Reflection.Emit.CustomAttributeBuilder customBuilder) { }
public void SetImplementationFlags(System.Reflection.MethodImplAttributes attributes) { }
public void SetParameters(params System.Type[] parameterTypes) { }
public void SetReturnType(System.Type returnType) { }
public void SetSignature(System.Type returnType, System.Type[] returnTypeRequiredCustomModifiers, System.Type[] returnTypeOptionalCustomModifiers, System.Type[] parameterTypes, System.Type[][] parameterTypeRequiredCustomModifiers, System.Type[][] parameterTypeOptionalCustomModifiers) { }
public override string ToString() { throw null; }
}
/// <summary>
/// Reference-assembly stub for System.Reflection.Emit.ModuleBuilder, which defines
/// types, global methods, and data inside a dynamic module. All bodies are
/// throw/empty placeholders.
/// </summary>
public partial class ModuleBuilder : System.Reflection.Module
{
// Not publicly constructible; instances come from AssemblyBuilder.DefineDynamicModule.
internal ModuleBuilder() { }
public override System.Reflection.Assembly Assembly { get { throw null; } }
public override string FullyQualifiedName { get { throw null; } }
public override string Name { get { throw null; } }
public void CreateGlobalFunctions() { }
public System.Reflection.Emit.EnumBuilder DefineEnum(string name, System.Reflection.TypeAttributes visibility, System.Type underlyingType) { throw null; }
public System.Reflection.Emit.MethodBuilder DefineGlobalMethod(string name, System.Reflection.MethodAttributes attributes, System.Reflection.CallingConventions callingConvention, System.Type returnType, System.Type[] parameterTypes) { throw null; }
public System.Reflection.Emit.MethodBuilder DefineGlobalMethod(string name, System.Reflection.MethodAttributes attributes, System.Reflection.CallingConventions callingConvention, System.Type returnType, System.Type[] requiredReturnTypeCustomModifiers, System.Type[] optionalReturnTypeCustomModifiers, System.Type[] parameterTypes, System.Type[][] requiredParameterTypeCustomModifiers, System.Type[][] optionalParameterTypeCustomModifiers) { throw null; }
public System.Reflection.Emit.MethodBuilder DefineGlobalMethod(string name, System.Reflection.MethodAttributes attributes, System.Type returnType, System.Type[] parameterTypes) { throw null; }
public System.Reflection.Emit.FieldBuilder DefineInitializedData(string name, byte[] data, System.Reflection.FieldAttributes attributes) { throw null; }
public System.Reflection.Emit.TypeBuilder DefineType(string name) { throw null; }
public System.Reflection.Emit.TypeBuilder DefineType(string name, System.Reflection.TypeAttributes attr) { throw null; }
public System.Reflection.Emit.TypeBuilder DefineType(string name, System.Reflection.TypeAttributes attr, System.Type parent) { throw null; }
public System.Reflection.Emit.TypeBuilder DefineType(string name, System.Reflection.TypeAttributes attr, System.Type parent, int typesize) { throw null; }
public System.Reflection.Emit.TypeBuilder DefineType(string name, System.Reflection.TypeAttributes attr, System.Type parent, System.Reflection.Emit.PackingSize packsize) { throw null; }
public System.Reflection.Emit.TypeBuilder DefineType(string name, System.Reflection.TypeAttributes attr, System.Type parent, System.Reflection.Emit.PackingSize packingSize, int typesize) { throw null; }
public System.Reflection.Emit.TypeBuilder DefineType(string name, System.Reflection.TypeAttributes attr, System.Type parent, System.Type[] interfaces) { throw null; }
public System.Reflection.Emit.FieldBuilder DefineUninitializedData(string name, int size, System.Reflection.FieldAttributes attributes) { throw null; }
public override bool Equals(object obj) { throw null; }
public System.Reflection.MethodInfo GetArrayMethod(System.Type arrayClass, string methodName, System.Reflection.CallingConventions callingConvention, System.Type returnType, System.Type[] parameterTypes) { throw null; }
public override int GetHashCode() { throw null; }
public void SetCustomAttribute(System.Reflection.ConstructorInfo con, byte[] binaryAttribute) { }
public void SetCustomAttribute(System.Reflection.Emit.CustomAttributeBuilder customBuilder) { }
}
/// <summary>
/// Reference-assembly stub for System.Reflection.Emit.PropertyBuilder, which defines
/// a property on a dynamically built type. All bodies are throw/empty placeholders.
/// </summary>
public sealed partial class PropertyBuilder : System.Reflection.PropertyInfo
{
// Not publicly constructible; instances are produced by TypeBuilder.DefineProperty.
internal PropertyBuilder() { }
public override System.Reflection.PropertyAttributes Attributes { get { throw null; } }
public override bool CanRead { get { throw null; } }
public override bool CanWrite { get { throw null; } }
public override System.Type DeclaringType { get { throw null; } }
public override System.Reflection.Module Module { get { throw null; } }
public override string Name { get { throw null; } }
public override System.Type PropertyType { get { throw null; } }
public override System.Type ReflectedType { get { throw null; } }
public void AddOtherMethod(System.Reflection.Emit.MethodBuilder mdBuilder) { }
public override System.Reflection.MethodInfo[] GetAccessors(bool nonPublic) { throw null; }
public override object[] GetCustomAttributes(bool inherit) { throw null; }
public override object[] GetCustomAttributes(System.Type attributeType, bool inherit) { throw null; }
public override System.Reflection.MethodInfo GetGetMethod(bool nonPublic) { throw null; }
public override System.Reflection.ParameterInfo[] GetIndexParameters() { throw null; }
public override System.Reflection.MethodInfo GetSetMethod(bool nonPublic) { throw null; }
public override object GetValue(object obj, object[] index) { throw null; }
public override object GetValue(object obj, System.Reflection.BindingFlags invokeAttr, System.Reflection.Binder binder, object[] index, System.Globalization.CultureInfo culture) { throw null; }
public override bool IsDefined(System.Type attributeType, bool inherit) { throw null; }
public void SetConstant(object defaultValue) { }
public void SetCustomAttribute(System.Reflection.ConstructorInfo con, byte[] binaryAttribute) { }
public void SetCustomAttribute(System.Reflection.Emit.CustomAttributeBuilder customBuilder) { }
public void SetGetMethod(System.Reflection.Emit.MethodBuilder mdBuilder) { }
public void SetSetMethod(System.Reflection.Emit.MethodBuilder mdBuilder) { }
public override void SetValue(object obj, object value, object[] index) { }
public override void SetValue(object obj, object value, System.Reflection.BindingFlags invokeAttr, System.Reflection.Binder binder, object[] index, System.Globalization.CultureInfo culture) { }
}
/// <summary>
/// Reference-assembly stub for System.Reflection.Emit.TypeBuilder, the entry point
/// for defining members (constructors, methods, fields, properties, events, nested
/// types) of a dynamically built type. All bodies are throw/empty placeholders;
/// only the API shape is meaningful in this file.
/// </summary>
public sealed partial class TypeBuilder : System.Type //TYPEINFO: System.Reflection.TypeInfo
{
// Not publicly constructible; instances come from ModuleBuilder.DefineType.
internal TypeBuilder() { }
public const int UnspecifiedTypeSize = 0;
public override System.Reflection.Assembly Assembly { get { throw null; } }
public override string AssemblyQualifiedName { get { throw null; } }
public override System.Type BaseType { get { throw null; } }
public override System.Reflection.MethodBase DeclaringMethod { get { throw null; } }
public override System.Type DeclaringType { get { throw null; } }
public override string FullName { get { throw null; } }
public override System.Reflection.GenericParameterAttributes GenericParameterAttributes { get { throw null; } }
public override int GenericParameterPosition { get { throw null; } }
public override System.Guid GUID { get { throw null; } }
public override bool IsConstructedGenericType { get { throw null; } }
public override bool IsGenericParameter { get { throw null; } }
public override bool IsGenericType { get { throw null; } }
public override bool IsGenericTypeDefinition { get { throw null; } }
public override bool IsSecurityCritical { get { throw null; } }
public override bool IsSecuritySafeCritical { get { throw null; } }
public override bool IsSecurityTransparent { get { throw null; } }
public override System.Reflection.Module Module { get { throw null; } }
public override string Name { get { throw null; } }
public override string Namespace { get { throw null; } }
public System.Reflection.Emit.PackingSize PackingSize { get { throw null; } }
public override System.Type ReflectedType { get { throw null; } }
public int Size { get { throw null; } }
public override System.RuntimeTypeHandle TypeHandle { get { throw null; } }
public override System.Type UnderlyingSystemType { get { throw null; } }
public void AddInterfaceImplementation(System.Type interfaceType) { }
public System.Type CreateType() { throw null; }
public System.Reflection.TypeInfo CreateTypeInfo() { throw null; }
public System.Reflection.Emit.ConstructorBuilder DefineConstructor(System.Reflection.MethodAttributes attributes, System.Reflection.CallingConventions callingConvention, System.Type[] parameterTypes) { throw null; }
public System.Reflection.Emit.ConstructorBuilder DefineConstructor(System.Reflection.MethodAttributes attributes, System.Reflection.CallingConventions callingConvention, System.Type[] parameterTypes, System.Type[][] requiredCustomModifiers, System.Type[][] optionalCustomModifiers) { throw null; }
public System.Reflection.Emit.ConstructorBuilder DefineDefaultConstructor(System.Reflection.MethodAttributes attributes) { throw null; }
public System.Reflection.Emit.EventBuilder DefineEvent(string name, System.Reflection.EventAttributes attributes, System.Type eventtype) { throw null; }
public System.Reflection.Emit.FieldBuilder DefineField(string fieldName, System.Type type, System.Reflection.FieldAttributes attributes) { throw null; }
public System.Reflection.Emit.FieldBuilder DefineField(string fieldName, System.Type type, System.Type[] requiredCustomModifiers, System.Type[] optionalCustomModifiers, System.Reflection.FieldAttributes attributes) { throw null; }
public System.Reflection.Emit.GenericTypeParameterBuilder[] DefineGenericParameters(params string[] names) { throw null; }
public System.Reflection.Emit.FieldBuilder DefineInitializedData(string name, byte[] data, System.Reflection.FieldAttributes attributes) { throw null; }
public System.Reflection.Emit.MethodBuilder DefineMethod(string name, System.Reflection.MethodAttributes attributes) { throw null; }
public System.Reflection.Emit.MethodBuilder DefineMethod(string name, System.Reflection.MethodAttributes attributes, System.Reflection.CallingConventions callingConvention) { throw null; }
public System.Reflection.Emit.MethodBuilder DefineMethod(string name, System.Reflection.MethodAttributes attributes, System.Reflection.CallingConventions callingConvention, System.Type returnType, System.Type[] parameterTypes) { throw null; }
public System.Reflection.Emit.MethodBuilder DefineMethod(string name, System.Reflection.MethodAttributes attributes, System.Reflection.CallingConventions callingConvention, System.Type returnType, System.Type[] returnTypeRequiredCustomModifiers, System.Type[] returnTypeOptionalCustomModifiers, System.Type[] parameterTypes, System.Type[][] parameterTypeRequiredCustomModifiers, System.Type[][] parameterTypeOptionalCustomModifiers) { throw null; }
public System.Reflection.Emit.MethodBuilder DefineMethod(string name, System.Reflection.MethodAttributes attributes, System.Type returnType, System.Type[] parameterTypes) { throw null; }
public void DefineMethodOverride(System.Reflection.MethodInfo methodInfoBody, System.Reflection.MethodInfo methodInfoDeclaration) { }
public System.Reflection.Emit.TypeBuilder DefineNestedType(string name) { throw null; }
public System.Reflection.Emit.TypeBuilder DefineNestedType(string name, System.Reflection.TypeAttributes attr) { throw null; }
public System.Reflection.Emit.TypeBuilder DefineNestedType(string name, System.Reflection.TypeAttributes attr, System.Type parent) { throw null; }
public System.Reflection.Emit.TypeBuilder DefineNestedType(string name, System.Reflection.TypeAttributes attr, System.Type parent, int typeSize) { throw null; }
public System.Reflection.Emit.TypeBuilder DefineNestedType(string name, System.Reflection.TypeAttributes attr, System.Type parent, System.Reflection.Emit.PackingSize packSize) { throw null; }
public System.Reflection.Emit.TypeBuilder DefineNestedType(string name, System.Reflection.TypeAttributes attr, System.Type parent, System.Reflection.Emit.PackingSize packSize, int typeSize) { throw null; }
public System.Reflection.Emit.TypeBuilder DefineNestedType(string name, System.Reflection.TypeAttributes attr, System.Type parent, System.Type[] interfaces) { throw null; }
public System.Reflection.Emit.PropertyBuilder DefineProperty(string name, System.Reflection.PropertyAttributes attributes, System.Reflection.CallingConventions callingConvention, System.Type returnType, System.Type[] parameterTypes) { throw null; }
public System.Reflection.Emit.PropertyBuilder DefineProperty(string name, System.Reflection.PropertyAttributes attributes, System.Reflection.CallingConventions callingConvention, System.Type returnType, System.Type[] returnTypeRequiredCustomModifiers, System.Type[] returnTypeOptionalCustomModifiers, System.Type[] parameterTypes, System.Type[][] parameterTypeRequiredCustomModifiers, System.Type[][] parameterTypeOptionalCustomModifiers) { throw null; }
public System.Reflection.Emit.PropertyBuilder DefineProperty(string name, System.Reflection.PropertyAttributes attributes, System.Type returnType, System.Type[] parameterTypes) { throw null; }
public System.Reflection.Emit.PropertyBuilder DefineProperty(string name, System.Reflection.PropertyAttributes attributes, System.Type returnType, System.Type[] returnTypeRequiredCustomModifiers, System.Type[] returnTypeOptionalCustomModifiers, System.Type[] parameterTypes, System.Type[][] parameterTypeRequiredCustomModifiers, System.Type[][] parameterTypeOptionalCustomModifiers) { throw null; }
public System.Reflection.Emit.ConstructorBuilder DefineTypeInitializer() { throw null; }
public System.Reflection.Emit.FieldBuilder DefineUninitializedData(string name, int size, System.Reflection.FieldAttributes attributes) { throw null; }
protected override System.Reflection.TypeAttributes GetAttributeFlagsImpl() { throw null; }
public static System.Reflection.ConstructorInfo GetConstructor(System.Type type, System.Reflection.ConstructorInfo constructor) { throw null; }
protected override System.Reflection.ConstructorInfo GetConstructorImpl(System.Reflection.BindingFlags bindingAttr, System.Reflection.Binder binder, System.Reflection.CallingConventions callConvention, System.Type[] types, System.Reflection.ParameterModifier[] modifiers) { throw null; }
public override System.Reflection.ConstructorInfo[] GetConstructors(System.Reflection.BindingFlags bindingAttr) { throw null; }
public override object[] GetCustomAttributes(bool inherit) { throw null; }
public override object[] GetCustomAttributes(System.Type attributeType, bool inherit) { throw null; }
public override System.Type GetElementType() { throw null; }
public override System.Reflection.EventInfo GetEvent(string name, System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Reflection.EventInfo[] GetEvents() { throw null; }
public override System.Reflection.EventInfo[] GetEvents(System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Reflection.FieldInfo GetField(string name, System.Reflection.BindingFlags bindingAttr) { throw null; }
public static System.Reflection.FieldInfo GetField(System.Type type, System.Reflection.FieldInfo field) { throw null; }
public override System.Reflection.FieldInfo[] GetFields(System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Type[] GetGenericArguments() { throw null; }
public override System.Type GetGenericTypeDefinition() { throw null; }
public override System.Type GetInterface(string name, bool ignoreCase) { throw null; }
public override System.Reflection.InterfaceMapping GetInterfaceMap(System.Type interfaceType) { throw null; }
public override System.Type[] GetInterfaces() { throw null; }
public override System.Reflection.MemberInfo[] GetMember(string name, System.Reflection.MemberTypes type, System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Reflection.MemberInfo[] GetMembers(System.Reflection.BindingFlags bindingAttr) { throw null; }
public static System.Reflection.MethodInfo GetMethod(System.Type type, System.Reflection.MethodInfo method) { throw null; }
protected override System.Reflection.MethodInfo GetMethodImpl(string name, System.Reflection.BindingFlags bindingAttr, System.Reflection.Binder binder, System.Reflection.CallingConventions callConvention, System.Type[] types, System.Reflection.ParameterModifier[] modifiers) { throw null; }
public override System.Reflection.MethodInfo[] GetMethods(System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Type GetNestedType(string name, System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Type[] GetNestedTypes(System.Reflection.BindingFlags bindingAttr) { throw null; }
public override System.Reflection.PropertyInfo[] GetProperties(System.Reflection.BindingFlags bindingAttr) { throw null; }
protected override System.Reflection.PropertyInfo GetPropertyImpl(string name, System.Reflection.BindingFlags bindingAttr, System.Reflection.Binder binder, System.Type returnType, System.Type[] types, System.Reflection.ParameterModifier[] modifiers) { throw null; }
protected override bool HasElementTypeImpl() { throw null; }
public override object InvokeMember(string name, System.Reflection.BindingFlags invokeAttr, System.Reflection.Binder binder, object target, object[] args, System.Reflection.ParameterModifier[] modifiers, System.Globalization.CultureInfo culture, string[] namedParameters) { throw null; }
protected override bool IsArrayImpl() { throw null; }
//TYPEINFO: public override bool IsAssignableFrom(System.Reflection.TypeInfo typeInfo) { throw null; }
public override bool IsAssignableFrom(System.Type c) { throw null; }
protected override bool IsByRefImpl() { throw null; }
protected override bool IsCOMObjectImpl() { throw null; }
public bool IsCreated() { throw null; }
public override bool IsDefined(System.Type attributeType, bool inherit) { throw null; }
protected override bool IsPointerImpl() { throw null; }
protected override bool IsPrimitiveImpl() { throw null; }
public override bool IsSubclassOf(System.Type c) { throw null; }
public override System.Type MakeArrayType() { throw null; }
public override System.Type MakeArrayType(int rank) { throw null; }
public override System.Type MakeByRefType() { throw null; }
public override System.Type MakeGenericType(params System.Type[] typeArguments) { throw null; }
public override System.Type MakePointerType() { throw null; }
public void SetCustomAttribute(System.Reflection.ConstructorInfo con, byte[] binaryAttribute) { }
public void SetCustomAttribute(System.Reflection.Emit.CustomAttributeBuilder customBuilder) { }
public void SetParent(System.Type parent) { }
public override string ToString() { throw null; }
}
}
| |
// GtkSharp.Generation.StructBase.cs - The Structure/Boxed Base Class.
//
// Author: Mike Kestner <[email protected]>
//
// Copyright (c) 2001-2003 Mike Kestner
//
// This program is free software; you can redistribute it and/or
// modify it under the terms of version 2 of the GNU General Public
// License as published by the Free Software Foundation.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
// General Public License for more details.
//
// You should have received a copy of the GNU General Public
// License along with this program; if not, write to the
// Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor,
// Boston, MA 02110-1301
namespace GtkSharp.Generation {
using System;
using System.Collections;
using System.IO;
using System.Text.RegularExpressions;
using System.Xml;
public abstract class StructBase : ClassBase, IManualMarshaler {
new ArrayList fields = new ArrayList ();
bool need_read_native = false;
protected StructBase (XmlElement ns, XmlElement elem) : base (ns, elem)
{
foreach (XmlNode node in elem.ChildNodes) {
if (!(node is XmlElement)) continue;
XmlElement member = (XmlElement) node;
switch (node.Name) {
case "field":
fields.Add (new StructField (member, this));
break;
case "callback":
Statistics.IgnoreCount++;
break;
default:
if (!IsNodeNameHandled (node.Name))
Console.WriteLine ("Unexpected node " + node.Name + " in " + CName);
break;
}
}
}
public override string DefaultValue {
get {
return QualifiedName + ".Zero";
}
}
public override string MarshalType {
get {
return "IntPtr";
}
}
public override string AssignToName {
get { throw new NotImplementedException (); }
}
public override string CallByName ()
{
return "this_as_native";
}
public override string CallByName (string var)
{
return var + "_as_native";
}
public override string FromNative (string var)
{
if (DisableNew)
return var + " == IntPtr.Zero ? " + QualifiedName + ".Zero : (" + QualifiedName + ") System.Runtime.InteropServices.Marshal.PtrToStructure (" + var + ", typeof (" + QualifiedName + "))";
else
return QualifiedName + ".New (" + var + ")";
}
public string AllocNative (string var)
{
return "GLib.Marshaller.StructureToPtrAlloc (" + var + ")";
}
public string ReleaseNative (string var)
{
return "Marshal.FreeHGlobal (" +var + ")";
}
private bool DisableNew {
get {
return Elem.HasAttribute ("disable_new");
}
}
protected new void GenFields (GenerationInfo gen_info)
{
int bitfields = 0;
bool need_field = true;
foreach (StructField field in fields) {
if (field.IsBitfield) {
if (need_field) {
StreamWriter sw = gen_info.Writer;
sw.WriteLine ("\t\tprivate uint _bitfield{0};\n", bitfields++);
need_field = false;
}
} else
need_field = true;
field.Generate (gen_info, "\t\t");
}
}
public override bool Validate ()
{
foreach (StructField field in fields) {
if (!field.Validate ()) {
Console.WriteLine ("in Struct " + QualifiedName);
if (!field.IsPointer)
return false;
}
}
return base.Validate ();
}
public override void Generate (GenerationInfo gen_info)
{
bool need_close = false;
if (gen_info.Writer == null) {
gen_info.Writer = gen_info.OpenStream (Name);
need_close = true;
}
StreamWriter sw = gen_info.Writer;
sw.WriteLine ("namespace " + NS + " {");
sw.WriteLine ();
sw.WriteLine ("\tusing System;");
sw.WriteLine ("\tusing System.Collections;");
sw.WriteLine ("\tusing System.Runtime.InteropServices;");
sw.WriteLine ();
sw.WriteLine ("#region Autogenerated code");
if (IsDeprecated)
sw.WriteLine ("\t[Obsolete]");
sw.WriteLine ("\t[StructLayout(LayoutKind.Sequential)]");
string access = IsInternal ? "internal" : "public";
sw.WriteLine ("\t" + access + " struct " + Name + " {");
sw.WriteLine ();
need_read_native = false;
GenFields (gen_info);
sw.WriteLine ();
GenCtors (gen_info);
GenMethods (gen_info, null, this);
if (need_read_native)
GenReadNative (sw);
if (!need_close)
return;
sw.WriteLine ("#endregion");
AppendCustom(sw, gen_info.CustomDir);
sw.WriteLine ("\t}");
sw.WriteLine ("}");
sw.Close ();
gen_info.Writer = null;
}
		// Emits the static Zero field, the optional New(IntPtr) factory (suppressed
		// by the "disable_new" metadata attribute), then the regular ctors. All
		// struct ctors are forced static since C# structs cannot replace their
		// default constructor.
		protected override void GenCtors (GenerationInfo gen_info)
		{
			StreamWriter sw = gen_info.Writer;
			sw.WriteLine ("\t\tpublic static {0} Zero = new {0} ();", QualifiedName);
			sw.WriteLine();
			if (!DisableNew) {
				sw.WriteLine ("\t\tpublic static " + QualifiedName + " New(IntPtr raw) {");
				sw.WriteLine ("\t\t\tif (raw == IntPtr.Zero)");
				sw.WriteLine ("\t\t\t\treturn {0}.Zero;", QualifiedName);
				sw.WriteLine ("\t\t\treturn ({0}) Marshal.PtrToStructure (raw, typeof ({0}));", QualifiedName);
				sw.WriteLine ("\t\t}");
				sw.WriteLine ();
			}
			foreach (Ctor ctor in Ctors)
				ctor.IsStatic = true;
			base.GenCtors (gen_info);
		}
		// Emits the ReadNative helper used by Finish(): it re-marshals the native
		// copy back over the managed struct after a native call mutates it.
		void GenReadNative (StreamWriter sw)
		{
			sw.WriteLine ("\t\tstatic void ReadNative (IntPtr native, ref {0} target)", QualifiedName);
			sw.WriteLine ("\t\t{");
			sw.WriteLine ("\t\t\ttarget = New (native);");
			sw.WriteLine ("\t\t}");
			sw.WriteLine ();
		}
		// Emits the pre-call marshalling code: copies the managed struct into a
		// native buffer ("this_as_native") before invoking the native function.
		public override void Prepare (StreamWriter sw, string indent)
		{
			sw.WriteLine (indent + "IntPtr this_as_native = System.Runtime.InteropServices.Marshal.AllocHGlobal (System.Runtime.InteropServices.Marshal.SizeOf (this));");
			sw.WriteLine (indent + "System.Runtime.InteropServices.Marshal.StructureToPtr (this, this_as_native, false);");
		}
		// Emits the post-call code: reads native mutations back into the managed
		// struct and frees the buffer allocated by Prepare. Also flags that the
		// ReadNative helper must be generated for this struct.
		public override void Finish (StreamWriter sw, string indent)
		{
			need_read_native = true;
			sw.WriteLine (indent + "ReadNative (this_as_native, ref this);");
			sw.WriteLine (indent + "System.Runtime.InteropServices.Marshal.FreeHGlobal (this_as_native);");
		}
}
}
| |
using System.Text;
using Microsoft.Extensions.Logging;
using SchemaZen.Library.Models;
using Test.Integration.Helpers;
using Xunit;
using Xunit.Abstractions;
namespace Test.Integration;
/// <summary>
/// Integration tests that round-trip tab-delimited data through
/// <see cref="Table.ImportData"/> and <see cref="Table.ExportData"/> against a
/// real test database, asserting the exported text equals the imported text.
/// </summary>
[Trait("Category", "Integration")]
public class TableTest {
	private readonly TestDbHelper _dbHelper;
	// Kept for diagnostics; backed by the xUnit test output.
	private readonly ILogger _logger;

	public TableTest(ITestOutputHelper output, TestDbHelper dbHelper) {
		_logger = output.BuildLogger();
		_dbHelper = dbHelper;
	}

	/// <summary>
	/// Basic import/export round-trip on a three-column table with an identity key.
	/// </summary>
	[Fact]
	public async Task TestExportData() {
		var t = new Table("dbo", "Status");
		t.Columns.Add(new Column("id", "int", false, null));
		t.Columns.Add(new Column("code", "char", 1, false, null));
		t.Columns.Add(new Column("description", "varchar", 20, false, null));
		t.Columns.Find("id").Identity = new Identity(1, 1);
		t.AddConstraint(new Constraint("PK_Status", "PRIMARY KEY", "id"));
		await using var testDb = await _dbHelper.CreateTestDbAsync();
		await testDb.ExecSqlAsync(t.ScriptCreate());
		var dataIn =
			@"1	R	Ready
2	P	Processing
3	F	Frozen
";
		var filename = Path.GetTempFileName();
		var writer = File.AppendText(filename);
		writer.Write(dataIn);
		writer.Flush();
		writer.Close();
		// Delete the temp file even when the assertion fails, consistent with
		// the other tests in this class (previously it leaked on failure).
		try {
			t.ImportData(testDb.GetConnString(), filename);
			var sw = new StringWriter();
			t.ExportData(testDb.GetConnString(), sw);
			Assert.Equal(dataIn, sw.ToString());
		} finally {
			File.Delete(filename);
		}
	}

	/// <summary>
	/// Round-trip on a table with a computed column; the input has only the three
	/// physical columns, so a lossless round-trip shows the computed column is
	/// excluded from both import and export.
	/// </summary>
	[Fact]
	public async Task TestImportAndExportIgnoringComputedData() {
		var t = new Table("dbo", "Status");
		t.Columns.Add(new Column("id", "int", false, null));
		t.Columns.Add(new Column("code", "char", 1, false, null));
		t.Columns.Add(new Column("description", "varchar", 20, false, null));
		var computedCol = new Column("computed", "varchar", false, null) {
			ComputedDefinition = "code + ' : ' + description"
		};
		t.Columns.Add(computedCol);
		t.Columns.Find("id").Identity = new Identity(1, 1);
		t.AddConstraint(new Constraint("PK_Status", "PRIMARY KEY", "id"));
		await using var testDb = await _dbHelper.CreateTestDbAsync();
		await testDb.ExecSqlAsync(t.ScriptCreate());
		var dataIn =
			@"1	R	Ready
2	P	Processing
3	F	Frozen
";
		var filename = Path.GetTempFileName();
		var writer = File.AppendText(filename);
		writer.Write(dataIn);
		writer.Flush();
		writer.Close();
		try {
			t.ImportData(testDb.GetConnString(), filename);
			var sw = new StringWriter();
			t.ExportData(testDb.GetConnString(), sw);
			Assert.Equal(dataIn, sw.ToString());
		} finally {
			File.Delete(filename);
		}
	}

	/// <summary>
	/// Round-trip of datetime values with 1-3 fractional digits; the export must
	/// reproduce the original precision exactly.
	/// </summary>
	[Fact]
	public async Task TestImportAndExportDateTimeWithoutLosePrecision() {
		var t = new Table("dbo", "Dummy");
		t.Columns.Add(new Column("id", "int", false, null));
		t.Columns.Add(new Column("createdTime", "datetime", false, null));
		t.Columns.Find("id").Identity = new Identity(1, 1);
		t.AddConstraint(new Constraint("PK_Status", "PRIMARY KEY", "id"));
		await using var testDb = await _dbHelper.CreateTestDbAsync();
		await testDb.ExecSqlAsync(t.ScriptCreate());
		var dataIn =
			@"1	2017-02-21 11:20:30.1
2	2017-02-22 11:20:30.12
3	2017-02-23 11:20:30.123
";
		var filename = Path.GetTempFileName();
		var writer = File.AppendText(filename);
		writer.Write(dataIn);
		writer.Flush();
		writer.Close();
		try {
			t.ImportData(testDb.GetConnString(), filename);
			var sw = new StringWriter();
			t.ExportData(testDb.GetConnString(), sw);
			Assert.Equal(dataIn, sw.ToString());
		} finally {
			File.Delete(filename);
		}
	}

	/// <summary>
	/// Round-trip for a table living in a non-default schema.
	/// </summary>
	[Fact]
	public async Task TestImportAndExportNonDefaultSchema() {
		var s = new Schema("example", "dbo");
		var t = new Table(s.Name, "Example");
		t.Columns.Add(new Column("id", "int", false, null));
		t.Columns.Add(new Column("code", "char", 1, false, null));
		t.Columns.Add(new Column("description", "varchar", 20, false, null));
		t.Columns.Find("id").Identity = new Identity(1, 1);
		t.AddConstraint(new Constraint("PK_Example", "PRIMARY KEY", "id"));
		await using var testDb = await _dbHelper.CreateTestDbAsync();
		await testDb.ExecSqlAsync(s.ScriptCreate());
		await testDb.ExecSqlAsync(t.ScriptCreate());
		var dataIn =
			@"1	R	Ready
2	P	Processing
3	F	Frozen
";
		var filename = Path.GetTempFileName();
		var writer = File.AppendText(filename);
		writer.Write(dataIn);
		writer.Flush();
		writer.Close();
		try {
			t.ImportData(testDb.GetConnString(), filename);
			var sw = new StringWriter();
			t.ExportData(testDb.GetConnString(), sw);
			Assert.Equal(dataIn, sw.ToString());
		} finally {
			File.Delete(filename);
		}
	}

	/// <summary>
	/// Round-trip spanning multiple import batches (more than Table.RowsInBatch
	/// rows). A clustered unique index keeps export order equal to import order.
	/// </summary>
	[Fact]
	public async Task TestLargeAmountOfRowsImportAndExport() {
		var t = new Table("dbo", "TestData");
		t.Columns.Add(new Column("test_field", "int", false, null));
		t.AddConstraint(
			new Constraint("PK_TestData", "PRIMARY KEY", "test_field") {
				IndexType = "NONCLUSTERED"
			});
		t.AddConstraint(
			new Constraint("IX_TestData_PK", "INDEX", "test_field") {
				// clustered index is required to ensure the row order is the same as what we import
				IndexType = "CLUSTERED",
				Table = t,
				Unique = true
			});
		await using var testDb = await _dbHelper.CreateTestDbAsync();
		await testDb.ExecSqlAsync(t.ScriptCreate());
		var filename = Path.GetTempFileName();
		var writer = File.CreateText(filename);
		var sb = new StringBuilder();
		for (var i = 0; i < Table.RowsInBatch * 4.2; i++) {
			sb.AppendLine(i.ToString());
			writer.WriteLine(i.ToString());
		}
		writer.Flush();
		writer.Close();
		var dataIn = sb.ToString();
		Assert.Equal(
			dataIn,
			File.ReadAllText(
				filename)); // just prove that the file and the string are the same, to make the next assertion meaningful!
		try {
			t.ImportData(testDb.GetConnString(), filename);
			var sw = new StringWriter();
			t.ExportData(testDb.GetConnString(), sw);
			Assert.Equal(dataIn, sw.ToString());
		} finally {
			File.Delete(filename);
		}
	}

	/// <summary>
	/// Creates a table exercising every supported SQL Server column type and
	/// verifies the generated CREATE script executes cleanly.
	/// </summary>
	[Fact]
	public async Task TestScript() {
		//create a table with all known types, script it, and execute the script
		var t = new Table("dbo", "AllTypesTest");
		t.Columns.Add(new Column("a", "bigint", false, null));
		t.Columns.Add(new Column("b", "binary", 50, false, null));
		t.Columns.Add(new Column("c", "bit", false, null));
		t.Columns.Add(new Column("d", "char", 10, false, null));
		t.Columns.Add(new Column("e", "datetime", false, null));
		t.Columns.Add(new Column("f", "decimal", 18, 0, false, null));
		t.Columns.Add(new Column("g", "float", false, null));
		t.Columns.Add(new Column("h", "image", false, null));
		t.Columns.Add(new Column("i", "int", false, null));
		t.Columns.Add(new Column("j", "money", false, null));
		t.Columns.Add(new Column("k", "nchar", 10, false, null));
		t.Columns.Add(new Column("l", "ntext", false, null));
		t.Columns.Add(new Column("m", "numeric", 18, 0, false, null));
		t.Columns.Add(new Column("n", "nvarchar", 50, false, null));
		t.Columns.Add(new Column("o", "nvarchar", -1, false, null));
		t.Columns.Add(new Column("p", "real", false, null));
		t.Columns.Add(new Column("q", "smalldatetime", false, null));
		t.Columns.Add(new Column("r", "smallint", false, null));
		t.Columns.Add(new Column("s", "smallmoney", false, null));
		t.Columns.Add(new Column("t", "sql_variant", false, null));
		t.Columns.Add(new Column("u", "text", false, null));
		t.Columns.Add(new Column("v", "timestamp", false, null));
		t.Columns.Add(new Column("w", "tinyint", false, null));
		t.Columns.Add(new Column("x", "uniqueidentifier", false, null));
		t.Columns.Add(new Column("y", "varbinary", 50, false, null));
		t.Columns.Add(new Column("z", "varbinary", -1, false, null));
		t.Columns.Add(
			new Column(
				"aa",
				"varchar",
				50,
				true,
				new Default("DF_AllTypesTest_aa", "'asdf'", false)));
		t.Columns.Add(new Column("bb", "varchar", -1, true, null));
		t.Columns.Add(new Column("cc", "xml", true, null));
		t.Columns.Add(new Column("dd", "hierarchyid", false, null));
		await using var testDb = await _dbHelper.CreateTestDbAsync();
		await testDb.ExecSqlAsync(t.ScriptCreate());
	}
}
| |
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using System;
using System.Collections;
using System.Threading;
namespace Alachisoft.NCache.Common.Threading
{
/// <remarks>
/// The scheduler supports varying scheduling intervals by asking the task
/// every time for its next preferred scheduling interval. Scheduling can
/// either be <i>fixed-delay</i> or <i>fixed-rate</i>.
/// In fixed-delay scheduling, the task's new schedule is calculated
/// as:<br></br>
/// new_schedule = time_task_starts + scheduling_interval
/// <p>
/// In fixed-rate scheduling, the next schedule is calculated as:<br></br>
/// new_schedule = time_task_was_supposed_to_start + scheduling_interval</p>
/// <p>
/// The scheduler internally holds a queue of tasks sorted in ascending order
/// according to their next execution time. A task is removed from the queue
/// if it is cancelled, i.e. if <tt>TimeScheduler.Task.isCancelled()</tt>
/// returns true.
/// </p>
/// <p>
/// Initially, the scheduler is in <tt>SUSPEND</tt>ed mode, <tt>start()</tt>
/// need not be called: if a task is added, the scheduler gets started
/// automatically. Calling <tt>start()</tt> starts the scheduler if it's
/// suspended or stopped else has no effect. Once <tt>stop()</tt> is called,
/// added tasks will not restart it: <tt>start()</tt> has to be called to
/// restart the scheduler.
/// </p>
/// </remarks>
/// <summary>
/// Fixed-delay and fixed-rate single thread scheduler
/// <p><b>Author:</b> Chris Koiak, Bela Ban</p>
/// <p><b>Date:</b> 12/03/2003</p>
/// </summary>
	public class TimeScheduler: IDisposable
	{
		/// <summary>
		/// The interface that submitted tasks must implement
		/// </summary>
		public abstract class Task
		{
			/// <summary>
			/// Returns true if task is cancelled and shouldn't be scheduled again
			/// </summary>
			/// <returns></returns>
			public abstract bool IsCancelled();
			/// <summary>
			/// Returns true if task can be executed immediately after it is created.
			/// <Description>
			/// The default behavior of all tasks is that they can be executed immediately after they are added to the time scheduler.
			/// However, this behavior can be overridden in the derived implementation.
			/// </Description>
			/// </summary>
			public virtual bool Enabled { get { return true; } }
			/// <summary>
			/// The next schedule interval
			/// </summary>
			/// <returns>The next schedule interval</returns>
			public abstract long GetNextInterval();
			/// <summary>
			/// Execute the task
			/// </summary>
			public abstract void Run();
		}
		/// <remarks>
		/// Needed in case all tasks have been
		/// cancelled and we are still waiting on the schedule time of the task
		/// at the top
		/// </remarks>
		/// <summary>Regular wake-up intervals for scheduler</summary>
		// NOTE(review): TICK_INTERVAL is not referenced anywhere in this class.
		private const long TICK_INTERVAL = 1000;
		enum State
		{
			/// <summary>State Constant</summary>
			RUN = 0,
			/// <summary>State Constant</summary>
			SUSPEND = 1,
			/// <summary>State Constant</summary>
			STOPPING = 2,
			/// <summary>State Constant</summary>
			STOP = 3,
			/// <summary>State Constant</summary>
			DISPOSED = 4
		}
		/// <summary>TimeScheduler thread name</summary>
		private const String THREAD_NAME = "TimeScheduler.Thread";
		/// <summary>The scheduler thread</summary>
		// Guarded by lock(this); null whenever the scheduler is not running.
		private Thread thread = null;
		/// <summary>The thread's running state</summary>
		// Guarded by lock(this); DISPOSED is terminal (every transition checks it).
		private State thread_state = State.SUSPEND;
		/// <summary>Time that task queue is empty before suspending the scheduling thread</summary>
		private long suspend_interval;
		/// <summary>Sorted list of <code>IntTask</code>s </summary>
		// Also used as the Monitor object _run waits on between due times.
		private EventQueue queue;
		/// <summary>
		/// Set the thread state to running, create and start the thread
		/// </summary>
		private void _start()
		{
			lock(this)
			{
				// DISPOSED is terminal: never restart a disposed scheduler.
				if(thread_state != State.DISPOSED)
				{
					thread_state = State.RUN;
					thread = new Thread(new ThreadStart(_run));
					thread.Name = THREAD_NAME;
					thread.IsBackground = true;
					thread.Start();
				}
			}
		}
		/// <summary>
		/// Restart the suspended thread
		/// </summary>
		private void _unsuspend()
		{
			_start();
		}
		/// <summary>
		/// Set the thread state to suspended
		/// </summary>
		private void _suspend()
		{
			lock(this)
			{
				if(thread_state != State.DISPOSED)
				{
					thread_state = State.SUSPEND;
					// Clearing the field lets _run's top-of-loop null check exit.
					thread = null;
				}
			}
		}
		/// <summary>
		/// Set the thread state to stopping
		/// </summary>
		private void _stopping()
		{
			lock(this)
			{
				if(thread_state != State.DISPOSED)
				{
					thread_state = State.STOPPING;
				}
			}
		}
		/// <summary>
		/// Set the thread state to stopped
		/// </summary>
		private void _stop()
		{
			lock(this)
			{
				if(thread_state != State.DISPOSED)
				{
					thread_state = State.STOP;
					thread = null;
				}
			}
		}
		/// <remarks>
		/// Get the first task, if the running time hasn't been
		/// reached then wait a bit and retry. Else reschedule the task and then
		/// run it.
		/// </remarks>
		/// <summary>
		/// If the task queue is empty, sleep until a task comes in or if slept
		/// for too long, suspend the thread.
		/// </summary>
		private void _run()
		{
			// Both are definitely assigned before their read below: the read sits
			// inside the e != null branch, after lock(e) either assigns them or
			// exits via continue.
			long elapsedTime;
			long interval;
			try
			{
				while(true)
				{
					lock(this)
					{
						// thread is nulled by _suspend/_stop/Dispose; exit quietly.
						if (thread == null) return;
					}
					Task task = null;
					// Monitor.Wait returns false on timeout, i.e. the queue stayed
					// empty for suspend_interval ms; that triggers suspension below.
					bool lockReAcquired = true;
					lock(queue)
					{
						if(queue.IsEmpty)
							lockReAcquired = Monitor.Wait(queue, (int)suspend_interval);
						if (lockReAcquired)
						{
							QueuedEvent e = queue.Peek();
							if (e != null)
							{
								lock (e)
								{
									task = e.Task;
									if (task.IsCancelled())
									{
										// Drop cancelled tasks without running them.
										queue.Pop();
										continue;
									}
									elapsedTime = e.ElapsedTime;
									interval = e.Interval;
									if (elapsedTime >= interval)
									{
										// Reschedule the task
										queue.Pop();
										if (e.ReQueue())
										{
											queue.Push(e);
										}
									}
								}
								if (elapsedTime < interval)
								{
									// Not due yet: wait out the remainder (a Push by
									// AddTask presumably pulses the queue — confirm
									// against EventQueue).
									// argument out of range exception, might be fixed with this check.. hopefully
									if (interval - elapsedTime > 0)
									{
										if ((interval - elapsedTime) > Int32.MaxValue)
											Monitor.Wait(queue, Int32.MaxValue);
										else
											Monitor.Wait(queue, (int)(interval - elapsedTime));
									}
									continue;
								}
							}
						}
					}
					lock (this)
					{
						// Idle timeout with nothing queued: park the scheduler.
						if (queue.IsEmpty && !lockReAcquired)
						{
							_suspend();
							return;
						}
					}
					try
					{
						// Run outside all locks so a slow task can't block AddTask.
						if(task != null && task.Enabled)
							task.Run();
					}
					catch(Exception ex)
					{
						Trace.error("TimeScheduler._run()", ex.ToString());
					}
				}
			}
			catch(ThreadInterruptedException ex)
			{
				// Stop()/Dispose() interrupt the thread to break out of waits.
				Trace.error("TimeScheduler._run()",ex.ToString());
			}
		}
		/// <summary>
		/// Create a scheduler that executes tasks in dynamically adjustable
		/// intervals
		/// </summary>
		/// <param name="suspend_interval">
		/// The time that the scheduler will wait for
		/// at least one task to be placed in the task queue before suspending
		/// the scheduling thread
		/// </param>
		public TimeScheduler(long suspend_interval)
		{
			queue = new EventQueue();
			this.suspend_interval = suspend_interval;
		}
		/// <summary>
		/// Create a scheduler that executes tasks in dynamically adjustable
		/// intervals
		/// </summary>
		// Default idle timeout before the scheduling thread suspends: 2 seconds.
		public TimeScheduler() : this(2000){}
		/// <remarks>
		/// <b>Relative Scheduling</b>
		/// <tt>true</tt>:<br></br>
		/// Task is rescheduled relative to the last time it <i>actually</i>
		/// started execution
		/// <p>
		/// <tt>false</tt>:<br></br>
		/// Task is scheduled relative to its <i>last</i> execution schedule. This
		/// has the effect that the time between two consecutive executions of
		/// the task remains the same.
		/// </p>
		/// </remarks>
		/// <summary>
		/// Add a task for execution at adjustable intervals
		/// </summary>
		/// <param name="t">The task to execute</param>
		/// <param name="relative">Use relative scheduling</param>
		// NOTE(review): the relative flag is not forwarded to QueuedEvent here,
		// and the computed interval is used only as a negative-value guard.
		public void AddTask(Task t, bool relative)
		{
			long interval;
			lock(this)
			{
				if(thread_state == State.DISPOSED) return;
				// A negative first interval means the task never wants to run.
				if((interval = t.GetNextInterval()) < 0) return;
				queue.Push(new QueuedEvent(t));
				switch(thread_state)
				{
					case State.RUN: break;
					// Adding to a suspended scheduler restarts it automatically;
					// a stopped scheduler requires an explicit Start().
					case State.SUSPEND: _unsuspend(); break;
					case State.STOPPING: break;
					case State.STOP: break;
				}
			}
		}
		/// <summary>
		/// Add a task for execution at adjustable intervals
		/// </summary>
		/// <param name="t">The task to execute</param>
		public void AddTask(Task t) { AddTask(t, true); }
		/// <summary>
		/// Start the scheduler, if it's suspended or stopped
		/// </summary>
		public void Start()
		{
			lock(this)
			{
				switch(thread_state)
				{
					case State.DISPOSED: break;
					case State.RUN: break;
					case State.SUSPEND: _unsuspend(); break;
					case State.STOPPING: break;
					case State.STOP: _start(); break;
				}
			}
		}
		/// <summary>
		/// Stop the scheduler if it's running. Switch to stopped, if it's
		/// suspended. Clear the task queue.
		/// </summary>
		public void Stop()
		{
			// i. Switch to STOPPING, interrupt thread
			// ii. Wait until thread ends
			// iii. Clear the task queue, switch to STOPPED,
			lock(this)
			{
				switch(thread_state)
				{
					case State.RUN: _stopping(); break;
					case State.SUSPEND: _stop(); return;
					case State.STOPPING: return;
					case State.STOP: return;
					case State.DISPOSED: return;
				}
				thread.Interrupt();
			}
			// Join outside the lock so _run can take lock(this) while exiting.
			thread.Join();
			lock(this)
			{
				queue.Clear();
				_stop();
			}
		}
		/// <summary>
		/// Performs application-defined tasks associated with freeing, releasing, or
		/// resetting unmanaged resources.
		/// </summary>
		public virtual void Dispose()
		{
			Thread tmp = null;
			lock(this)
			{
				if(thread_state == State.DISPOSED) return;
				tmp = thread;
				// Mark terminal state first so no transition can resurrect us.
				thread_state = State.DISPOSED;
				thread = null;
				if(tmp != null)
				{
					tmp.Interrupt();
				}
			}
			// Join outside the lock to avoid deadlocking with _run's lock(this).
			if(tmp != null)
			{
				tmp.Join();
				queue.Clear();
			}
		}
	}
}
| |
/*
| Version 10.1.84
| Copyright 2013 Esri
|
| Licensed under the Apache License, Version 2.0 (the "License");
| you may not use this file except in compliance with the License.
| You may obtain a copy of the License at
|
| http://www.apache.org/licenses/LICENSE-2.0
|
| Unless required by applicable law or agreed to in writing, software
| distributed under the License is distributed on an "AS IS" BASIS,
| WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
| See the License for the specific language governing permissions and
| limitations under the License.
*/
using System;
using System.Diagnostics;
using System.Globalization;
using System.Text;
using ESRI.ArcLogistics.Routing.Json;
using ESRI.ArcLogistics.Services;
using ESRI.ArcLogistics.Utility;
namespace ESRI.ArcLogistics.Routing
{
/// <summary>
/// RestVrpService class.
/// </summary>
internal class RestVrpService : IVrpRestService
{
#region constructors
/// <summary>
/// Initializes a new instance of the RestVrpService class.
/// </summary>
/// <param name="syncContext">REST request context instance to be used for
/// communicating with the syncronous VRP rest service.</param>
/// <param name="asyncContext">REST request context instance to be used for
/// communicating with the asyncronous VRP rest service.</param>
/// <param name="soapUrl">SOAP GP service url.</param>
public RestVrpService(
RestServiceContext syncContext,
RestServiceContext asyncContext,
string soapUrl)
{
Debug.Assert(asyncContext != null);
Debug.Assert(!string.IsNullOrEmpty(soapUrl));
// synchronous context could be null if synchronous VRP support
// was disabled.
_syncContext = syncContext;
// SyncVrpResponse contains array of objects of different types.
// WCF DataContractJsonSerializer cannot deserialize such objects
// without special type hints, so we do some pre-processing of
// original JSON string.
_syncRestService = new RestService(JsonProcHelper.AddJsonTypeInfo);
_context = asyncContext.Context;
_soapGPService = new GPServiceClient(
soapUrl,
_context.Connection);
_baseUrl = asyncContext.Url;
_restService = new RestService();
}
#endregion constructors
#region IVrpRestService Members
/// <summary>
/// Executes VRP job synchronously.
/// </summary>
/// <param name="request">The reference to the request object to be
/// send to the server.</param>
/// <returns>Result of the synchronous job execution.</returns>
/// <exception cref="T:ESRI.ArcLogistics.Routing.RestException">error was
/// returned by the REST API.</exception>
/// <exception cref="T:ESRI.ArcLogistics.CommunicationException">failed
/// to communicate with the REST VRP service.</exception>
public SyncVrpResponse ExecuteJob(SubmitVrpJobRequest request)
{
Debug.Assert(request != null);
Debug.Assert(_syncContext != null);
var url = UriHelper.Concat(_syncContext.Url, QUERY_OBJ_EXECUTE_TASK);
var query = RestHelper.BuildQueryString(
request,
_syncContext.Context.KnownTypes,
false);
var options = new HttpRequestOptions()
{
Method = HttpMethod.Post,
UseGZipEncoding = true,
Timeout = EXEC_TASK_TIMEOUT,
};
return _syncRestService.SendRequest<SyncVrpResponse>(
_syncContext.Context,
url,
query,
options);
}
/// <summary>
/// Submits job to the VRP service.
/// </summary>
/// <param name="request">Request describing job to be submitted.</param>
/// <returns>Result of the job submitting.</returns>
/// <exception cref="T:ESRI.ArcLogistics.Routing.RestException">error was
/// returned by the REST API.</exception>
/// <exception cref="T:ESRI.ArcLogistics.CommunicationException">failed
/// to communicate with the REST VRP service.</exception>
public GetVrpJobResultResponse SubmitJob(SubmitVrpJobRequest request)
{
Debug.Assert(request != null);
var url = UriHelper.Concat(_baseUrl, QUERY_OBJ_SUBMIT_JOB);
var query = RestHelper.BuildQueryString(
request,
_context.KnownTypes,
false);
// log request
Logger.Info(String.Format(MSG_SUBMIT_JOB, request.OperationType,
request.OperationDate.ToString("d"),
query));
HttpRequestOptions opt = new HttpRequestOptions();
opt.Method = HttpMethod.Post;
opt.UseGZipEncoding = true;
opt.Timeout = DEFAULT_REQ_TIMEOUT;
return _restService.SendRequest<GetVrpJobResultResponse>(
_context,
url,
query,
opt);
}
public GetVrpJobResultResponse GetJobResult(string jobId)
{
Debug.Assert(jobId != null);
var getJobResult = string.Format(
CultureInfo.InvariantCulture,
URL_JOB_STATUS,
jobId);
var url = UriHelper.Concat(_baseUrl, getJobResult);
StringBuilder sb = new StringBuilder();
RestHelper.AddQueryParam(QUERY_FORMAT, NAOutputFormat.JSON, sb, true);
string query = sb.ToString();
HttpRequestOptions opt = new HttpRequestOptions();
opt.Method = HttpMethod.Get;
opt.UseGZipEncoding = true;
opt.Timeout = DEFAULT_REQ_TIMEOUT;
return _restService.SendRequest<GetVrpJobResultResponse>(
_context,
url,
query,
opt);
}
public T GetGPObject<T>(
string jobId,
string objectUrl)
{
Debug.Assert(jobId != null);
Debug.Assert(objectUrl != null);
var getObject = string.Format(
CultureInfo.InvariantCulture,
URL_GP_OBJECT,
jobId,
objectUrl);
var url = UriHelper.Concat(_baseUrl, getObject);
StringBuilder sb = new StringBuilder();
RestHelper.AddQueryParam(QUERY_FORMAT, NAOutputFormat.JSON, sb, true);
string query = sb.ToString();
HttpRequestOptions opt = new HttpRequestOptions();
opt.Method = HttpMethod.Get;
opt.UseGZipEncoding = true;
opt.Timeout = DEFAULT_REQ_TIMEOUT;
return _restService.SendRequest<T>(_context, url, query, opt);
}
/// <summary>
/// Cancels VRP job with the specified ID.
/// </summary>
/// <param name="jobId">ID of the job to be cancelled.</param>
/// <exception cref="T:ESRI.ArcLogistics.Routing.RestException">error was
/// returned by the REST API.</exception>
/// <exception cref="T:ESRI.ArcLogistics.CommunicationException">failed
/// to communicate with the REST VRP service.</exception>
public void CancelJob(string jobId)
{
_soapGPService.CancelJob(jobId);
}
#endregion
#region IDisposable Members
/// <summary>
/// Closes VRP service client.
/// </summary>
public void Dispose()
{
_soapGPService.Close();
}
#endregion
#region private constants
///////////////////////////////////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////////////////////
// URL query objects
private const string QUERY_OBJ_SUBMIT_JOB = "submitJob";
/// <summary>
/// Name of the synchronous task.
/// </summary>
private const string QUERY_OBJ_EXECUTE_TASK = "execute";
// URL templates
private const string URL_JOB_STATUS = "jobs/{0}";
private const string URL_GP_OBJECT = "jobs/{0}/{1}";
// URL query parameters
private const string QUERY_FORMAT = "f";
// log messages
private const string MSG_SUBMIT_JOB = "Request to the VRP service for {0} operation on {1}:\n{2}";
// timeouts (milliseconds)
private const int DEFAULT_REQ_TIMEOUT = 10 * 60 * 1000;
/// <summary>
/// Timeout in milliseconds for the "execute" task.
/// </summary>
private const int EXEC_TASK_TIMEOUT = 10 * 60 * 1000;
#endregion
#region private constants
/// <summary>
/// Request context to be used for communicating with the VRP service.
/// </summary>
private IRestRequestContext _context;
/// <summary>
/// SOAP GP service instance to be used for jobs cancellation.
/// </summary>
private GPServiceClient _soapGPService;
/// <summary>
/// Url to the VRP REST service.
/// </summary>
private string _baseUrl;
/// <summary>
/// The reference to the rest service object to be used for sending requests.
/// </summary>
private RestService _restService;
/// <summary>
/// The reference to the context object for synchronous VRP service.
/// </summary>
private RestServiceContext _syncContext;
/// <summary>
/// The reference to the rest service object to be used for sending requests
/// to the synchronous GP service.
/// </summary>
private RestService _syncRestService;
#endregion
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
//
/**
* Description:
* Mainly stresses the GC by creating n threads each manipulating its own local binary tree.
* Differs from thdtree in a way that the nodes of the binary trees grow during the lifetime.
*/
namespace DefaultNamespace {
using System.Threading;
using System;
using System.IO;
    // Controls how BinTree mutates node payloads on every Insert/Delete visit.
    public enum TreeType
    {
        // Nodes keep their initial 10-byte payload; no per-visit reallocation.
        Normal,
        // Each visited node reallocates a payload that grows by 100 bytes per
        // visit (see Node.Grow).
        Growing,
        // Each visited node toggles its payload between 10 and 1000 bytes
        // (see Node.Live).
        Living
    }
public class Node
{
internal int m_data;
internal Node m_pLeft;
internal Node m_pRight;
internal byte[] m_aMem;
internal bool Switch;
internal int m_iCount;
public Node()
{
m_aMem = new byte[10];
m_aMem[0] = (byte)10;
m_aMem[9] = (byte)10;
}
public void Live()
{
if (Switch)
{
m_aMem = new byte[1000];
m_aMem[0] = (byte) 10;
m_aMem[999] = (byte) 10;
}
else
{
m_aMem = new byte[10];
m_aMem[0] = (byte) 10;
m_aMem[9] = (byte) 10;
}
Switch = !Switch;
}
public void Grow()
{
m_aMem = new byte[(m_iCount+=100)];
m_aMem[0] = (byte) 10;
m_aMem[m_iCount-1] = (byte) 10;
}
}
public class BinTree
{
internal Node m_pRoot;
internal Random m_Random;
internal TreeType m_TreeType;
public BinTree(int ThreadId, TreeType treeType)
{
m_TreeType = treeType;
m_pRoot = null;
m_Random = new Random();
}
public void Empty (int ThreadId)
{
Console.Out.WriteLine("Thread " + ThreadId + ": Tree Empty");
m_pRoot = null;
}
public void AddNodes (int howMany, int ThreadId)
{
for (int i = 0; i < howMany; i++)
{
m_pRoot = Insert(m_pRoot, m_Random.Next(100));
}
Console.Out.WriteLine("Thread " + ThreadId + " Added: " + howMany + " Nodes: " + GC.GetTotalMemory(false));
}
public void DeleteNodes (int howMany, int ThreadId)
{
for (int i = 0; i < howMany; i++)
{
m_pRoot = Delete(m_pRoot, m_Random.Next(100) );
}
Console.Out.WriteLine("Thread " + ThreadId +" Deleted: " + howMany + " Nodes: " + GC.GetTotalMemory(false));
}
public Node Insert(Node root, int element)
{
if(root == null) //if is NULL make a new node
{ //and copy number to the new node
root=new Node(); //make new node
root.m_data = element; //copy number
root.m_pLeft=null ; //set the children to NULL
root.m_pRight=null;
}
else if(element < root.m_data)
{
root.m_pLeft = Insert(root.m_pLeft, element);
}
else
{
root.m_pRight = Insert(root.m_pRight, element);
}
if (m_TreeType==TreeType.Growing)
{
root.Grow();
}
else if (m_TreeType==TreeType.Living)
{
root.Live();
}
return root;
}
public Node Delete(Node root, int element)
{
Node temp = null;
if (root == null)
{
return null; //Node not found
}
else if (element == root.m_data) //if it was the first data (node)
{
if(root.m_pRight == null) //check if it has right child.
{ //If it has no right child
return root.m_pLeft;
}
if (root.m_pLeft == null)
{
return root.m_pRight;
}
else
{
for (temp = root.m_pLeft; temp.m_pRight != null; temp = temp.m_pRight);
root.m_data = temp.m_data;
root.m_pLeft = Delete(root.m_pLeft, temp.m_data);
}
}
else if (root.m_data > element)
{
root.m_pLeft = Delete(root.m_pLeft, element);
}
else
{
root.m_pRight = Delete(root.m_pRight, element);
}
if (m_TreeType==TreeType.Growing)
{
root.Grow();
}
else if (m_TreeType==TreeType.Living)
{
root.Live();
}
return root;
}
}
public class TreeThread
{
internal int[] mA_Count;
internal int m_id = 0;
internal BinTree m_BinTree;
internal Thread Mv_Thread;
public TreeThread(int ThreadId, TreeType treeType, int[] count)
{
mA_Count = count;
m_BinTree = new BinTree(ThreadId, treeType);
m_id = ThreadId;
Mv_Thread = new Thread( new ThreadStart(this.ThreadStart));
Mv_Thread.Start( );
Console.Out.WriteLine("Started Thread: " + m_id);
}
public void ThreadStart()
{ //All threads start here
for (int i = 0; i < mA_Count.Length; i++)
{
if (mA_Count[i] == 0)
{
m_BinTree.Empty(m_id);
}
else if (mA_Count[i] > 0 )
{
m_BinTree.AddNodes(mA_Count[i], m_id);
}
else
{
m_BinTree.DeleteNodes((mA_Count[i] * -1), m_id);
}
}
}
}
public class ThdTreeGrowingObj
{
public static int Main (System.String[] Args)
{
int iNofThread = 0;
if (Args.Length == 1)
{
if (!Int32.TryParse( Args[0], out iNofThread ))
{
iNofThread = 2;
}
}
else
{
iNofThread = 2;
}
int[] count = {300, 1000, -350, 0, 71, 200};
TreeThread Mv_TreeThread;
for (int i = 0; i < iNofThread; i++)
{
Mv_TreeThread = new TreeThread(i, TreeType.Growing, count); //Each treethread object launches a thread
}
return 100;
}
}
}
| |
/********************************************************************
The Multiverse Platform is made available under the MIT License.
Copyright (c) 2012 The Multiverse Foundation
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify,
merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software
is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
OR OTHER DEALINGS IN THE SOFTWARE.
*********************************************************************/
/***************************************************************************
Copyright (c) Microsoft Corporation. All rights reserved.
This code is licensed under the Visual Studio SDK license terms.
THIS CODE IS PROVIDED *AS IS* WITHOUT WARRANTY OF
ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING ANY
IMPLIED WARRANTIES OF FITNESS FOR A PARTICULAR
PURPOSE, MERCHANTABILITY, OR NON-INFRINGEMENT.
***************************************************************************/
using System;
using System.Collections.Generic;
using System.ComponentModel.Design;
using System.Reflection;
using System.Runtime.InteropServices;
// Platform references
using Microsoft.VisualStudio.OLE.Interop;
using Microsoft.VisualStudio.Shell.Interop;
using Microsoft.VisualStudio.TextManager.Interop;
using Microsoft.VisualStudio.Shell;
// IronPython namespaces.
using Microsoft.Samples.VisualStudio.IronPythonLanguageService;
using IronPython.Hosting;
// Unit test framework.
using Microsoft.VsSDK.UnitTestLibrary;
using Microsoft.VisualStudio.TestTools.UnitTesting;
// Namespace of the class to test
using Microsoft.Samples.VisualStudio.IronPythonInterfaces;
using Microsoft.Samples.VisualStudio.IronPythonConsole;
namespace Microsoft.Samples.VisualStudio.IronPythonConsole.UnitTest
{
/// <summary>
/// Exception type used by these unit tests to simulate failures raised from
/// console-related components; a dedicated type lets tests catch it without
/// masking unrelated exceptions.
/// </summary>
public class TestConsoleException : Exception
{
    /// <summary>Creates the exception with no message.</summary>
    public TestConsoleException()
    {
    }

    /// <summary>Creates the exception with a descriptive message.</summary>
    public TestConsoleException(string message)
        : base(message)
    {
    }

    /// <summary>Creates the exception wrapping an inner exception.</summary>
    public TestConsoleException(string message, Exception innerException)
        : base(message, innerException)
    {
    }
}
/// <summary>
/// Summary description for ConsoleWindowTest
/// </summary>
[TestClass]
public class ConsoleWindowTest
{
/// <summary>
/// The console window constructor must reject a null service provider with an
/// ArgumentNullException — either thrown directly, or wrapped inside a
/// TargetInvocationException when the window is created through reflection.
/// </summary>
[TestMethod]
public void WindowConstructorNullProvider()
{
    bool gotExpectedException = false;
    try
    {
        object consoleWindow = CommandWindowHelper.CreateConsoleWindow(null);
    }
    catch (ArgumentNullException)
    {
        // Thrown directly by the constructor.
        gotExpectedException = true;
    }
    catch (System.Reflection.TargetInvocationException reflectionError)
    {
        // Thrown when the constructor runs via reflection; accept it only if
        // the wrapped exception is the one we expect.
        gotExpectedException = reflectionError.InnerException is ArgumentNullException;
    }
    Assert.IsTrue(gotExpectedException);
}
/// <summary>
/// Verifies that constructing the console window with a valid service provider
/// sites the text buffer: the buffer mock must receive at least one
/// IObjectWithSite.SetSite call.
/// </summary>
[TestMethod]
public void WindowConstructor()
{
    using (OleServiceProvider provider = new OleServiceProvider())
    {
        // Create a mock object for the text buffer.
        BaseMock textLinesMock = MockFactories.TextBufferFactory.GetInstance();
        // Create a new local registry class.
        LocalRegistryMock mockRegistry = new LocalRegistryMock();
        // Add the text buffer to the list of the classes that local registry can create.
        mockRegistry.AddClass(typeof(VsTextBufferClass), textLinesMock);
        provider.AddService(typeof(SLocalRegistry), mockRegistry, false);
        // Now create the object and verify that the constructor sets the site for the text buffer.
        using (IDisposable consoleObject = CommandWindowHelper.CreateConsoleWindow(provider) as IDisposable)
        {
            Assert.IsNotNull(consoleObject);
            Assert.IsTrue(0 < textLinesMock.FunctionCalls(string.Format("{0}.{1}", typeof(IObjectWithSite).FullName, "SetSite")));
        }
    }
}
/// <summary>
/// Verifies that the parameterless console window constructor resolves its
/// services through the sited package: the text buffer must still be created
/// through the local registry and sited (IObjectWithSite.SetSite called).
/// </summary>
[TestMethod]
public void StandardConstructor()
{
    using (OleServiceProvider provider = OleServiceProvider.CreateOleServiceProviderWithBasicServices())
    {
        IVsPackage package = null;
        try
        {
            // Create a mock object for the text buffer.
            BaseMock textLinesMock = MockFactories.TextBufferFactory.GetInstance();
            // Create a new local registry class.
            LocalRegistryMock mockRegistry = new LocalRegistryMock();
            // Add the text buffer to the list of the classes that local registry can create.
            mockRegistry.AddClass(typeof(VsTextBufferClass), textLinesMock);
            provider.AddService(typeof(SLocalRegistry), mockRegistry, false);
            // Now create a package object and site it.
            package = new PythonConsolePackage() as IVsPackage;
            package.SetSite(provider);
            // Create a console window using the standard constructor and verify that the
            // text buffer is created and sited.
            using (IDisposable consoleObject = CommandWindowHelper.CreateConsoleWindow() as IDisposable)
            {
                Assert.IsTrue(0 < textLinesMock.FunctionCalls(string.Format("{0}.{1}", typeof(IObjectWithSite).FullName, "SetSite")));
            }
        }
        finally
        {
            // Un-site and close the package even when the test fails so that
            // later tests do not observe a stale sited package.
            if (null != package)
            {
                package.SetSite(null);
                package.Close();
            }
        }
    }
}
/// <summary>
/// Verifies that the console window's IVsWindowPane implementation forwards
/// every interface method to the implementation provided by the inner text
/// view, and that the view is closed when the pane is disposed.
/// </summary>
[TestMethod]
public void WindowPaneImplementation()
{
    using (OleServiceProvider provider = new OleServiceProvider())
    {
        // Create a mock text buffer for the console.
        BaseMock textLinesMock = MockFactories.TextBufferFactory.GetInstance();
        LocalRegistryMock mockLocalRegistry = new LocalRegistryMock();
        mockLocalRegistry.AddClass(typeof(VsTextBufferClass), textLinesMock);
        BaseMock textViewMock = MockFactories.TextViewFactory.GetInstance();
        mockLocalRegistry.AddClass(typeof(VsTextViewClass), textViewMock);
        provider.AddService(typeof(SLocalRegistry), mockLocalRegistry, false);
        // Create the tool window.
        using (IDisposable disposableObject = CommandWindowHelper.CreateConsoleWindow(provider) as IDisposable)
        {
            IVsWindowPane windowPane = disposableObject as IVsWindowPane;
            Assert.IsNotNull(windowPane);
            // Now call the IVsWindowPane's methods and check that they are redirected to
            // the implementation provided by the text view (one forwarded call each).
            IntPtr newHwnd;
            Assert.IsTrue(Microsoft.VisualStudio.ErrorHandler.Succeeded(
                windowPane.CreatePaneWindow(IntPtr.Zero, 0, 0, 0, 0, out newHwnd)));
            Assert.IsTrue(1 == textViewMock.FunctionCalls(string.Format("{0}.{1}", typeof(IVsWindowPane).FullName, "CreatePaneWindow")));
            Assert.IsTrue(Microsoft.VisualStudio.ErrorHandler.Succeeded(
                windowPane.GetDefaultSize(null)));
            Assert.IsTrue(1 == textViewMock.FunctionCalls(string.Format("{0}.{1}", typeof(IVsWindowPane).FullName, "GetDefaultSize")));
            Assert.IsTrue(Microsoft.VisualStudio.ErrorHandler.Succeeded(
                windowPane.LoadViewState(null)));
            Assert.IsTrue(1 == textViewMock.FunctionCalls(string.Format("{0}.{1}", typeof(IVsWindowPane).FullName, "LoadViewState")));
            Assert.IsTrue(Microsoft.VisualStudio.ErrorHandler.Succeeded(
                windowPane.SaveViewState(null)));
            Assert.IsTrue(1 == textViewMock.FunctionCalls(string.Format("{0}.{1}", typeof(IVsWindowPane).FullName, "SaveViewState")));
            Assert.IsTrue(Microsoft.VisualStudio.ErrorHandler.Succeeded(
                windowPane.SetSite(null)));
            Assert.IsTrue(1 == textViewMock.FunctionCalls(string.Format("{0}.{1}", typeof(IVsWindowPane).FullName, "SetSite")));
            Assert.IsTrue(Microsoft.VisualStudio.ErrorHandler.Succeeded(
                windowPane.TranslateAccelerator(null)));
            Assert.IsTrue(1 == textViewMock.FunctionCalls(string.Format("{0}.{1}", typeof(IVsWindowPane).FullName, "TranslateAccelerator")));
            Assert.IsTrue(Microsoft.VisualStudio.ErrorHandler.Succeeded(
                windowPane.ClosePane()));
            Assert.IsTrue(1 == textViewMock.FunctionCalls(string.Format("{0}.{1}", typeof(IVsWindowPane).FullName, "ClosePane")));
        }
        // Verify that the text view is closed after Dispose is called on the window pane.
        Assert.IsTrue(1 == textViewMock.FunctionCalls(string.Format("{0}.{1}", typeof(IVsTextView).FullName, "CloseView")));
    }
}
/// <summary>
/// Mock callback for IObjectWithSite.SetSite on the text view: the site
/// passed to the view must never be null.
/// </summary>
private static void TextViewSetSiteCallback(object sender, CallbackArgs args)
{
    Assert.IsNotNull(args.GetParameter(0));
}
/// <summary>
/// Mock callback for IVsTextView.Initialize: checks that the view was sited
/// before initialization, that a text buffer is supplied as the first
/// parameter and that no parent window handle is used, then returns S_OK.
/// </summary>
private static void TextViewInitializeCallback(object sender, CallbackArgs args)
{
    BaseMock mock = (BaseMock)sender;
    // Verify that the view is sited and that a text buffer is provided.
    Assert.IsTrue(1 == mock.FunctionCalls(string.Format("{0}.{1}", typeof(IObjectWithSite), "SetSite")));
    IVsTextLines textLines = args.GetParameter(0) as IVsTextLines;
    Assert.IsNotNull(textLines);
    // This text view is not supposed to be initialized using a parent window.
    Assert.IsTrue(IntPtr.Zero == (IntPtr)args.GetParameter(1));
    args.ReturnValue = Microsoft.VisualStudio.VSConstants.S_OK;
}
/// <summary>
/// Verifies that calling CreatePaneWindow forces the creation of the text
/// view and that the view is sited and initialized exactly once (argument
/// validation happens in TextViewSetSiteCallback / TextViewInitializeCallback).
/// </summary>
[TestMethod]
public void TextViewCreation()
{
    using (OleServiceProvider provider = new OleServiceProvider())
    {
        // Create a mock text buffer for the console.
        BaseMock textLinesMock = MockFactories.TextBufferFactory.GetInstance();
        LocalRegistryMock mockLocalRegistry = new LocalRegistryMock();
        mockLocalRegistry.AddClass(typeof(VsTextBufferClass), textLinesMock);
        // Define the mock object for the text view.
        BaseMock textViewMock = MockFactories.TextViewFactory.GetInstance();
        textViewMock.AddMethodCallback(string.Format("{0}.{1}", typeof(IObjectWithSite).FullName, "SetSite"),
            new EventHandler<CallbackArgs>(TextViewSetSiteCallback));
        textViewMock.AddMethodCallback(string.Format("{0}.{1}", typeof(IVsTextView).FullName, "Initialize"),
            new EventHandler<CallbackArgs>(TextViewInitializeCallback));
        mockLocalRegistry.AddClass(typeof(VsTextViewClass), textViewMock);
        // Add the local registry to the list of services.
        provider.AddService(typeof(SLocalRegistry), mockLocalRegistry, false);
        // Create the tool window.
        using (IDisposable disposableObject = CommandWindowHelper.CreateConsoleWindow(provider) as IDisposable)
        {
            IVsWindowPane windowPane = disposableObject as IVsWindowPane;
            Assert.IsNotNull(windowPane);
            // Call the CreatePaneWindow method that will force the creation of the text view.
            IntPtr newHwnd;
            Assert.IsTrue(Microsoft.VisualStudio.ErrorHandler.Succeeded(
                windowPane.CreatePaneWindow(IntPtr.Zero, 0, 0, 0, 0, out newHwnd)));
            // Verify that the text view was used as expected. FullName is used
            // explicitly so the keys are spelled the same way as the
            // registrations above (Type.ToString() yields the same string for
            // these non-generic interfaces, but the file's convention is FullName).
            Assert.IsTrue(1 == textViewMock.FunctionCalls(string.Format("{0}.{1}", typeof(IObjectWithSite).FullName, "SetSite")));
            Assert.IsTrue(1 == textViewMock.FunctionCalls(string.Format("{0}.{1}", typeof(IVsTextView).FullName, "Initialize")));
        }
    }
}
/// <summary>
/// Verifies that when the IronPython language service is available, creating
/// the console's pane window registers the console text view as a "special
/// view" of the language service: after CreatePaneWindow the language's
/// private specialSources dictionary must map the view mock to a PythonSource.
/// </summary>
[TestMethod]
public void ViewCreationWithLanguage()
{
    using (OleServiceProvider provider = OleServiceProvider.CreateOleServiceProviderWithBasicServices())
    {
        // Create a mock text buffer for the console.
        BaseMock textLinesMock = MockFactories.TextBufferFactory.GetInstance();
        // The buffer have to handle a few of connection points in order to enable the
        // creation of a Source object from the language service.
        ConnectionPointHelper.AddConnectionPointsToContainer(
            textLinesMock,
            new Type[] { typeof(IVsFinalTextChangeCommitEvents), typeof(IVsTextLinesEvents), typeof(IVsUserDataEvents) });
        // Create the local registry mock and add the text buffer to it.
        LocalRegistryMock mockLocalRegistry = new LocalRegistryMock();
        mockLocalRegistry.AddClass(typeof(VsTextBufferClass), textLinesMock);
        // Define the mock object for the text view.
        BaseMock textViewMock = MockFactories.TextViewFactory.GetInstance();
        // Create the connection point for IVsTextViewEvents (needed for the language service).
        ConnectionPointHelper.AddConnectionPointsToContainer(textViewMock, new Type[] { typeof(IVsTextViewEvents) });
        // Add the text view to the local registry.
        mockLocalRegistry.AddClass(typeof(VsTextViewClass), textViewMock);
        MockPackage package = new MockPackage();
        ((IVsPackage)package).SetSite(provider);
        provider.AddService(typeof(Microsoft.VisualStudio.Shell.Package), package, true);
        // Create the language service and add it to the list of services.
        PythonLanguage language = new MockLanguage();
        provider.AddService(typeof(PythonLanguage), language, true);
        language.SetSite(provider);
        // We need to add a method tip window to the local registry in order to create
        // a Source object.
        IVsMethodTipWindow methodTip = MockFactories.MethodTipFactory.GetInstance() as IVsMethodTipWindow;
        mockLocalRegistry.AddClass(typeof(VsMethodTipWindowClass), methodTip);
        // Create a mock expansion manager that is needed for the language service.
        BaseMock expansionManager = MockFactories.ExpansionManagerFactory.GetInstance();
        ConnectionPointHelper.AddConnectionPointsToContainer(expansionManager, new Type[] { typeof(IVsExpansionEvents) });
        // SVsExpansionManager is internal to the MPF assembly, so the service
        // type has to be fetched through reflection.
        Assembly asm = typeof(Microsoft.VisualStudio.Package.LanguageService).Assembly;
        Type expMgrType = asm.GetType("Microsoft.VisualStudio.Package.SVsExpansionManager");
        provider.AddService(expMgrType, expansionManager, false);
        // Add the local registry to the list of services.
        provider.AddService(typeof(SLocalRegistry), mockLocalRegistry, false);
        using (ToolWindowPane windowPane = CommandWindowHelper.CreateConsoleWindow(provider) as ToolWindowPane)
        {
            Assert.IsNotNull(windowPane);
            // Call the CreatePaneWindow method that will force the creation of the text view.
            IntPtr newHwnd;
            Assert.IsTrue(Microsoft.VisualStudio.ErrorHandler.Succeeded(
                ((IVsWindowPane)windowPane).CreatePaneWindow(IntPtr.Zero, 0, 0, 0, 0, out newHwnd)));
            // Verify that the language service contains a special view for this text view.
            FieldInfo specialSourcesField = typeof(PythonLanguage).GetField("specialSources", BindingFlags.Instance | BindingFlags.NonPublic);
            Assert.IsNotNull(specialSourcesField);
            Dictionary<IVsTextView, PythonSource> specialSources =
                (Dictionary<IVsTextView, PythonSource>)specialSourcesField.GetValue(language);
            PythonSource source;
            Assert.IsTrue(specialSources.TryGetValue(textViewMock as IVsTextView, out source));
            Assert.IsNotNull(source);
            // Set ColorState to null so that Dispose will not call Marshal.ReleaseComObject on it.
            source.ColorState = null;
        }
    }
}
/// <summary>
/// Verifies that the console window gets the shared IronPython engine from
/// the engine provider and redirects the engine's standard error and output
/// streams (exactly one call each to set_StdErr and set_StdOut).
/// </summary>
[TestMethod]
public void EngineInitialization()
{
    using (OleServiceProvider provider = new OleServiceProvider())
    {
        // Create a mock text buffer for the console.
        BaseMock textLinesMock = MockFactories.CreateBufferWithMarker();
        LocalRegistryMock mockLocalRegistry = new LocalRegistryMock();
        mockLocalRegistry.AddClass(typeof(VsTextBufferClass), textLinesMock);
        provider.AddService(typeof(SLocalRegistry), mockLocalRegistry, false);
        // Create a mock engine provider.
        BaseMock mockEngineProvider = MockFactories.EngineProviderFactory.GetInstance();
        // Create a mock engine.
        BaseMock mockEngine = MockFactories.CreateStandardEngine();
        // Set this engine as the one returned from the GetSharedEngine of the engine provider.
        // FullName is spelled out to match the key convention used elsewhere in
        // this file (Type.ToString() produces the same string for these interfaces).
        mockEngineProvider.AddMethodReturnValues(
            string.Format("{0}.{1}", typeof(IPythonEngineProvider).FullName, "GetSharedEngine"),
            new object[] { (IEngine)mockEngine });
        // Add the engine provider to the list of the services.
        provider.AddService(typeof(IPythonEngineProvider), mockEngineProvider, false);
        // Create the console window
        using (IDisposable disposableObject = CommandWindowHelper.CreateConsoleWindow(provider) as IDisposable)
        {
            IVsWindowPane windowPane = disposableObject as IVsWindowPane;
            Assert.IsNotNull(windowPane);
            // Verify that the shared engine was fetched and its streams redirected.
            Assert.IsTrue(1 == mockEngineProvider.FunctionCalls(string.Format("{0}.{1}", typeof(IPythonEngineProvider).FullName, "GetSharedEngine")));
            Assert.IsTrue(1 == mockEngine.FunctionCalls(string.Format("{0}.{1}", typeof(IEngine).FullName, "set_StdErr")));
            Assert.IsTrue(1 == mockEngine.FunctionCalls(string.Format("{0}.{1}", typeof(IEngine).FullName, "set_StdOut")));
        }
    }
}
/// <summary>
/// Mock callback for IEngine.set_StdErr: stores the stream passed to the
/// setter under the "StdErr" key so the test can examine it later.
/// </summary>
private static void SetEngineStdErr(object sender, CallbackArgs args)
{
    BaseMock mock = (BaseMock)sender;
    mock["StdErr"] = args.GetParameter(0);
}
/// <summary>
/// Mock callback for IEngine.set_StdOut: stores the stream passed to the
/// setter under the "StdOut" key so the test can examine it later.
/// </summary>
private static void SetEngineStdOut(object sender, CallbackArgs args)
{
    BaseMock mock = (BaseMock)sender;
    mock["StdOut"] = args.GetParameter(0);
}
/// <summary>
/// Mock callback for IVsTextLines.ReplaceLines: decodes the unmanaged text
/// pointer / length pair (parameters 4 and 5) and appends the decoded text to
/// the mock's "Text" value, so tests can assert on what was written to the buffer.
/// </summary>
private static void ReplaceLinesCallback(object sender, CallbackArgs args)
{
    IntPtr stringPointer = (IntPtr)args.GetParameter(4);
    int stringLen = (int)args.GetParameter(5);
    Assert.IsTrue(IntPtr.Zero != stringPointer);
    Assert.IsTrue(stringLen > 0);
    string newText = Marshal.PtrToStringAuto(stringPointer, stringLen);
    BaseMock mock = (BaseMock)sender;
    mock["Text"] = (string)mock["Text"] + newText;
    args.ReturnValue = Microsoft.VisualStudio.VSConstants.S_OK;
}
/// <summary>
/// Verifies that the streams the console installs as the engine's StdErr and
/// StdOut are wired to the console text buffer: text written to either stream
/// must be appended to the buffer (observed through the ReplaceLines callback).
/// </summary>
[TestMethod]
public void EngineStreams()
{
    using (OleServiceProvider provider = new OleServiceProvider())
    {
        // Create a mock text buffer for the console.
        BaseMock textLinesMock = MockFactories.CreateBufferWithMarker();
        textLinesMock["Text"] = "";
        LocalRegistryMock mockLocalRegistry = new LocalRegistryMock();
        mockLocalRegistry.AddClass(typeof(VsTextBufferClass), textLinesMock);
        provider.AddService(typeof(SLocalRegistry), mockLocalRegistry, false);
        // Create a mock engine provider.
        BaseMock mockEngineProvider = MockFactories.EngineProviderFactory.GetInstance();
        // Create a mock engine.
        BaseMock mockEngine = MockFactories.CreateStandardEngine();
        // Add the callbacks for the setter methods of stderr and stdout
        mockEngine.AddMethodCallback(
            string.Format("{0}.{1}", typeof(IEngine).FullName, "set_StdErr"),
            new EventHandler<CallbackArgs>(SetEngineStdErr));
        mockEngine.AddMethodCallback(
            string.Format("{0}.{1}", typeof(IEngine).FullName, "set_StdOut"),
            new EventHandler<CallbackArgs>(SetEngineStdOut));
        // Set this engine as the one returned from the GetSharedEngine of the engine provider.
        mockEngineProvider.AddMethodReturnValues(
            string.Format("{0}.{1}", typeof(IPythonEngineProvider), "GetSharedEngine"),
            new object[] { (IEngine)mockEngine });
        // Add the engine provider to the list of the services.
        provider.AddService(typeof(IPythonEngineProvider), mockEngineProvider, false);
        // Create the console window.
        using (IDisposable disposableObject = CommandWindowHelper.CreateConsoleWindow(provider) as IDisposable)
        {
            IVsWindowPane windowPane = disposableObject as IVsWindowPane;
            Assert.IsNotNull(windowPane);
            // The console constructor must have handed both streams to the engine.
            Assert.IsNotNull(mockEngine["StdErr"]);
            Assert.IsNotNull(mockEngine["StdOut"]);
            // Set the callback for the text buffer.
            textLinesMock.AddMethodCallback(
                string.Format("{0}.{1}", typeof(IVsTextLines).FullName, "ReplaceLines"),
                new EventHandler<CallbackArgs>(ReplaceLinesCallback));
            // Verify that the standard error stream is associated with the text buffer.
            System.IO.Stream stream = (System.IO.Stream)mockEngine["StdErr"];
            using (System.IO.StreamWriter writer = new System.IO.StreamWriter(stream))
            {
                writer.Write("Test String");
                writer.Flush();
                Assert.IsTrue((string)textLinesMock["Text"] == "Test String");
                textLinesMock["Text"] = "";
            }
            // Verify the standard output.
            stream = (System.IO.Stream)mockEngine["StdOut"];
            using (System.IO.StreamWriter writer = new System.IO.StreamWriter(stream))
            {
                writer.Write("Test String");
                writer.Flush();
                Assert.IsTrue((string)textLinesMock["Text"] == "Test String");
                textLinesMock["Text"] = "";
            }
        }
    }
}
/// <summary>
/// Mock callback for IVsTextView.AddCommandFilter: records the installed
/// filter under "CommandFilter" and returns the previously stored
/// "OriginalFilter" as the next command target in the chain.
/// </summary>
private static void AddCommandFilterCallback(object sender, CallbackArgs args)
{
    BaseMock mock = (BaseMock)sender;
    mock["CommandFilter"] = (IOleCommandTarget)args.GetParameter(0);
    args.SetParameter(1, (IOleCommandTarget)mock["OriginalFilter"]);
    args.ReturnValue = Microsoft.VisualStudio.VSConstants.S_OK;
}
/// <summary>
/// Verifies that the console window installs its command filter on the text
/// view: before the view is created the view-specific commands are not
/// handled; after CreatePaneWindow and the frame assignment (which triggers
/// OnToolWindowCreated) both the view-specific commands and the commands of
/// the original chained target must be supported.
/// </summary>
[TestMethod]
public void VerifyCommandFilter()
{
    using (OleServiceProvider provider = new OleServiceProvider())
    {
        // Create a mock text buffer for the console.
        BaseMock textLinesMock = MockFactories.TextBufferFactory.GetInstance();
        LocalRegistryMock mockLocalRegistry = new LocalRegistryMock();
        mockLocalRegistry.AddClass(typeof(VsTextBufferClass), textLinesMock);
        // Define the mock object for the text view.
        BaseMock textViewMock = MockFactories.TextViewFactory.GetInstance();
        textViewMock.AddMethodCallback(
            string.Format("{0}.{1}", typeof(IVsTextView).FullName, "AddCommandFilter"),
            new EventHandler<CallbackArgs>(AddCommandFilterCallback));
        mockLocalRegistry.AddClass(typeof(VsTextViewClass), textViewMock);
        // Create a command target that handles some random command
        OleMenuCommandService commandService = new OleMenuCommandService(provider);
        Guid newCommandGroup = Guid.NewGuid();
        uint newCommandId = 42;
        CommandID id = new CommandID(newCommandGroup, (int)newCommandId);
        OleMenuCommand cmd = new OleMenuCommand(null, id);
        commandService.AddCommand(cmd);
        textViewMock["OriginalFilter"] = (IOleCommandTarget)commandService;
        // Add the local registry to the list of services.
        provider.AddService(typeof(SLocalRegistry), mockLocalRegistry, false);
        // Create the window.
        using (ToolWindowPane windowPane = CommandWindowHelper.CreateConsoleWindow(provider) as ToolWindowPane)
        {
            Assert.IsNotNull(windowPane);
            // Verify that the command specific to the text view are not handled yet.
            CommandTargetHelper commandHelper = new CommandTargetHelper((IOleCommandTarget)windowPane);
            uint flags;
            Assert.IsFalse(commandHelper.IsCommandSupported(
                typeof(Microsoft.VisualStudio.VSConstants.VSStd2KCmdID).GUID,
                (int)Microsoft.VisualStudio.VSConstants.VSStd2KCmdID.RETURN,
                out flags));
            Assert.IsFalse(commandHelper.IsCommandSupported(
                typeof(Microsoft.VisualStudio.VSConstants.VSStd2KCmdID).GUID,
                (int)Microsoft.VisualStudio.VSConstants.VSStd2KCmdID.UP,
                out flags));
            Assert.IsFalse(commandHelper.IsCommandSupported(
                typeof(Microsoft.VisualStudio.VSConstants.VSStd2KCmdID).GUID,
                (int)Microsoft.VisualStudio.VSConstants.VSStd2KCmdID.DOWN,
                out flags));
            Assert.IsFalse(commandHelper.IsCommandSupported(
                typeof(Microsoft.VisualStudio.VSConstants.VSStd2KCmdID).GUID,
                (int)Microsoft.VisualStudio.VSConstants.VSStd2KCmdID.SHOWCONTEXTMENU,
                out flags));
            // Verify that also the command that we have defined here is not supported.
            Assert.IsFalse(commandHelper.IsCommandSupported(newCommandGroup, newCommandId, out flags));
            // Call the CreatePaneWindow method that will force the creation of the text view.
            IntPtr newHwnd;
            Assert.IsTrue(Microsoft.VisualStudio.ErrorHandler.Succeeded(
                ((IVsWindowPane)windowPane).CreatePaneWindow(IntPtr.Zero, 0, 0, 0, 0, out newHwnd)));
            // Now we have to set the frame property on the ToolWindowFrame because
            // this will cause the execution of OnToolWindowCreated.
            windowPane.Frame = (IVsWindowFrame)MockFactories.WindowFrameFactory.GetInstance();
            // Now the command filter should be set for the text view
            Assert.IsNotNull(textViewMock["CommandFilter"]);
            // The command target for the window pane should also be able to support
            // the text view specific command that we have installed.
            // Verify a few commands that are always supported.
            Assert.IsTrue(commandHelper.IsCommandSupported(
                typeof(Microsoft.VisualStudio.VSConstants.VSStd2KCmdID).GUID,
                (int)Microsoft.VisualStudio.VSConstants.VSStd2KCmdID.RETURN,
                out flags));
            Assert.IsTrue(commandHelper.IsCommandSupported(
                typeof(Microsoft.VisualStudio.VSConstants.VSStd2KCmdID).GUID,
                (int)Microsoft.VisualStudio.VSConstants.VSStd2KCmdID.BOL,
                out flags));
            Assert.IsTrue(commandHelper.IsCommandSupported(
                typeof(Microsoft.VisualStudio.VSConstants.VSStd2KCmdID).GUID,
                (int)Microsoft.VisualStudio.VSConstants.VSStd2KCmdID.SHOWCONTEXTMENU,
                out flags));
            // Verify that also the commands supported by the original command target are
            // supported by the new one.
            Assert.IsTrue(commandHelper.IsCommandSupported(newCommandGroup, newCommandId, out flags));
        }
    }
}
/// <summary>
/// Verifies that after every write to the console stream the read-only
/// marker is reset to cover the whole buffer: ResetSpan must be called and
/// the stored span must run from (0,0) to the buffer end reported by
/// GetLastLineIndex.
/// </summary>
[TestMethod]
public void ReadOnlyRegionAfterWrite()
{
    using (OleServiceProvider provider = new OleServiceProvider())
    {
        // Create a mock text buffer for the console.
        BaseMock textLinesMock = MockFactories.CreateBufferWithMarker();
        // Add the buffer to the local registry.
        LocalRegistryMock mockLocalRegistry = new LocalRegistryMock();
        mockLocalRegistry.AddClass(typeof(VsTextBufferClass), textLinesMock);
        // Add the local registry to the list of services.
        provider.AddService(typeof(SLocalRegistry), mockLocalRegistry, false);
        // Create the console window.
        using (ToolWindowPane windowPane = CommandWindowHelper.CreateConsoleWindow(provider) as ToolWindowPane)
        {
            // Get the stream from the window pane.
            System.IO.Stream consoleStream = CommandWindowHelper.ConsoleStream(windowPane);
            Assert.IsNotNull(consoleStream);
            // Set a return value for GetLastLineIndex
            textLinesMock.AddMethodReturnValues(
                string.Format("{0}.{1}", typeof(IVsTextLines).FullName, "GetLastLineIndex"),
                new object[] { 0, 12, 35 });
            // Write some text on the stream. The writer is deliberately not
            // disposed here: disposing it would also close consoleStream,
            // which is still owned by the window pane (NOTE(review): assumed
            // intent — confirm against the console stream implementation).
            System.IO.StreamWriter writer = new System.IO.StreamWriter(consoleStream);
            writer.Write("");
            writer.Flush();
            // Verify that the ResetSpan method for the text marker was called and that
            // the span is set to cover all the current buffer.
            BaseMock markerMock = (BaseMock)textLinesMock["LineMarker"];
            Assert.IsTrue(1 == markerMock.FunctionCalls(string.Format("{0}.{1}", typeof(IVsTextLineMarker).FullName, "ResetSpan")));
            TextSpan span = (TextSpan)markerMock["Span"];
            Assert.IsTrue(0 == span.iStartLine);
            Assert.IsTrue(0 == span.iStartIndex);
            Assert.IsTrue(12 == span.iEndLine);
            Assert.IsTrue(35 == span.iEndIndex);
            // Change the end point of the buffer and try again.
            textLinesMock.AddMethodReturnValues(
                string.Format("{0}.{1}", typeof(IVsTextLines).FullName, "GetLastLineIndex"),
                new object[] { 0, 15, 3 });
            writer.Write("abc");
            writer.Flush();
            Assert.IsTrue(2 == markerMock.FunctionCalls(string.Format("{0}.{1}", typeof(IVsTextLineMarker).FullName, "ResetSpan")));
            span = (TextSpan)markerMock["Span"];
            Assert.IsTrue(0 == span.iStartLine);
            Assert.IsTrue(0 == span.iStartIndex);
            Assert.IsTrue(15 == span.iEndLine);
            Assert.IsTrue(3 == span.iEndIndex);
        }
    }
}
/// <summary>
/// Mock callback for IVsTextLines.GetLineText: asserts that the requested
/// region matches the "ExpectedLine" / "ExpectedStart" / "ExpectedEnd" values
/// stored on the mock (start and end line must be the same line), then
/// returns the "LineText" value through the out parameter.
/// </summary>
private static void GetLineTextCallbackForConsoleTextOfLine(object sender, CallbackArgs args)
{
    BaseMock mock = (BaseMock)sender;
    int expectedLine = (int)mock["ExpectedLine"];
    Assert.IsTrue(expectedLine == (int)args.GetParameter(0));
    Assert.IsTrue(expectedLine == (int)args.GetParameter(2));
    int expectedStart = (int)mock["ExpectedStart"];
    Assert.IsTrue(expectedStart == (int)args.GetParameter(1));
    int expectedEnd = (int)mock["ExpectedEnd"];
    Assert.IsTrue(expectedEnd == (int)args.GetParameter(3));
    args.SetParameter(4, (string)mock["LineText"]);
    args.ReturnValue = Microsoft.VisualStudio.VSConstants.S_OK;
}
/// <summary>
/// Verifies IConsoleText.TextOfLine when no read-only marker exists:
/// a negative end column returns null for both values of the read-only flag,
/// while a valid request returns the text supplied by the buffer mock.
/// </summary>
[TestMethod]
public void ConsoleTextOfLineNoMarker()
{
    string testString = "Test";
    using (OleServiceProvider provider = new OleServiceProvider())
    {
        BaseMock textLinesMock = MockFactories.TextBufferFactory.GetInstance();
        textLinesMock.AddMethodCallback(
            string.Format("{0}.{1}", typeof(IVsTextLines).FullName, "GetLineText"),
            new EventHandler<CallbackArgs>(GetLineTextCallbackForConsoleTextOfLine));
        // Values the GetLineText callback will assert against and return.
        textLinesMock["LineText"] = testString;
        textLinesMock["ExpectedLine"] = 1;
        textLinesMock["ExpectedStart"] = 0;
        textLinesMock["ExpectedEnd"] = 10;
        // Create a new local registry class.
        LocalRegistryMock mockRegistry = new LocalRegistryMock();
        // Add the text buffer to the list of the classes that local registry can create.
        mockRegistry.AddClass(typeof(VsTextBufferClass), textLinesMock);
        // Add the local registry to the service provider.
        provider.AddService(typeof(SLocalRegistry), mockRegistry, false);
        // Create the console.
        using (ToolWindowPane windowPane = CommandWindowHelper.CreateConsoleWindow(provider) as ToolWindowPane)
        {
            IConsoleText consoleText = windowPane as IConsoleText;
            Assert.IsNull(consoleText.TextOfLine(1, -1, true));
            Assert.IsNull(consoleText.TextOfLine(1, -1, false));
            string text = consoleText.TextOfLine(1, 10, false);
            Assert.IsTrue(testString == text);
        }
    }
}
/// <summary>
/// Verifies IConsoleText.TextOfLine when a read-only marker covers the span
/// (0,0)-(3,5): requests fully inside the read-only region return null when
/// read-only text is excluded; requests straddling the boundary return only
/// the writable part; requests past the region are unaffected.
/// </summary>
[TestMethod]
public void ConsoleTextOfLineWithMarker()
{
    string testString1 = "Test 1";
    string testString2 = "Test 2";
    using (OleServiceProvider provider = new OleServiceProvider())
    {
        BaseMock textLinesMock = MockFactories.CreateBufferWithMarker();
        textLinesMock.AddMethodCallback(
            string.Format("{0}.{1}", typeof(IVsTextLines).FullName, "GetLineText"),
            new EventHandler<CallbackArgs>(GetLineTextCallbackForConsoleTextOfLine));
        // Create a new local registry class.
        LocalRegistryMock mockRegistry = new LocalRegistryMock();
        // Add the text buffer to the list of the classes that local registry can create.
        mockRegistry.AddClass(typeof(VsTextBufferClass), textLinesMock);
        // Add the local registry to the service provider.
        provider.AddService(typeof(SLocalRegistry), mockRegistry, false);
        // Create the console.
        using (ToolWindowPane windowPane = CommandWindowHelper.CreateConsoleWindow(provider) as ToolWindowPane)
        {
            // Make sure that the text marker is created.
            CommandWindowHelper.EnsureConsoleTextMarker(windowPane);
            // Set the span of the marker: read-only region is (0,0) to (3,5).
            TextSpan span = new TextSpan();
            span.iStartLine = 0;
            span.iStartIndex = 0;
            span.iEndLine = 3;
            span.iEndIndex = 5;
            BaseMock markerMock = (BaseMock)textLinesMock["LineMarker"];
            markerMock["Span"] = span;
            IConsoleText consoleText = windowPane as IConsoleText;
            // Verify the case that the requested line is all inside the
            // read only region.
            textLinesMock["LineText"] = testString1;
            textLinesMock["ExpectedLine"] = 1;
            textLinesMock["ExpectedStart"] = 0;
            textLinesMock["ExpectedEnd"] = 10;
            Assert.IsNull(consoleText.TextOfLine(1, 10, true));
            string text = consoleText.TextOfLine(1, 10, false);
            Assert.IsTrue(text == testString1);
            // Now ask for some text inside the read-only region, but on its last line.
            textLinesMock["LineText"] = testString2;
            textLinesMock["ExpectedLine"] = 3;
            textLinesMock["ExpectedStart"] = 0;
            textLinesMock["ExpectedEnd"] = 4;
            Assert.IsNull(consoleText.TextOfLine(3, 4, true));
            text = consoleText.TextOfLine(3, 4, false);
            Assert.IsTrue(text == testString2);
            // Now the text is part inside and part outside the read-only region.
            textLinesMock["LineText"] = testString1;
            textLinesMock["ExpectedLine"] = 3;
            textLinesMock["ExpectedStart"] = 5;
            textLinesMock["ExpectedEnd"] = 10;
            text = consoleText.TextOfLine(3, 10, true);
            Assert.IsTrue(testString1 == text);
            textLinesMock["LineText"] = testString2;
            textLinesMock["ExpectedLine"] = 3;
            textLinesMock["ExpectedStart"] = 0;
            textLinesMock["ExpectedEnd"] = 10;
            text = consoleText.TextOfLine(3, 10, false);
            Assert.IsTrue(text == testString2);
            // Now the line has no intersection with the read-only region.
            textLinesMock["LineText"] = testString1;
            textLinesMock["ExpectedLine"] = 4;
            textLinesMock["ExpectedStart"] = 0;
            textLinesMock["ExpectedEnd"] = 10;
            text = consoleText.TextOfLine(4, 10, true);
            Assert.IsTrue(testString1 == text);
            textLinesMock["LineText"] = testString2;
            textLinesMock["ExpectedLine"] = 4;
            textLinesMock["ExpectedStart"] = 0;
            textLinesMock["ExpectedEnd"] = 10;
            text = consoleText.TextOfLine(4, 10, false);
            Assert.IsTrue(text == testString2);
        }
    }
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Text;
using log4net.Config;
using NUnit.Framework;
using NUnit.Framework.SyntaxHelpers;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using log4net;
using System.Reflection;
using System.Data.Common;
// DBMS-specific:
using MySql.Data.MySqlClient;
using OpenSim.Data.MySQL;
using System.Data.SqlClient;
using OpenSim.Data.MSSQL;
using Mono.Data.Sqlite;
using OpenSim.Data.SQLite;
namespace OpenSim.Data.Tests
{
#if NUNIT25
[TestFixture(typeof(SqliteConnection), typeof(SQLiteRegionData), Description = "Region store tests (SQLite)")]
[TestFixture(typeof(MySqlConnection), typeof(MySqlRegionData), Description = "Region store tests (MySQL)")]
[TestFixture(typeof(SqlConnection), typeof(MSSQLRegionData), Description = "Region store tests (MS SQL Server)")]
#else
/// <summary>Runs the shared region store test suite against SQLite.</summary>
[TestFixture(Description = "Region store tests (SQLite)")]
public class SQLiteRegionTests : RegionTests<SqliteConnection, SQLiteSimulationData>
{
}
/// <summary>Runs the shared region store test suite against MySQL.</summary>
[TestFixture(Description = "Region store tests (MySQL)")]
public class MySqlRegionTests : RegionTests<MySqlConnection, MySQLSimulationData>
{
}
/// <summary>Runs the shared region store test suite against MS SQL Server.</summary>
[TestFixture(Description = "Region store tests (MS SQL Server)")]
public class MSSQLRegionTests : RegionTests<SqlConnection, MSSQLSimulationData>
{
}
#endif
public class RegionTests<TConn, TRegStore> : BasicDataServiceTest<TConn, TRegStore>
where TConn : DbConnection, new()
where TRegStore : class, ISimulationDataStore, new()
{
// When true the region tables are dropped and migrations reset before the
// run instead of just being cleared (set from the constructor).
bool m_rebuildDB;
// The simulation data store under test; assigned in InitService.
public ISimulationDataStore db;
// Well-known and random UUIDs shared by the test methods.
public UUID zero = UUID.Zero;
public UUID region1 = UUID.Random();
public UUID region2 = UUID.Random();
public UUID region3 = UUID.Random();
public UUID region4 = UUID.Random();
public UUID prim1 = UUID.Random();
public UUID prim2 = UUID.Random();
public UUID prim3 = UUID.Random();
public UUID prim4 = UUID.Random();
public UUID prim5 = UUID.Random();
public UUID prim6 = UUID.Random();
public UUID item1 = UUID.Random();
public UUID item2 = UUID.Random();
public UUID item3 = UUID.Random();
public static Random random = new Random();
// Miscellaneous fixture data used by the prim and terrain tests.
public string itemname1 = "item1";
public uint localID = 1;
public double height1 = 20;
public double height2 = 100;
/// <summary>
/// Creates the fixture with an explicit connection string and a flag that
/// controls whether ClearDB() drops and re-migrates the region tables
/// (<c>true</c>) or merely empties them (<c>false</c>).
/// </summary>
public RegionTests(string conn, bool rebuild)
    : base(conn)
{
    m_rebuildDB = rebuild;
}
// Convenience overloads: default connection string and/or full rebuild.
public RegionTests() : this("", true) { }
public RegionTests(string conn) : this(conn, true) {}
public RegionTests(bool rebuild): this("", rebuild) {}
/// <summary>
/// Called by the base fixture with the freshly created store service:
/// wipes any leftover state first, then initialises the store against the
/// configured connection string.
/// </summary>
protected override void InitService(object service)
{
    ClearDB();
    db = (ISimulationDataStore)service;
    db.Initialise(m_connStr);
}
/// <summary>
/// Wipes all region-store state: either drops the tables outright (forcing
/// the migration framework to recreate the schema) or just empties them,
/// depending on how the fixture was constructed.
/// </summary>
private void ClearDB()
{
    string[] tables =
    {
        "prims", "primshapes", "primitems", "terrain",
        "land", "landaccesslist", "regionban", "regionsettings"
    };

    if (!m_rebuildDB)
    {
        ClearTables(tables);
        return;
    }

    DropTables(tables);
    ResetMigrations("RegionStore");
}
// Test Plan
// Prims
// - empty test - 001
// - store / retrieve basic prims (most minimal we can make) - 010, 011
// - store / retrieve parts in a scenegroup 012
// - store a prim with complete information for consistency check 013
// - update existing prims, make sure it sticks - 014
// - tests empty inventory - 020
// - add inventory items to prims - 021
// - retrieves the added item - 022
// - update inventory items to prims - 023
// - remove inventory items, make sure it sticks - 024
// - checks if all parameters are persistent - 025
// - adds many items and see if it is handled correctly - 026
[Test]
public void T001_LoadEmpty()
{
    // A freshly initialised store must contain no objects and no parcels.
    List<SceneObjectGroup> objectsRegion1 = db.LoadObjects(region1);
    List<SceneObjectGroup> objectsRegion3 = db.LoadObjects(region3);
    List<LandData> parcels = db.LoadLandObjects(region1);

    Assert.That(objectsRegion1.Count, Is.EqualTo(0), "Assert.That(objs.Count, Is.EqualTo(0))");
    Assert.That(objectsRegion3.Count, Is.EqualTo(0), "Assert.That(objs3.Count, Is.EqualTo(0))");
    Assert.That(parcels.Count, Is.EqualTo(0), "Assert.That(land.Count, Is.EqualTo(0))");
}
// SOG round trips
// * store objects, make sure they save
// * update
[Test]
public void T010_StoreSimpleObject()
{
    // Store two minimal objects and confirm both come back from the region.
    SceneObjectGroup sog = NewSOG("object1", prim1, region1);
    SceneObjectGroup sog2 = NewSOG("object2", prim2, region1);
    // In case the objects don't store, fail with the exception details rather
    // than a bare Assert.Fail() (the original discarded the failure reason).
    try
    {
        db.StoreObject(sog, region1);
    }
    catch (Exception e)
    {
        m_log.Error(e.ToString());
        Assert.Fail("StoreObject failed for sog: " + e.Message);
    }
    try
    {
        db.StoreObject(sog2, region1);
    }
    catch (Exception e)
    {
        m_log.Error(e.ToString());
        Assert.Fail("StoreObject failed for sog2: " + e.Message);
    }
    // This tests the ADO.NET driver
    List<SceneObjectGroup> objs = db.LoadObjects(region1);
    Assert.That(objs.Count, Is.EqualTo(2), "Assert.That(objs.Count, Is.EqualTo(2))");
}
[Test]
public void T011_ObjectNames()
{
    // Every stored root part should carry a non-empty name, and the test
    // fixtures create objects whose Description equals their Name.
    foreach (SceneObjectGroup group in db.LoadObjects(region1))
    {
        SceneObjectPart root = group.RootPart;
        Assert.That("", Is.Not.EqualTo(root.Name), "Assert.That(\"\", Is.Not.EqualTo(p.Name))");
        Assert.That(root.Name, Is.EqualTo(root.Description), "Assert.That(p.Name, Is.EqualTo(p.Description))");
    }
}
[Test]
public void T012_SceneParts()
{
    // Build a group of one root plus three child parts in a brand-new
    // region, store it, and verify the whole link-set round-trips.
    UUID rootId = UUID.Random();
    UUID childId1 = UUID.Random();
    UUID childId2 = UUID.Random();
    UUID childId3 = UUID.Random();
    UUID newregion = UUID.Random();

    SceneObjectPart childOne = NewSOP("SoP 1", childId1);
    SceneObjectPart childTwo = NewSOP("SoP 2", childId2);
    SceneObjectPart childThree = NewSOP("SoP 3", childId3);
    SceneObjectGroup sog = NewSOG("Sop 0", rootId, newregion);
    sog.AddPart(childOne);
    sog.AddPart(childTwo);
    sog.AddPart(childThree);

    Assert.That(sog.Parts.Length, Is.EqualTo(4), "Assert.That(parts.Length,Is.EqualTo(4))");

    db.StoreObject(sog, newregion);
    List<SceneObjectGroup> sogs = db.LoadObjects(newregion);
    Assert.That(sogs.Count, Is.EqualTo(1), "Assert.That(sogs.Count,Is.EqualTo(1))");

    SceneObjectGroup newsog = sogs[0];
    Assert.That(newsog.Parts.Length, Is.EqualTo(4), "Assert.That(newparts.Length,Is.EqualTo(4))");
    Assert.That(newsog.HasChildPrim(rootId), "Assert.That(newsog.HasChildPrim(tmp0))");
    Assert.That(newsog.HasChildPrim(childId1), "Assert.That(newsog.HasChildPrim(tmp1))");
    Assert.That(newsog.HasChildPrim(childId2), "Assert.That(newsog.HasChildPrim(tmp2))");
    Assert.That(newsog.HasChildPrim(childId3), "Assert.That(newsog.HasChildPrim(tmp3))");
}
[Test]
public void T013_DatabasePersistency()
{
    // Sets all SceneObjectPart parameters, stores and retrieves them, then checks for
    // consistency with the initial data.
    // The commented Asserts are the ones that are unchangeable (when storing on the
    // database, their "Set" values are ignored).
    // The ObjectFlags is an exception: if it is entered incorrectly, the object IS
    // REJECTED on the database silently.
    UUID creator = UUID.Random();
    uint iserial = (uint)random.Next();
    TaskInventoryDictionary dic = new TaskInventoryDictionary();
    uint objf = (uint) random.Next();
    UUID uuid = prim4;
    uint localid = localID+1;
    localID = localID + 1;
    string name = "Adam West";
    byte material = (byte) random.Next(127);
    // BUGFIX: the original expression "(ulong)random.NextDouble() * (ulong)random.Next()"
    // truncated NextDouble() (always in [0,1)) to 0, so regionh was always 0 and the
    // round-trip assertion was vacuous. Multiply first, then cast.
    ulong regionh = (ulong)(random.NextDouble() * random.Next());
    int pin = random.Next();
    Byte[] partsys = new byte[8];
    Byte[] textani = new byte[8];
    random.NextBytes(textani);
    random.NextBytes(partsys);
    DateTime expires = new DateTime(2008, 12, 20);
    DateTime rezzed = new DateTime(2009, 07, 15);
    Vector3 groupos = new Vector3(random.Next(),random.Next(),random.Next());
    Vector3 offset = new Vector3(random.Next(),random.Next(),random.Next());
    Quaternion rotoff = new Quaternion(random.Next(),random.Next(),random.Next(),random.Next());
    Vector3 velocity = new Vector3(random.Next(),random.Next(),random.Next());
    Vector3 angvelo = new Vector3(random.Next(),random.Next(),random.Next());
    Vector3 accel = new Vector3(random.Next(),random.Next(),random.Next());
    string description = name;
    Color color = Color.FromArgb(255, 165, 50, 100);
    string text = "All Your Base Are Belong to Us";
    string sitname = "SitName";
    string touchname = "TouchName";
    int linknum = random.Next();
    byte clickaction = (byte) random.Next(127);
    // PrimitiveBaseShape.Default already yields a usable shape; the original allocated
    // a throwaway instance first and immediately overwrote it.
    PrimitiveBaseShape pbshap = PrimitiveBaseShape.Default;
    pbshap.PathBegin = ushort.MaxValue;
    pbshap.PathEnd = ushort.MaxValue;
    pbshap.ProfileBegin = ushort.MaxValue;
    pbshap.ProfileEnd = ushort.MaxValue;
    pbshap.ProfileHollow = ushort.MaxValue;
    Vector3 scale = new Vector3(random.Next(),random.Next(),random.Next());
    byte updatef = (byte) random.Next(127);
    RegionInfo regionInfo = new RegionInfo();
    regionInfo.RegionID = region3;
    regionInfo.RegionLocX = 0;
    regionInfo.RegionLocY = 0;
    // Scene scene = new Scene(regionInfo);
    // Populate a part with every value generated above.
    SceneObjectPart sop = new SceneObjectPart();
    sop.RegionHandle = regionh;
    sop.UUID = uuid;
    sop.LocalId = localid;
    sop.Shape = pbshap;
    sop.GroupPosition = groupos;
    sop.RotationOffset = rotoff;
    sop.CreatorID = creator;
    sop.InventorySerial = iserial;
    sop.TaskInventory = dic;
    sop.Flags = (PrimFlags)objf;
    sop.Name = name;
    sop.Material = material;
    sop.ScriptAccessPin = pin;
    sop.TextureAnimation = textani;
    sop.ParticleSystem = partsys;
    sop.Expires = expires;
    sop.Rezzed = rezzed;
    sop.OffsetPosition = offset;
    sop.Velocity = velocity;
    sop.AngularVelocity = angvelo;
    sop.Acceleration = accel;
    sop.Description = description;
    sop.Color = color;
    sop.Text = text;
    sop.SitName = sitname;
    sop.TouchName = touchname;
    sop.LinkNum = linknum;
    sop.ClickAction = clickaction;
    sop.Scale = scale;
    sop.UpdateFlag = updatef;
    //Tests if local part accepted the parameters:
    Assert.That(regionh,Is.EqualTo(sop.RegionHandle), "Assert.That(regionh,Is.EqualTo(sop.RegionHandle))");
    Assert.That(localid,Is.EqualTo(sop.LocalId), "Assert.That(localid,Is.EqualTo(sop.LocalId))");
    Assert.That(groupos,Is.EqualTo(sop.GroupPosition), "Assert.That(groupos,Is.EqualTo(sop.GroupPosition))");
    Assert.That(name,Is.EqualTo(sop.Name), "Assert.That(name,Is.EqualTo(sop.Name))");
    Assert.That(rotoff,Is.EqualTo(sop.RotationOffset), "Assert.That(rotoff,Is.EqualTo(sop.RotationOffset))");
    Assert.That(uuid,Is.EqualTo(sop.UUID), "Assert.That(uuid,Is.EqualTo(sop.UUID))");
    Assert.That(creator,Is.EqualTo(sop.CreatorID), "Assert.That(creator,Is.EqualTo(sop.CreatorID))");
    // Modified in-class
    // Assert.That(iserial,Is.EqualTo(sop.InventorySerial), "Assert.That(iserial,Is.EqualTo(sop.InventorySerial))");
    Assert.That(dic,Is.EqualTo(sop.TaskInventory), "Assert.That(dic,Is.EqualTo(sop.TaskInventory))");
    Assert.That(objf, Is.EqualTo((uint)sop.Flags), "Assert.That(objf,Is.EqualTo(sop.Flags))");
    Assert.That(name,Is.EqualTo(sop.Name), "Assert.That(name,Is.EqualTo(sop.Name))");
    Assert.That(material,Is.EqualTo(sop.Material), "Assert.That(material,Is.EqualTo(sop.Material))");
    Assert.That(pin,Is.EqualTo(sop.ScriptAccessPin), "Assert.That(pin,Is.EqualTo(sop.ScriptAccessPin))");
    Assert.That(textani,Is.EqualTo(sop.TextureAnimation), "Assert.That(textani,Is.EqualTo(sop.TextureAnimation))");
    Assert.That(partsys,Is.EqualTo(sop.ParticleSystem), "Assert.That(partsys,Is.EqualTo(sop.ParticleSystem))");
    Assert.That(expires,Is.EqualTo(sop.Expires), "Assert.That(expires,Is.EqualTo(sop.Expires))");
    Assert.That(rezzed,Is.EqualTo(sop.Rezzed), "Assert.That(rezzed,Is.EqualTo(sop.Rezzed))");
    Assert.That(offset,Is.EqualTo(sop.OffsetPosition), "Assert.That(offset,Is.EqualTo(sop.OffsetPosition))");
    Assert.That(velocity,Is.EqualTo(sop.Velocity), "Assert.That(velocity,Is.EqualTo(sop.Velocity))");
    Assert.That(angvelo,Is.EqualTo(sop.AngularVelocity), "Assert.That(angvelo,Is.EqualTo(sop.AngularVelocity))");
    Assert.That(accel,Is.EqualTo(sop.Acceleration), "Assert.That(accel,Is.EqualTo(sop.Acceleration))");
    Assert.That(description,Is.EqualTo(sop.Description), "Assert.That(description,Is.EqualTo(sop.Description))");
    Assert.That(color,Is.EqualTo(sop.Color), "Assert.That(color,Is.EqualTo(sop.Color))");
    Assert.That(text,Is.EqualTo(sop.Text), "Assert.That(text,Is.EqualTo(sop.Text))");
    Assert.That(sitname,Is.EqualTo(sop.SitName), "Assert.That(sitname,Is.EqualTo(sop.SitName))");
    Assert.That(touchname,Is.EqualTo(sop.TouchName), "Assert.That(touchname,Is.EqualTo(sop.TouchName))");
    Assert.That(linknum,Is.EqualTo(sop.LinkNum), "Assert.That(linknum,Is.EqualTo(sop.LinkNum))");
    Assert.That(clickaction,Is.EqualTo(sop.ClickAction), "Assert.That(clickaction,Is.EqualTo(sop.ClickAction))");
    Assert.That(scale,Is.EqualTo(sop.Scale), "Assert.That(scale,Is.EqualTo(sop.Scale))");
    Assert.That(updatef,Is.EqualTo(sop.UpdateFlag), "Assert.That(updatef,Is.EqualTo(sop.UpdateFlag))");
    // This is necessary or object will not be inserted in DB
    sop.Flags = PrimFlags.None;
    SceneObjectGroup sog = new SceneObjectGroup(sop);
    // Inserts group in DB
    db.StoreObject(sog,region3);
    List<SceneObjectGroup> sogs = db.LoadObjects(region3);
    Assert.That(sogs.Count, Is.EqualTo(1), "Assert.That(sogs.Count, Is.EqualTo(1))");
    // Makes sure there are no double insertions:
    db.StoreObject(sog,region3);
    sogs = db.LoadObjects(region3);
    Assert.That(sogs.Count, Is.EqualTo(1), "Assert.That(sogs.Count, Is.EqualTo(1))");
    // Tests if the parameters were inserted correctly
    SceneObjectPart p = sogs[0].RootPart;
    Assert.That(regionh,Is.EqualTo(p.RegionHandle), "Assert.That(regionh,Is.EqualTo(p.RegionHandle))");
    //Assert.That(localid,Is.EqualTo(p.LocalId), "Assert.That(localid,Is.EqualTo(p.LocalId))");
    Assert.That(groupos,Is.EqualTo(p.GroupPosition), "Assert.That(groupos,Is.EqualTo(p.GroupPosition))");
    Assert.That(name,Is.EqualTo(p.Name), "Assert.That(name,Is.EqualTo(p.Name))");
    Assert.That(rotoff,Is.EqualTo(p.RotationOffset), "Assert.That(rotoff,Is.EqualTo(p.RotationOffset))");
    Assert.That(uuid,Is.EqualTo(p.UUID), "Assert.That(uuid,Is.EqualTo(p.UUID))");
    Assert.That(creator,Is.EqualTo(p.CreatorID), "Assert.That(creator,Is.EqualTo(p.CreatorID))");
    //Assert.That(iserial,Is.EqualTo(p.InventorySerial), "Assert.That(iserial,Is.EqualTo(p.InventorySerial))");
    Assert.That(dic,Is.EqualTo(p.TaskInventory), "Assert.That(dic,Is.EqualTo(p.TaskInventory))");
    //Assert.That(objf, Is.EqualTo((uint)p.Flags), "Assert.That(objf,Is.EqualTo(p.Flags))");
    Assert.That(name,Is.EqualTo(p.Name), "Assert.That(name,Is.EqualTo(p.Name))");
    Assert.That(material,Is.EqualTo(p.Material), "Assert.That(material,Is.EqualTo(p.Material))");
    Assert.That(pin,Is.EqualTo(p.ScriptAccessPin), "Assert.That(pin,Is.EqualTo(p.ScriptAccessPin))");
    Assert.That(textani,Is.EqualTo(p.TextureAnimation), "Assert.That(textani,Is.EqualTo(p.TextureAnimation))");
    Assert.That(partsys,Is.EqualTo(p.ParticleSystem), "Assert.That(partsys,Is.EqualTo(p.ParticleSystem))");
    //Assert.That(expires,Is.EqualTo(p.Expires), "Assert.That(expires,Is.EqualTo(p.Expires))");
    //Assert.That(rezzed,Is.EqualTo(p.Rezzed), "Assert.That(rezzed,Is.EqualTo(p.Rezzed))");
    Assert.That(offset,Is.EqualTo(p.OffsetPosition), "Assert.That(offset,Is.EqualTo(p.OffsetPosition))");
    Assert.That(velocity,Is.EqualTo(p.Velocity), "Assert.That(velocity,Is.EqualTo(p.Velocity))");
    Assert.That(angvelo,Is.EqualTo(p.AngularVelocity), "Assert.That(angvelo,Is.EqualTo(p.AngularVelocity))");
    Assert.That(accel,Is.EqualTo(p.Acceleration), "Assert.That(accel,Is.EqualTo(p.Acceleration))");
    Assert.That(description,Is.EqualTo(p.Description), "Assert.That(description,Is.EqualTo(p.Description))");
    Assert.That(color,Is.EqualTo(p.Color), "Assert.That(color,Is.EqualTo(p.Color))");
    Assert.That(text,Is.EqualTo(p.Text), "Assert.That(text,Is.EqualTo(p.Text))");
    Assert.That(sitname,Is.EqualTo(p.SitName), "Assert.That(sitname,Is.EqualTo(p.SitName))");
    Assert.That(touchname,Is.EqualTo(p.TouchName), "Assert.That(touchname,Is.EqualTo(p.TouchName))");
    //Assert.That(linknum,Is.EqualTo(p.LinkNum), "Assert.That(linknum,Is.EqualTo(p.LinkNum))");
    Assert.That(clickaction,Is.EqualTo(p.ClickAction), "Assert.That(clickaction,Is.EqualTo(p.ClickAction))");
    Assert.That(scale,Is.EqualTo(p.Scale), "Assert.That(scale,Is.EqualTo(p.Scale))");
    //Assert.That(updatef,Is.EqualTo(p.UpdateFlag), "Assert.That(updatef,Is.EqualTo(p.UpdateFlag))");
    Assert.That(pbshap.PathBegin, Is.EqualTo(p.Shape.PathBegin), "Assert.That(pbshap.PathBegin, Is.EqualTo(p.Shape.PathBegin))");
    Assert.That(pbshap.PathEnd, Is.EqualTo(p.Shape.PathEnd), "Assert.That(pbshap.PathEnd, Is.EqualTo(p.Shape.PathEnd))");
    Assert.That(pbshap.ProfileBegin, Is.EqualTo(p.Shape.ProfileBegin), "Assert.That(pbshap.ProfileBegin, Is.EqualTo(p.Shape.ProfileBegin))");
    Assert.That(pbshap.ProfileEnd, Is.EqualTo(p.Shape.ProfileEnd), "Assert.That(pbshap.ProfileEnd, Is.EqualTo(p.Shape.ProfileEnd))");
    Assert.That(pbshap.ProfileHollow, Is.EqualTo(p.Shape.ProfileHollow), "Assert.That(pbshap.ProfileHollow, Is.EqualTo(p.Shape.ProfileHollow))");
}
[Test]
public void T014_UpdateObject()
{
    // Updates a previously stored object with fresh random values and
    // verifies the changes survive a store/load round-trip.
    // NOTE(review): this test relies on T013 having stored "Adam West" in
    // region3 earlier in the run — see the ordering caveat further below.
    string text1 = "object1 text";
    SceneObjectGroup sog = FindSOG("object1", region1);
    sog.RootPart.Text = text1;
    db.StoreObject(sog, region1);
    sog = FindSOG("object1", region1);
    Assert.That(text1, Is.EqualTo(sog.RootPart.Text), "Assert.That(text1, Is.EqualTo(sog.RootPart.Text))");
    // Creates random values
    UUID creator = UUID.Random();
    TaskInventoryDictionary dic = new TaskInventoryDictionary();
    localID = localID + 1;
    string name = "West Adam";
    byte material = (byte) random.Next(127);
    // BUGFIX: "(ulong)random.NextDouble() * (ulong)random.Next()" truncated
    // NextDouble() (always < 1.0) to 0, so regionh was always 0. Multiply
    // first, then cast, so a genuinely random handle is exercised.
    ulong regionh = (ulong)(random.NextDouble() * random.Next());
    int pin = random.Next();
    Byte[] partsys = new byte[8];
    Byte[] textani = new byte[8];
    random.NextBytes(textani);
    random.NextBytes(partsys);
    DateTime expires = new DateTime(2010, 12, 20);
    DateTime rezzed = new DateTime(2005, 07, 15);
    Vector3 groupos = new Vector3(random.Next(),random.Next(),random.Next());
    Vector3 offset = new Vector3(random.Next(),random.Next(),random.Next());
    Quaternion rotoff = new Quaternion(random.Next(),random.Next(),random.Next(),random.Next());
    Vector3 velocity = new Vector3(random.Next(),random.Next(),random.Next());
    Vector3 angvelo = new Vector3(random.Next(),random.Next(),random.Next());
    Vector3 accel = new Vector3(random.Next(),random.Next(),random.Next());
    string description = name;
    Color color = Color.FromArgb(255, 255, 255, 0);
    string text = "What You Say?{]\vz~";
    string sitname = RandomName();
    string touchname = RandomName();
    int linknum = random.Next();
    byte clickaction = (byte) random.Next(127);
    // PrimitiveBaseShape.Default already yields a usable shape; the original
    // allocated a throwaway instance first and immediately overwrote it.
    PrimitiveBaseShape pbshap = PrimitiveBaseShape.Default;
    Vector3 scale = new Vector3(random.Next(),random.Next(),random.Next());
    byte updatef = (byte) random.Next(127);
    // Updates the region with new values
    SceneObjectGroup sog2 = FindSOG("Adam West", region3);
    Assert.That(sog2,Is.Not.Null);
    sog2.RootPart.RegionHandle = regionh;
    sog2.RootPart.Shape = pbshap;
    sog2.RootPart.GroupPosition = groupos;
    sog2.RootPart.RotationOffset = rotoff;
    sog2.RootPart.CreatorID = creator;
    sog2.RootPart.TaskInventory = dic;
    sog2.RootPart.Name = name;
    sog2.RootPart.Material = material;
    sog2.RootPart.ScriptAccessPin = pin;
    sog2.RootPart.TextureAnimation = textani;
    sog2.RootPart.ParticleSystem = partsys;
    sog2.RootPart.Expires = expires;
    sog2.RootPart.Rezzed = rezzed;
    sog2.RootPart.OffsetPosition = offset;
    sog2.RootPart.Velocity = velocity;
    sog2.RootPart.AngularVelocity = angvelo;
    sog2.RootPart.Acceleration = accel;
    sog2.RootPart.Description = description;
    sog2.RootPart.Color = color;
    sog2.RootPart.Text = text;
    sog2.RootPart.SitName = sitname;
    sog2.RootPart.TouchName = touchname;
    sog2.RootPart.LinkNum = linknum;
    sog2.RootPart.ClickAction = clickaction;
    sog2.RootPart.Scale = scale;
    sog2.RootPart.UpdateFlag = updatef;
    db.StoreObject(sog2, region3);
    List<SceneObjectGroup> sogs = db.LoadObjects(region3);
    Assert.That(sogs.Count, Is.EqualTo(1), "Assert.That(sogs.Count, Is.EqualTo(1))");
    SceneObjectGroup retsog = FindSOG("West Adam", region3);
    Assert.That(retsog,Is.Not.Null);
    SceneObjectPart p = retsog.RootPart;
    Assert.That(regionh,Is.EqualTo(p.RegionHandle), "Assert.That(regionh,Is.EqualTo(p.RegionHandle))");
    Assert.That(groupos,Is.EqualTo(p.GroupPosition), "Assert.That(groupos,Is.EqualTo(p.GroupPosition))");
    Assert.That(name,Is.EqualTo(p.Name), "Assert.That(name,Is.EqualTo(p.Name))");
    Assert.That(rotoff,Is.EqualTo(p.RotationOffset), "Assert.That(rotoff,Is.EqualTo(p.RotationOffset))");
    Assert.That(creator,Is.EqualTo(p.CreatorID), "Assert.That(creator,Is.EqualTo(p.CreatorID))");
    Assert.That(dic,Is.EqualTo(p.TaskInventory), "Assert.That(dic,Is.EqualTo(p.TaskInventory))");
    Assert.That(name,Is.EqualTo(p.Name), "Assert.That(name,Is.EqualTo(p.Name))");
    Assert.That(material,Is.EqualTo(p.Material), "Assert.That(material,Is.EqualTo(p.Material))");
    Assert.That(pin,Is.EqualTo(p.ScriptAccessPin), "Assert.That(pin,Is.EqualTo(p.ScriptAccessPin))");
    Assert.That(textani,Is.EqualTo(p.TextureAnimation), "Assert.That(textani,Is.EqualTo(p.TextureAnimation))");
    Assert.That(partsys,Is.EqualTo(p.ParticleSystem), "Assert.That(partsys,Is.EqualTo(p.ParticleSystem))");
    Assert.That(offset,Is.EqualTo(p.OffsetPosition), "Assert.That(offset,Is.EqualTo(p.OffsetPosition))");
    Assert.That(velocity,Is.EqualTo(p.Velocity), "Assert.That(velocity,Is.EqualTo(p.Velocity))");
    Assert.That(angvelo,Is.EqualTo(p.AngularVelocity), "Assert.That(angvelo,Is.EqualTo(p.AngularVelocity))");
    Assert.That(accel,Is.EqualTo(p.Acceleration), "Assert.That(accel,Is.EqualTo(p.Acceleration))");
    Assert.That(description,Is.EqualTo(p.Description), "Assert.That(description,Is.EqualTo(p.Description))");
    Assert.That(color,Is.EqualTo(p.Color), "Assert.That(color,Is.EqualTo(p.Color))");
    Assert.That(text,Is.EqualTo(p.Text), "Assert.That(text,Is.EqualTo(p.Text))");
    Assert.That(sitname,Is.EqualTo(p.SitName), "Assert.That(sitname,Is.EqualTo(p.SitName))");
    Assert.That(touchname,Is.EqualTo(p.TouchName), "Assert.That(touchname,Is.EqualTo(p.TouchName))");
    Assert.That(clickaction,Is.EqualTo(p.ClickAction), "Assert.That(clickaction,Is.EqualTo(p.ClickAction))");
    Assert.That(scale,Is.EqualTo(p.Scale), "Assert.That(scale,Is.EqualTo(p.Scale))");
}
[Test]
public void T015_LargeSceneObjects()
{
    // Grow a link-set to 31 parts (root + 30 children), persisting after every
    // added part, then verify each child's spatial state round-trips intact.
    UUID rootId = UUID.Random();
    Dictionary<UUID, SceneObjectPart> partsById = new Dictionary<UUID, SceneObjectPart>();
    SceneObjectGroup sog = NewSOG("Test SOG", rootId, region4);
    partsById.Add(sog.RootPart.UUID, sog.RootPart);

    for (int index = 0; index < 30; index++)
    {
        UUID partId = UUID.Random();
        SceneObjectPart part = NewSOP("Test SOP " + index.ToString(), partId);
        part.GroupPosition = new Vector3(random.Next(), random.Next(), random.Next());
        part.RotationOffset = new Quaternion(random.Next(), random.Next(), random.Next(), random.Next());
        part.OffsetPosition = new Vector3(random.Next(), random.Next(), random.Next());
        part.Velocity = new Vector3(random.Next(), random.Next(), random.Next());
        part.AngularVelocity = new Vector3(random.Next(), random.Next(), random.Next());
        part.Acceleration = new Vector3(random.Next(), random.Next(), random.Next());

        partsById.Add(partId, part);
        sog.AddPart(part);
        db.StoreObject(sog, region4);
    }

    SceneObjectGroup retsog = FindSOG("Test SOG", region4);
    SceneObjectPart[] parts = retsog.Parts;
    for (int i = 0; i < 30; i++)
    {
        SceneObjectPart cursop = partsById[parts[i].UUID];
        Assert.That(cursop.GroupPosition, Is.EqualTo(parts[i].GroupPosition), "Assert.That(cursop.GroupPosition,Is.EqualTo(parts[i].GroupPosition))");
        Assert.That(cursop.RotationOffset, Is.EqualTo(parts[i].RotationOffset), "Assert.That(cursop.RotationOffset,Is.EqualTo(parts[i].RotationOffset))");
        Assert.That(cursop.OffsetPosition, Is.EqualTo(parts[i].OffsetPosition), "Assert.That(cursop.OffsetPosition,Is.EqualTo(parts[i].OffsetPosition))");
        Assert.That(cursop.Velocity, Is.EqualTo(parts[i].Velocity), "Assert.That(cursop.Velocity,Is.EqualTo(parts[i].Velocity))");
        Assert.That(cursop.AngularVelocity, Is.EqualTo(parts[i].AngularVelocity), "Assert.That(cursop.AngularVelocity,Is.EqualTo(parts[i].AngularVelocity))");
        Assert.That(cursop.Acceleration, Is.EqualTo(parts[i].Acceleration), "Assert.That(cursop.Acceleration,Is.EqualTo(parts[i].Acceleration))");
    }
}
//[Test]
// NOTE: currently disabled — the [Test] attribute above is commented out.
public void T016_RandomSogWithSceneParts()
{
    // Scramble every property (except UUID) on three parts and a group,
    // persist the group, and deep-compare the reloaded copy against it.
    PropertyScrambler<SceneObjectPart> partScrambler =
        new PropertyScrambler<SceneObjectPart>()
            .DontScramble(x => x.UUID);
    UUID sogId = UUID.Random();
    UUID partId1 = UUID.Random();
    UUID partId2 = UUID.Random();
    UUID partId3 = UUID.Random();
    UUID targetRegion = UUID.Random();

    SceneObjectPart part1 = new SceneObjectPart();
    SceneObjectPart part2 = new SceneObjectPart();
    SceneObjectPart part3 = new SceneObjectPart();
    part1.Shape = PrimitiveBaseShape.Default;
    part1.UUID = partId1;
    part2.Shape = PrimitiveBaseShape.Default;
    part2.UUID = partId2;
    part3.Shape = PrimitiveBaseShape.Default;
    part3.UUID = partId3;
    partScrambler.Scramble(part1);
    partScrambler.Scramble(part2);
    partScrambler.Scramble(part3);

    SceneObjectGroup sog = NewSOG("Sop 0", sogId, targetRegion);
    PropertyScrambler<SceneObjectGroup> sogScrambler =
        new PropertyScrambler<SceneObjectGroup>()
            .DontScramble(x => x.UUID);
    sogScrambler.Scramble(sog);
    sog.UUID = sogId;
    sog.AddPart(part1);
    sog.AddPart(part2);
    sog.AddPart(part3);

    Assert.That(sog.Parts.Length, Is.EqualTo(4), "Assert.That(parts.Length,Is.EqualTo(4))");

    db.StoreObject(sog, targetRegion);
    List<SceneObjectGroup> loaded = db.LoadObjects(targetRegion);
    Assert.That(loaded.Count, Is.EqualTo(1), "Assert.That(sogs.Count,Is.EqualTo(1))");

    SceneObjectGroup newsog = loaded[0];
    Assert.That(newsog.Parts.Length, Is.EqualTo(4), "Assert.That(newparts.Length,Is.EqualTo(4))");

    Assert.That(newsog, Constraints.PropertyCompareConstraint(sog)
        .IgnoreProperty(x=>x.LocalId)
        .IgnoreProperty(x=>x.HasGroupChanged)
        .IgnoreProperty(x=>x.IsSelected)
        .IgnoreProperty(x=>x.RegionHandle)
        .IgnoreProperty(x=>x.RegionUUID)
        .IgnoreProperty(x=>x.Scene)
        .IgnoreProperty(x=>x.Parts)
        .IgnoreProperty(x=>x.PassCollision)
        .IgnoreProperty(x=>x.RootPart));
}
/// <summary>
/// Fetches the named object from region1, creating and persisting a fresh
/// one (keyed on prim1) when it does not exist yet.
/// </summary>
private SceneObjectGroup GetMySOG(string name)
{
    SceneObjectGroup sog = FindSOG(name, region1);
    if (sog != null)
        return sog;

    sog = NewSOG(name, prim1, region1);
    db.StoreObject(sog, region1);
    return sog;
}
// NOTE: it is a bad practice to rely on some of the previous tests having been run before.
// If the tests are run manually, one at a time, each starts with full class init (DB cleared).
// Even when all tests are run, NUnit 2.5+ no longer guarantee a specific test order.
// We shouldn't expect to find anything in the DB if we haven't put it there *in the same test*!
[Test]
public void T020_PrimInventoryEmpty()
{
    // A freshly created prim must not report any inventory item for item1.
    SceneObjectGroup sog = GetMySOG("object1");
    TaskInventoryItem fetched = sog.GetInventoryItem(sog.RootPart.LocalId, item1);
    Assert.That(fetched, Is.Null);
}
// TODO: Is there any point to call StorePrimInventory on a list, rather than on the prim itself?
/// <summary>
/// Flattens the root part's inventory into a list — the shape that
/// StorePrimInventory expects — and persists it.
/// </summary>
private void StoreInventory(SceneObjectGroup sog)
{
    List<TaskInventoryItem> items = new List<TaskInventoryItem>();
    foreach (UUID itemID in sog.RootPart.Inventory.GetInventoryList())
        items.Add(sog.GetInventoryItem(sog.RootPart.LocalId, itemID));

    db.StorePrimInventory(sog.RootPart.UUID, items);
}
[Test]
public void T021_PrimInventoryBasic()
{
    // Round-trip a single inventory item: add, reload, rename, reload
    // again, then clear the inventory and verify it is gone.
    SceneObjectGroup sog = GetMySOG("object1");
    InventoryItemBase newItem = NewItem(item1, zero, zero, itemname1, zero);
    Assert.That(sog.AddInventoryItem(null, sog.RootPart.LocalId, newItem, zero), Is.True);
    TaskInventoryItem stored = sog.GetInventoryItem(sog.RootPart.LocalId, item1);
    Assert.That(stored.Name, Is.EqualTo(itemname1), "Assert.That(t.Name, Is.EqualTo(itemname1))");

    StoreInventory(sog);

    SceneObjectGroup reloaded = FindSOG("object1", region1);
    Assert.That(reloaded, Is.Not.Null);
    TaskInventoryItem reloadedItem = reloaded.GetInventoryItem(reloaded.RootPart.LocalId, item1);
    Assert.That(reloadedItem, Is.Not.Null);
    Assert.That(reloadedItem.Name, Is.EqualTo(itemname1), "Assert.That(t.Name, Is.EqualTo(itemname1))");

    // Updating inventory
    reloadedItem.Name = "My New Name";
    reloaded.UpdateInventoryItem(reloadedItem);
    StoreInventory(reloaded);
    SceneObjectGroup renamed = FindSOG("object1", region1);
    TaskInventoryItem renamedItem = renamed.GetInventoryItem(renamed.RootPart.LocalId, item1);
    Assert.That(renamedItem.Name, Is.EqualTo("My New Name"), "Assert.That(t.Name, Is.EqualTo(\"My New Name\"))");

    // Removing inventory
    db.StorePrimInventory(prim1, new List<TaskInventoryItem>());
    sog = FindSOG("object1", region1);
    stored = sog.GetInventoryItem(sog.RootPart.LocalId, item1);
    Assert.That(stored, Is.Null);
}
[Test]
public void T025_PrimInventoryPersistency()
{
    // Builds an InventoryItemBase with every field set to a random value,
    // adds it to a prim, and checks which fields the resulting
    // TaskInventoryItem preserves. The commented asserts below mark fields
    // the conversion overrides with values taken from the owning part.
    InventoryItemBase i = new InventoryItemBase();
    UUID id = UUID.Random();
    i.ID = id;
    UUID folder = UUID.Random();
    i.Folder = folder;
    UUID owner = UUID.Random();
    i.Owner = owner;
    UUID creator = UUID.Random();
    i.CreatorId = creator.ToString();
    string name = RandomName();
    i.Name = name;
    i.Description = name;
    UUID assetid = UUID.Random();
    i.AssetID = assetid;
    int invtype = random.Next();
    i.InvType = invtype;
    uint nextperm = (uint) random.Next();
    i.NextPermissions = nextperm;
    uint curperm = (uint) random.Next();
    i.CurrentPermissions = curperm;
    uint baseperm = (uint) random.Next();
    i.BasePermissions = baseperm;
    uint eoperm = (uint) random.Next();
    i.EveryOnePermissions = eoperm;
    int assettype = random.Next();
    i.AssetType = assettype;
    UUID groupid = UUID.Random();
    i.GroupID = groupid;
    bool groupown = true;
    i.GroupOwned = groupown;
    int saleprice = random.Next();
    i.SalePrice = saleprice;
    byte saletype = (byte) random.Next(127);
    i.SaleType = saletype;
    uint flags = (uint) random.Next();
    i.Flags = flags;
    int creationd = random.Next();
    i.CreationDate = creationd;
    // Attach the item to the prim and read it back as a TaskInventoryItem.
    SceneObjectGroup sog = GetMySOG("object1");
    Assert.That(sog.AddInventoryItem(null, sog.RootPart.LocalId, i, zero), Is.True);
    TaskInventoryItem t = sog.GetInventoryItem(sog.RootPart.LocalId, id);
    // Fields carried over verbatim from the InventoryItemBase:
    Assert.That(t.Name, Is.EqualTo(name), "Assert.That(t.Name, Is.EqualTo(name))");
    Assert.That(t.AssetID,Is.EqualTo(assetid), "Assert.That(t.AssetID,Is.EqualTo(assetid))");
    Assert.That(t.BasePermissions,Is.EqualTo(baseperm), "Assert.That(t.BasePermissions,Is.EqualTo(baseperm))");
    Assert.That(t.CreationDate,Is.EqualTo(creationd), "Assert.That(t.CreationDate,Is.EqualTo(creationd))");
    Assert.That(t.CreatorID,Is.EqualTo(creator), "Assert.That(t.CreatorID,Is.EqualTo(creator))");
    Assert.That(t.Description,Is.EqualTo(name), "Assert.That(t.Description,Is.EqualTo(name))");
    Assert.That(t.EveryonePermissions,Is.EqualTo(eoperm), "Assert.That(t.EveryonePermissions,Is.EqualTo(eoperm))");
    Assert.That(t.Flags,Is.EqualTo(flags), "Assert.That(t.Flags,Is.EqualTo(flags))");
    // Fields the conversion takes from the owning root part instead:
    Assert.That(t.GroupID,Is.EqualTo(sog.RootPart.GroupID), "Assert.That(t.GroupID,Is.EqualTo(sog.RootPart.GroupID))");
    // Where is this group permissions??
    // Assert.That(t.GroupPermissions,Is.EqualTo(), "Assert.That(t.GroupPermissions,Is.EqualTo())");
    Assert.That(t.Type,Is.EqualTo(assettype), "Assert.That(t.Type,Is.EqualTo(assettype))");
    Assert.That(t.InvType, Is.EqualTo(invtype), "Assert.That(t.InvType, Is.EqualTo(invtype))");
    Assert.That(t.ItemID, Is.EqualTo(id), "Assert.That(t.ItemID, Is.EqualTo(id))");
    Assert.That(t.LastOwnerID, Is.EqualTo(sog.RootPart.LastOwnerID), "Assert.That(t.LastOwnerID, Is.EqualTo(sog.RootPart.LastOwnerID))");
    Assert.That(t.NextPermissions, Is.EqualTo(nextperm), "Assert.That(t.NextPermissions, Is.EqualTo(nextperm))");
    // Ownership changes when you drop an object into an object
    // owned by someone else
    Assert.That(t.OwnerID,Is.EqualTo(sog.RootPart.OwnerID), "Assert.That(t.OwnerID,Is.EqualTo(sog.RootPart.OwnerID))");
    // Assert.That(t.CurrentPermissions, Is.EqualTo(curperm | 16), "Assert.That(t.CurrentPermissions, Is.EqualTo(curperm | 8))");
    Assert.That(t.ParentID,Is.EqualTo(sog.RootPart.FolderID), "Assert.That(t.ParentID,Is.EqualTo(sog.RootPart.FolderID))");
    Assert.That(t.ParentPartID,Is.EqualTo(sog.RootPart.UUID), "Assert.That(t.ParentPartID,Is.EqualTo(sog.RootPart.UUID))");
}
[Test]
[ExpectedException(typeof(ArgumentException))]
public void T026_PrimInventoryMany()
{
    // Adds several items to one prim's inventory; i4 deliberately reuses
    // i3's id, and the attribute above declares that this duplicate add is
    // expected to raise ArgumentException.
    UUID i1,i2,i3,i4;
    i1 = UUID.Random();
    i2 = UUID.Random();
    i3 = UUID.Random();
    i4 = i3;
    InventoryItemBase ib1 = NewItem(i1, zero, zero, RandomName(), zero);
    InventoryItemBase ib2 = NewItem(i2, zero, zero, RandomName(), zero);
    InventoryItemBase ib3 = NewItem(i3, zero, zero, RandomName(), zero);
    InventoryItemBase ib4 = NewItem(i4, zero, zero, RandomName(), zero);
    SceneObjectGroup sog = FindSOG("object1", region1);
    Assert.That(sog.AddInventoryItem(null, sog.RootPart.LocalId, ib1, zero), Is.True);
    Assert.That(sog.AddInventoryItem(null, sog.RootPart.LocalId, ib2, zero), Is.True);
    Assert.That(sog.AddInventoryItem(null, sog.RootPart.LocalId, ib3, zero), Is.True);
    // Presumably this duplicate-id add is where the ArgumentException is
    // thrown; if it does throw, the asserts below never execute.
    // NOTE(review): confirm whether they are kept for a non-throwing path.
    Assert.That(sog.AddInventoryItem(null, sog.RootPart.LocalId, ib4, zero), Is.True);
    TaskInventoryItem t1 = sog.GetInventoryItem(sog.RootPart.LocalId, i1);
    Assert.That(t1.Name, Is.EqualTo(ib1.Name), "Assert.That(t1.Name, Is.EqualTo(ib1.Name))");
    TaskInventoryItem t2 = sog.GetInventoryItem(sog.RootPart.LocalId, i2);
    Assert.That(t2.Name, Is.EqualTo(ib2.Name), "Assert.That(t2.Name, Is.EqualTo(ib2.Name))");
    TaskInventoryItem t3 = sog.GetInventoryItem(sog.RootPart.LocalId, i3);
    Assert.That(t3.Name, Is.EqualTo(ib3.Name), "Assert.That(t3.Name, Is.EqualTo(ib3.Name))");
    TaskInventoryItem t4 = sog.GetInventoryItem(sog.RootPart.LocalId, i4);
    Assert.That(t4, Is.Null);
}
[Test]
public void T052_RemoveObject()
{
    // After removal, the object must no longer be loadable from the region.
    db.RemoveObject(prim1, region1);
    SceneObjectGroup removed = FindSOG("object1", region1);
    Assert.That(removed, Is.Null);
}
[Test]
public void T100_DefaultRegionInfo()
{
    // Loading settings for a region with no stored settings must return a
    // default record keyed to the requested region UUID.
    RegionSettings settingsOne = db.LoadRegionSettings(region1);
    Assert.That(settingsOne.RegionUUID, Is.EqualTo(region1), "Assert.That(r1.RegionUUID, Is.EqualTo(region1))");

    RegionSettings settingsTwo = db.LoadRegionSettings(region2);
    Assert.That(settingsTwo.RegionUUID, Is.EqualTo(region2), "Assert.That(r2.RegionUUID, Is.EqualTo(region2))");
}
[Test]
// Round-trips every RegionSettings field through StoreRegionSettings /
// LoadRegionSettings and asserts each one came back unchanged.
public void T101_UpdateRegionInfo()
{
// Randomised values so a stale row can't accidentally pass the test.
int agentlimit = random.Next();
double objectbonus = random.Next();
int maturity = random.Next();
UUID tertex1 = UUID.Random();
UUID tertex2 = UUID.Random();
UUID tertex3 = UUID.Random();
UUID tertex4 = UUID.Random();
double elev1nw = random.Next();
double elev2nw = random.Next();
double elev1ne = random.Next();
double elev2ne = random.Next();
double elev1se = random.Next();
double elev2se = random.Next();
double elev1sw = random.Next();
double elev2sw = random.Next();
double waterh = random.Next();
double terrainraise = random.Next();
double terrainlower = random.Next();
// Rounded to 5 decimals so a float column round-trip still compares equal.
Vector3 sunvector = new Vector3((float)Math.Round(random.NextDouble(),5),(float)Math.Round(random.NextDouble(),5),(float)Math.Round(random.NextDouble(),5));
UUID terimgid = UUID.Random();
double sunpos = random.Next();
UUID cov = UUID.Random();
// Mutate every field of the loaded settings, then store them back.
RegionSettings r1 = db.LoadRegionSettings(region1);
r1.BlockTerraform = true;
r1.BlockFly = true;
r1.AllowDamage = true;
r1.RestrictPushing = true;
r1.AllowLandResell = false;
r1.AllowLandJoinDivide = false;
r1.BlockShowInSearch = true;
r1.AgentLimit = agentlimit;
r1.ObjectBonus = objectbonus;
r1.Maturity = maturity;
r1.DisableScripts = true;
r1.DisableCollisions = true;
r1.DisablePhysics = true;
r1.TerrainTexture1 = tertex1;
r1.TerrainTexture2 = tertex2;
r1.TerrainTexture3 = tertex3;
r1.TerrainTexture4 = tertex4;
r1.Elevation1NW = elev1nw;
r1.Elevation2NW = elev2nw;
r1.Elevation1NE = elev1ne;
r1.Elevation2NE = elev2ne;
r1.Elevation1SE = elev1se;
r1.Elevation2SE = elev2se;
r1.Elevation1SW = elev1sw;
r1.Elevation2SW = elev2sw;
r1.WaterHeight = waterh;
r1.TerrainRaiseLimit = terrainraise;
r1.TerrainLowerLimit = terrainlower;
r1.UseEstateSun = false;
r1.Sandbox = true;
r1.SunVector = sunvector;
r1.TerrainImageID = terimgid;
r1.FixedSun = true;
r1.SunPosition = sunpos;
r1.Covenant = cov;
db.StoreRegionSettings(r1);
// Reload and verify every stored field survived the round trip.
RegionSettings r1a = db.LoadRegionSettings(region1);
Assert.That(r1a.RegionUUID, Is.EqualTo(region1), "Assert.That(r1a.RegionUUID, Is.EqualTo(region1))");
Assert.That(r1a.BlockTerraform,Is.True);
Assert.That(r1a.BlockFly,Is.True);
Assert.That(r1a.AllowDamage,Is.True);
Assert.That(r1a.RestrictPushing,Is.True);
Assert.That(r1a.AllowLandResell,Is.False);
Assert.That(r1a.AllowLandJoinDivide,Is.False);
Assert.That(r1a.BlockShowInSearch,Is.True);
Assert.That(r1a.AgentLimit,Is.EqualTo(agentlimit), "Assert.That(r1a.AgentLimit,Is.EqualTo(agentlimit))");
Assert.That(r1a.ObjectBonus,Is.EqualTo(objectbonus), "Assert.That(r1a.ObjectBonus,Is.EqualTo(objectbonus))");
Assert.That(r1a.Maturity,Is.EqualTo(maturity), "Assert.That(r1a.Maturity,Is.EqualTo(maturity))");
Assert.That(r1a.DisableScripts,Is.True);
Assert.That(r1a.DisableCollisions,Is.True);
Assert.That(r1a.DisablePhysics,Is.True);
Assert.That(r1a.TerrainTexture1,Is.EqualTo(tertex1), "Assert.That(r1a.TerrainTexture1,Is.EqualTo(tertex1))");
Assert.That(r1a.TerrainTexture2,Is.EqualTo(tertex2), "Assert.That(r1a.TerrainTexture2,Is.EqualTo(tertex2))");
Assert.That(r1a.TerrainTexture3,Is.EqualTo(tertex3), "Assert.That(r1a.TerrainTexture3,Is.EqualTo(tertex3))");
Assert.That(r1a.TerrainTexture4,Is.EqualTo(tertex4), "Assert.That(r1a.TerrainTexture4,Is.EqualTo(tertex4))");
Assert.That(r1a.Elevation1NW,Is.EqualTo(elev1nw), "Assert.That(r1a.Elevation1NW,Is.EqualTo(elev1nw))");
Assert.That(r1a.Elevation2NW,Is.EqualTo(elev2nw), "Assert.That(r1a.Elevation2NW,Is.EqualTo(elev2nw))");
Assert.That(r1a.Elevation1NE,Is.EqualTo(elev1ne), "Assert.That(r1a.Elevation1NE,Is.EqualTo(elev1ne))");
Assert.That(r1a.Elevation2NE,Is.EqualTo(elev2ne), "Assert.That(r1a.Elevation2NE,Is.EqualTo(elev2ne))");
Assert.That(r1a.Elevation1SE,Is.EqualTo(elev1se), "Assert.That(r1a.Elevation1SE,Is.EqualTo(elev1se))");
Assert.That(r1a.Elevation2SE,Is.EqualTo(elev2se), "Assert.That(r1a.Elevation2SE,Is.EqualTo(elev2se))");
Assert.That(r1a.Elevation1SW,Is.EqualTo(elev1sw), "Assert.That(r1a.Elevation1SW,Is.EqualTo(elev1sw))");
Assert.That(r1a.Elevation2SW,Is.EqualTo(elev2sw), "Assert.That(r1a.Elevation2SW,Is.EqualTo(elev2sw))");
Assert.That(r1a.WaterHeight,Is.EqualTo(waterh), "Assert.That(r1a.WaterHeight,Is.EqualTo(waterh))");
Assert.That(r1a.TerrainRaiseLimit,Is.EqualTo(terrainraise), "Assert.That(r1a.TerrainRaiseLimit,Is.EqualTo(terrainraise))");
Assert.That(r1a.TerrainLowerLimit,Is.EqualTo(terrainlower), "Assert.That(r1a.TerrainLowerLimit,Is.EqualTo(terrainlower))");
Assert.That(r1a.UseEstateSun,Is.False);
Assert.That(r1a.Sandbox,Is.True);
Assert.That(r1a.SunVector,Is.EqualTo(sunvector), "Assert.That(r1a.SunVector,Is.EqualTo(sunvector))");
// NOTE(review): the TerrainImageID assertion is disabled — confirm whether
// the store is expected to round-trip this field before re-enabling.
//Assert.That(r1a.TerrainImageID,Is.EqualTo(terimgid), "Assert.That(r1a.TerrainImageID,Is.EqualTo(terimgid))");
Assert.That(r1a.FixedSun,Is.True);
Assert.That(r1a.SunPosition, Is.EqualTo(sunpos), "Assert.That(r1a.SunPosition, Is.EqualTo(sunpos))");
Assert.That(r1a.Covenant, Is.EqualTo(cov), "Assert.That(r1a.Covenant, Is.EqualTo(cov))");
}
[Test]
// With nothing stored, LoadTerrain returns null for every region id.
public void T300_NoTerrain()
{
Assert.That(db.LoadTerrain(zero), Is.Null);
Assert.That(db.LoadTerrain(region1), Is.Null);
Assert.That(db.LoadTerrain(region2), Is.Null);
Assert.That(db.LoadTerrain(UUID.Random()), Is.Null);
}
[Test]
// Storing terrain for region1 makes it loadable for region1 only; all other
// region ids still return null.
public void T301_CreateTerrain()
{
double[,] t1 = GenTerrain(height1);
db.StoreTerrain(t1, region1);
Assert.That(db.LoadTerrain(zero), Is.Null);
Assert.That(db.LoadTerrain(region1), Is.Not.Null);
Assert.That(db.LoadTerrain(region2), Is.Null);
Assert.That(db.LoadTerrain(UUID.Random()), Is.Null);
}
[Test]
// The terrain loaded for region1 matches the heightfield stored in T301
// (height1) and does not match a different heightfield (height2).
public void T302_FetchTerrain()
{
double[,] baseterrain1 = GenTerrain(height1);
double[,] baseterrain2 = GenTerrain(height2);
double[,] t1 = db.LoadTerrain(region1);
Assert.That(CompareTerrain(t1, baseterrain1), Is.True);
Assert.That(CompareTerrain(t1, baseterrain2), Is.False);
}
[Test]
// Re-storing terrain for region1 replaces the previous heightfield: the
// load now matches height2 and no longer matches height1.
public void T303_UpdateTerrain()
{
double[,] baseterrain1 = GenTerrain(height1);
double[,] baseterrain2 = GenTerrain(height2);
db.StoreTerrain(baseterrain2, region1);
double[,] t1 = db.LoadTerrain(region1);
Assert.That(CompareTerrain(t1, baseterrain1), Is.False);
Assert.That(CompareTerrain(t1, baseterrain2), Is.True);
}
[Test]
// With no land stored, LoadLandObjects returns an empty list (never null)
// for every region id.
public void T400_EmptyLand()
{
Assert.That(db.LoadLandObjects(zero).Count, Is.EqualTo(0), "Assert.That(db.LoadLandObjects(zero).Count, Is.EqualTo(0))");
Assert.That(db.LoadLandObjects(region1).Count, Is.EqualTo(0), "Assert.That(db.LoadLandObjects(region1).Count, Is.EqualTo(0))");
Assert.That(db.LoadLandObjects(region2).Count, Is.EqualTo(0), "Assert.That(db.LoadLandObjects(region2).Count, Is.EqualTo(0))");
Assert.That(db.LoadLandObjects(UUID.Random()).Count, Is.EqualTo(0), "Assert.That(db.LoadLandObjects(UUID.Random()).Count, Is.EqualTo(0))");
}
// TODO: we should have real land tests, but Land is so
// intermingled with Scene that you can't test it without a
// valid scene. That requires some disaggregation first.
//************************************************************************************//
// Extra private methods
// Builds a RegionSize x RegionSize heightfield with every cell set to
// <paramref>value</paramref>. Used to generate known terrain for the
// store/load round-trip tests above.
private double[,] GenTerrain(double value)
{
    // A new double[,] is already zero-initialised; the old terret.Initialize()
    // call was a no-op for primitive element types and has been removed.
    double[,] terret = new double[Constants.RegionSize, Constants.RegionSize];
    for (int x = 0; x < Constants.RegionSize; x++)
        for (int y = 0; y < Constants.RegionSize; y++)
            terret[x, y] = value;
    return terret;
}
// Element-wise equality of two heightfields.
// Generalised from the original: bounds come from the arrays themselves
// instead of the hard-coded Constants.RegionSize, and maps of different
// dimensions compare unequal rather than risking an out-of-range read.
// Behaviour is unchanged for the equal-sized RegionSize maps the tests use.
private bool CompareTerrain(double[,] one, double[,] two)
{
    if (one.GetLength(0) != two.GetLength(0) ||
        one.GetLength(1) != two.GetLength(1))
        return false;
    for (int x = 0; x < one.GetLength(0); x++)
        for (int y = 0; y < one.GetLength(1); y++)
            if (one[x, y] != two[x, y])
                return false;
    return true;
}
// Loads all objects in region r and returns the first group whose root part
// is named `name`, attached to a fresh throw-away Scene so later calls on
// the group have a region context. Returns null when no match is found.
private SceneObjectGroup FindSOG(string name, UUID r)
{
List<SceneObjectGroup> objs = db.LoadObjects(r);
foreach (SceneObjectGroup sog in objs)
{
SceneObjectPart p = sog.RootPart;
if (p.Name == name) {
// Minimal region info is enough for SetScene in these tests.
RegionInfo regionInfo = new RegionInfo();
regionInfo.RegionID = r;
regionInfo.RegionLocX = 0;
regionInfo.RegionLocY = 0;
Scene scene = new Scene(regionInfo);
sog.SetScene(scene);
return sog;
}
}
return null;
}
// This builds a minimalistic Prim, 1 SOG with 1 root SOP. A
// common failure case is people adding new fields that aren't
// initialized, but have non-null db constraints. We should
// honestly be passing more and more null things in here.
//
// Please note that in Sqlite.BuildPrim there is a commented out inline version
// of this so you can debug and step through the build process and check the fields
//
// Real World Value: Tests for situation where extending a SceneObjectGroup/SceneObjectPart
// causes the application to crash at the database layer because of null values
// in NOT NULL fields
//
// Builds a minimal SceneObjectGroup: a single root SceneObjectPart attached
// to a throw-away Scene for the given region. Text/SitName/TouchName are
// randomised so stored and reloaded values can be compared meaningfully.
private SceneObjectGroup NewSOG(string name, UUID uuid, UUID regionId)
{
    // Minimal region context for SetScene.
    RegionInfo regionInfo = new RegionInfo
    {
        RegionID = regionId,
        RegionLocX = 0,
        RegionLocY = 0
    };
    Scene scene = new Scene(regionInfo);

    SceneObjectPart rootPart = new SceneObjectPart
    {
        Name = name,
        Description = name,
        Text = RandomName(),
        SitName = RandomName(),
        TouchName = RandomName(),
        UUID = uuid,
        Shape = PrimitiveBaseShape.Default
    };

    SceneObjectGroup sog = new SceneObjectGroup(rootPart);
    sog.SetScene(scene);
    return sog;
}
// Builds a standalone SceneObjectPart with the default prim shape and
// randomised text fields; no group or scene is attached.
private SceneObjectPart NewSOP(string name, UUID uuid)
{
    return new SceneObjectPart
    {
        Name = name,
        Description = name,
        Text = RandomName(),
        SitName = RandomName(),
        TouchName = RandomName(),
        UUID = uuid,
        Shape = PrimitiveBaseShape.Default
    };
}
// These are copied from the Inventory Item tests
// Builds an InventoryItemBase with the given identifiers; the owner doubles
// as the creator. Copied from the inventory item tests.
private InventoryItemBase NewItem(UUID id, UUID parent, UUID owner, string name, UUID asset)
{
    return new InventoryItemBase
    {
        ID = id,
        Folder = parent,
        Owner = owner,
        CreatorId = owner.ToString(),
        Name = name,
        Description = name,
        AssetID = asset
    };
}
// Returns a random name of 5 to 11 uppercase ASCII letters ('A'..'Z').
private static string RandomName()
{
    int size = random.Next(5, 12); // upper bound is exclusive: 5..11 chars
    StringBuilder name = new StringBuilder(size);
    for (int i = 0; i < size; i++)
    {
        // Pick a letter directly with Random.Next(26) instead of the
        // original floating-point floor/Convert dance (same A-Z range).
        name.Append((char)('A' + random.Next(26)));
    }
    return name.ToString();
}
// private InventoryFolderBase NewFolder(UUID id, UUID parent, UUID owner, string name)
// {
// InventoryFolderBase f = new InventoryFolderBase();
// f.ID = id;
// f.ParentID = parent;
// f.Owner = owner;
// f.Name = name;
// return f;
// }
}
}
| |
using System;
using System.Text;
using UnityEngine;
using ChartboostSDK;
using System.Collections.Generic;
public class ChartboostExample: MonoBehaviour
{
public GameObject inPlayIcon;
public GameObject inPlayText;
public Texture2D logo;
private CBInPlay inPlayAd;
public Vector2 scrollPosition = Vector2.zero;
private List<string> delegateHistory;
private bool hasInterstitial = false;
private bool hasMoreApps = false;
private bool hasRewardedVideo = false;
private bool hasInPlay = false;
private int frameCount = 0;
private bool ageGate = false;
private bool autocache = true;
private bool activeAgeGate = false;
private bool showInterstitial = true;
private bool showMoreApps = true;
private bool showRewardedVideo = true;
private int BANNER_HEIGHT = 110;
private int REQUIRED_HEIGHT = 650;
private int ELEMENT_WIDTH = 190;
private Rect scrollRect;
private Rect scrollArea;
private Vector3 guiScale;
private float scale;
#if UNITY_IPHONE
private CBStatusBarBehavior statusBar = CBStatusBarBehavior.Ignore;
#endif
void OnEnable() {
SetupDelegates();
}
void Start() {
// Backing list for the on-screen delegate history (see AddLog/LayoutHeader).
delegateHistory = new List<string>();
// Push the locally tracked toggle defaults into the Chartboost SDK so the
// UI state and SDK state start out in agreement.
Chartboost.setShouldPauseClickForConfirmation(ageGate);
Chartboost.setAutoCacheAds(autocache);
}
void SetupDelegates()
{
// Listen to all impression-related events
Chartboost.didFailToLoadInterstitial += didFailToLoadInterstitial;
Chartboost.didDismissInterstitial += didDismissInterstitial;
Chartboost.didCloseInterstitial += didCloseInterstitial;
Chartboost.didClickInterstitial += didClickInterstitial;
Chartboost.didCacheInterstitial += didCacheInterstitial;
Chartboost.shouldDisplayInterstitial += shouldDisplayInterstitial;
Chartboost.didDisplayInterstitial += didDisplayInterstitial;
Chartboost.didFailToLoadMoreApps += didFailToLoadMoreApps;
Chartboost.didDismissMoreApps += didDismissMoreApps;
Chartboost.didCloseMoreApps += didCloseMoreApps;
Chartboost.didClickMoreApps += didClickMoreApps;
Chartboost.didCacheMoreApps += didCacheMoreApps;
Chartboost.shouldDisplayMoreApps += shouldDisplayMoreApps;
Chartboost.didDisplayMoreApps += didDisplayMoreApps;
Chartboost.didFailToRecordClick += didFailToRecordClick;
Chartboost.didFailToLoadRewardedVideo += didFailToLoadRewardedVideo;
Chartboost.didDismissRewardedVideo += didDismissRewardedVideo;
Chartboost.didCloseRewardedVideo += didCloseRewardedVideo;
Chartboost.didClickRewardedVideo += didClickRewardedVideo;
Chartboost.didCacheRewardedVideo += didCacheRewardedVideo;
Chartboost.shouldDisplayRewardedVideo += shouldDisplayRewardedVideo;
Chartboost.didCompleteRewardedVideo += didCompleteRewardedVideo;
Chartboost.didDisplayRewardedVideo += didDisplayRewardedVideo;
Chartboost.didCacheInPlay += didCacheInPlay;
Chartboost.didFailToLoadInPlay += didFailToLoadInPlay;
Chartboost.didPauseClickForConfirmation += didPauseClickForConfirmation;
Chartboost.willDisplayVideo += willDisplayVideo;
#if UNITY_IPHONE
Chartboost.didCompleteAppStoreSheetFlow += didCompleteAppStoreSheetFlow;
#endif
}
private Vector2 beginFinger; // finger
private float deltaFingerY; // finger
private Vector2 beginPanel; // scrollpanel
private Vector2 latestPanel; // scrollpanel
// Per-frame update: handles touch scrolling every frame, and refreshes the
// cached has* ad-availability flags roughly every 30 frames (the SDK calls
// are not worth making every frame).
void Update() {
UpdateScrolling();
frameCount++;
if( frameCount > 30 )
{
// update these periodically and not every frame
hasInterstitial = Chartboost.hasInterstitial(CBLocation.Default);
hasMoreApps = Chartboost.hasMoreApps(CBLocation.Default);
hasRewardedVideo = Chartboost.hasRewardedVideo(CBLocation.Default);
hasInPlay = Chartboost.hasInPlay(CBLocation.Default);
frameCount = 0;
}
}
// Drives scrollPosition from single-finger touch drags: records the finger
// and panel positions on Began, then on Moved offsets the panel by the
// finger's vertical delta divided by the GUI scale (screen px -> GUI units).
void UpdateScrolling()
{
// Only react to exactly one touch; multi-touch is ignored.
if ( Input.touchCount != 1 ) return;
Touch touch = Input.touches[0];
if ( touch.phase == TouchPhase.Began )
{
beginFinger = touch.position;
beginPanel = scrollPosition;
}
if ( touch.phase == TouchPhase.Moved )
{
Vector2 newFingerScreenPos = touch.position;
deltaFingerY = newFingerScreenPos.y - beginFinger.y;
float newY = beginPanel.y + (deltaFingerY / scale);
latestPanel = beginPanel;
latestPanel.y = newY;
scrollPosition = latestPanel;
}
}
// Logs to the Unity console and prepends the entry (newline-terminated) to
// the on-screen delegate history, trimming the list to the 20 newest entries.
void AddLog(string text)
{
    Debug.Log(text);
    delegateHistory.Insert(0, text + "\n");
    if (delegateHistory.Count > 20)
    {
        delegateHistory.RemoveRange(20, delegateHistory.Count - 20);
    }
}
// Main IMGUI entry point: computes a resolution-independent GUI scale,
// lays out the header, then either the modal age gate or the scrolling
// button/toggle panel.
void OnGUI() {
/*
#if UNITY_ANDROID
// Disable user input for GUI when impressions are visible
// This is only necessary on Android if we have disabled impression activities
// by having called CBBinding.init(ID, SIG, false), as that allows touch
// events to leak through Chartboost impressions
GUI.enabled = !Chartboost.isImpressionVisible();
#endif
*/
//get the screen's width
float sWidth = Screen.width;
float sHeight = Screen.height;
//calculate the rescale ratio
float guiRatioX = sWidth/240.0f;
float guiRatioY = sHeight/210.0f;
// Uniform scale, capped at 6x so the GUI doesn't blow up on huge screens.
float myScale = Mathf.Min(6.0f, Mathf.Min(guiRatioX, guiRatioY));
if(scale != myScale) {
scale = myScale;
guiScale = new Vector3(scale,scale,1);
}
GUI.matrix = Matrix4x4.Scale(guiScale);
ELEMENT_WIDTH = (int)(sWidth/scale)-30;
float height = REQUIRED_HEIGHT;
if(inPlayAd != null) {
// add space for the icon
height += 60;
}
scrollRect = new Rect(0, BANNER_HEIGHT, ELEMENT_WIDTH+30, sHeight/scale-BANNER_HEIGHT);
scrollArea = new Rect(-10, BANNER_HEIGHT, ELEMENT_WIDTH, height);
LayoutHeader();
// While the age gate is active, show only the modal window and skip the
// rest of the UI for this frame.
if( activeAgeGate )
{
GUI.ModalWindow(1, new Rect(0, 0, Screen.width, Screen.height), LayoutAgeGate, "Age Gate");
return;
}
scrollPosition = GUI.BeginScrollView(scrollRect, scrollPosition, scrollArea);
LayoutButtons();
LayoutToggles();
GUI.EndScrollView();
}
// Draws the header: the logo image plus a text area showing the recent
// delegate history (newest first).
void LayoutHeader()
{
    GUILayout.Label(logo, GUILayout.Height(30), GUILayout.Width(ELEMENT_WIDTH+20));
    // Each history entry is already newline-terminated (see AddLog), so a
    // single Concat replaces the original O(n^2) += loop.
    string text = string.Concat(delegateHistory);
    GUILayout.TextArea( text, GUILayout.Height(70), GUILayout.Width(ELEMENT_WIDTH+20));
}
// Draws the options section: should-display toggles (read back by the
// shouldDisplay* delegates), age-gate and auto-cache toggles that are
// forwarded to the SDK on change, and (iOS only) a status-bar slider.
void LayoutToggles()
{
GUILayout.Space(5);
GUILayout.Label("Options:");
showInterstitial = GUILayout.Toggle(showInterstitial, "Should Display Interstitial");
showMoreApps = GUILayout.Toggle(showMoreApps, "Should Display More Apps");
showRewardedVideo = GUILayout.Toggle(showRewardedVideo, "Should Display Rewarded Video");
// Only call into the SDK when the toggle actually changed.
if( GUILayout.Toggle(ageGate, "Should Pause for AgeGate") != ageGate )
{
ageGate = !ageGate; // toggle
Chartboost.setShouldPauseClickForConfirmation(ageGate);
}
if( GUILayout.Toggle(autocache, "Auto cache ads") != autocache )
{
autocache = !autocache; // toggle
Chartboost.setAutoCacheAds(autocache);
}
#if UNITY_IPHONE
GUILayout.Label("Status Bar Behavior:");
// Slider maps 0..2 onto the CBStatusBarBehavior enum values.
int slider = Mathf.RoundToInt(GUILayout.HorizontalSlider((int)statusBar, 0, 2, GUILayout.Width(ELEMENT_WIDTH/2)));
if( slider != (int)statusBar )
{
statusBar = (CBStatusBarBehavior)slider;
Chartboost.setStatusBarBehavior(statusBar);
switch(statusBar)
{
case CBStatusBarBehavior.Ignore:
AddLog("set to Ignore");
break;
case CBStatusBarBehavior.RespectButtons:
AddLog("set to RespectButtons");
break;
case CBStatusBarBehavior.Respect:
AddLog("set to Respect");
break;
}
}
#endif
}
void LayoutButtons()
{
// The view with buttons to trigger the main Chartboost API calls
GUILayout.Space(5);
GUILayout.Label("Has Interstitial: " + hasInterstitial);
if (GUILayout.Button("Cache Interstitial", GUILayout.Width(ELEMENT_WIDTH))) {
Chartboost.cacheInterstitial(CBLocation.Default);
}
if (GUILayout.Button("Show Interstitial", GUILayout.Width(ELEMENT_WIDTH))) {
Chartboost.showInterstitial(CBLocation.Default);
}
GUILayout.Space(5);
GUILayout.Label("Has MoreApps: " + hasMoreApps);
if (GUILayout.Button("Cache More Apps", GUILayout.Width(ELEMENT_WIDTH))) {
Chartboost.cacheMoreApps(CBLocation.Default);
}
if (GUILayout.Button("Show More Apps", GUILayout.Width(ELEMENT_WIDTH))) {
Chartboost.showMoreApps(CBLocation.Default);
}
GUILayout.Space(5);
GUILayout.Label("Has Rewarded Video: " + hasRewardedVideo);
if (GUILayout.Button("Cache Rewarded Video", GUILayout.Width(ELEMENT_WIDTH))) {
Chartboost.cacheRewardedVideo(CBLocation.Default);
}
if (GUILayout.Button("Show Rewarded Video", GUILayout.Width(ELEMENT_WIDTH))) {
Chartboost.showRewardedVideo(CBLocation.Default);
}
GUILayout.Space(5);
GUILayout.Label("Has InPlay: " + hasInPlay);
if (GUILayout.Button("Cache InPlay Ad", GUILayout.Width(ELEMENT_WIDTH))) {
Chartboost.cacheInPlay(CBLocation.Default);
}
if (GUILayout.Button("Show InPlay Ad", GUILayout.Width(ELEMENT_WIDTH))) {
inPlayAd = Chartboost.getInPlay(CBLocation.Default);
if(inPlayAd != null) {
inPlayAd.show();
}
}
if(inPlayAd != null) {
// Set the texture of InPlay Ad Icon
// Link its onClick() event with inPlay's click()
GUILayout.Label("app: " + inPlayAd.appName);
if(GUILayout.Button(inPlayAd.appIcon, GUILayout.Width(ELEMENT_WIDTH))) {
inPlayAd.click();
}
}
GUILayout.Space(5);
GUILayout.Label("Post install events:");
if (GUILayout.Button("Send PIA Main Level Event", GUILayout.Width(ELEMENT_WIDTH))) {
Chartboost.trackLevelInfo("Test Data", CBLevelType.HIGHEST_LEVEL_REACHED, 1, "Test Send mail level Information");
}
if (GUILayout.Button("Send PIA Sub Level Event", GUILayout.Width(ELEMENT_WIDTH))) {
Chartboost.trackLevelInfo("Test Data", CBLevelType.HIGHEST_LEVEL_REACHED, 1, 2, "Test Send sub level Information");
}
if (GUILayout.Button("Track IAP", GUILayout.Width(ELEMENT_WIDTH))) {
TrackIAP();
}
}
// Modal window body for the age gate: YES/NO buttons report the outcome to
// the SDK via didPassAgeGate and dismiss the gate either way.
void LayoutAgeGate(int windowID)
{
GUILayout.Space(BANNER_HEIGHT);
GUILayout.Label("Want to pass the age gate?");
GUILayout.BeginHorizontal(GUILayout.Width(ELEMENT_WIDTH));
if( GUILayout.Button("YES") )
{
Chartboost.didPassAgeGate(true);
activeAgeGate = false;
}
if( GUILayout.Button("NO") )
{
Chartboost.didPassAgeGate(false);
activeAgeGate = false;
}
GUILayout.EndHorizontal();
}
void OnDisable() {
// Remove event handlers
Chartboost.didFailToLoadInterstitial -= didFailToLoadInterstitial;
Chartboost.didDismissInterstitial -= didDismissInterstitial;
Chartboost.didCloseInterstitial -= didCloseInterstitial;
Chartboost.didClickInterstitial -= didClickInterstitial;
Chartboost.didCacheInterstitial -= didCacheInterstitial;
Chartboost.shouldDisplayInterstitial -= shouldDisplayInterstitial;
Chartboost.didDisplayInterstitial -= didDisplayInterstitial;
Chartboost.didFailToLoadMoreApps -= didFailToLoadMoreApps;
Chartboost.didDismissMoreApps -= didDismissMoreApps;
Chartboost.didCloseMoreApps -= didCloseMoreApps;
Chartboost.didClickMoreApps -= didClickMoreApps;
Chartboost.didCacheMoreApps -= didCacheMoreApps;
Chartboost.shouldDisplayMoreApps -= shouldDisplayMoreApps;
Chartboost.didDisplayMoreApps -= didDisplayMoreApps;
Chartboost.didFailToRecordClick -= didFailToRecordClick;
Chartboost.didFailToLoadRewardedVideo -= didFailToLoadRewardedVideo;
Chartboost.didDismissRewardedVideo -= didDismissRewardedVideo;
Chartboost.didCloseRewardedVideo -= didCloseRewardedVideo;
Chartboost.didClickRewardedVideo -= didClickRewardedVideo;
Chartboost.didCacheRewardedVideo -= didCacheRewardedVideo;
Chartboost.shouldDisplayRewardedVideo -= shouldDisplayRewardedVideo;
Chartboost.didCompleteRewardedVideo -= didCompleteRewardedVideo;
Chartboost.didDisplayRewardedVideo -= didDisplayRewardedVideo;
Chartboost.didCacheInPlay -= didCacheInPlay;
Chartboost.didFailToLoadInPlay -= didFailToLoadInPlay;
Chartboost.didPauseClickForConfirmation -= didPauseClickForConfirmation;
Chartboost.willDisplayVideo -= willDisplayVideo;
#if UNITY_IPHONE
Chartboost.didCompleteAppStoreSheetFlow -= didCompleteAppStoreSheetFlow;
#endif
}
void didFailToLoadInterstitial(CBLocation location, CBImpressionError error) {
AddLog(string.Format("didFailToLoadInterstitial: {0} at location {1}", error, location));
}
void didDismissInterstitial(CBLocation location) {
AddLog("didDismissInterstitial: " + location);
}
void didCloseInterstitial(CBLocation location) {
AddLog("didCloseInterstitial: " + location);
}
void didClickInterstitial(CBLocation location) {
AddLog("didClickInterstitial: " + location);
}
void didCacheInterstitial(CBLocation location) {
AddLog("didCacheInterstitial: " + location);
}
// SDK callback: gates interstitial display on the UI toggle and logs the decision.
bool shouldDisplayInterstitial(CBLocation location) {
// return true if you want to allow the interstitial to be displayed
AddLog("shouldDisplayInterstitial @" + location + " : " + showInterstitial);
return showInterstitial;
}
void didDisplayInterstitial(CBLocation location){
AddLog("didDisplayInterstitial: " + location);
}
void didFailToLoadMoreApps(CBLocation location, CBImpressionError error) {
AddLog(string.Format("didFailToLoadMoreApps: {0} at location: {1}", error, location));
}
void didDismissMoreApps(CBLocation location) {
AddLog(string.Format("didDismissMoreApps at location: {0}", location));
}
void didCloseMoreApps(CBLocation location) {
AddLog(string.Format("didCloseMoreApps at location: {0}", location));
}
void didClickMoreApps(CBLocation location) {
AddLog(string.Format("didClickMoreApps at location: {0}", location));
}
void didCacheMoreApps(CBLocation location) {
AddLog(string.Format("didCacheMoreApps at location: {0}", location));
}
bool shouldDisplayMoreApps(CBLocation location) {
AddLog(string.Format("shouldDisplayMoreApps at location: {0}: {1}", location, showMoreApps));
return showMoreApps;
}
void didDisplayMoreApps(CBLocation location){
AddLog("didDisplayMoreApps: " + location);
}
void didFailToRecordClick(CBLocation location, CBClickError error) {
AddLog(string.Format("didFailToRecordClick: {0} at location: {1}", error, location));
}
void didFailToLoadRewardedVideo(CBLocation location, CBImpressionError error) {
AddLog(string.Format("didFailToLoadRewardedVideo: {0} at location {1}", error, location));
}
void didDismissRewardedVideo(CBLocation location) {
AddLog("didDismissRewardedVideo: " + location);
}
void didCloseRewardedVideo(CBLocation location) {
AddLog("didCloseRewardedVideo: " + location);
}
void didClickRewardedVideo(CBLocation location) {
AddLog("didClickRewardedVideo: " + location);
}
void didCacheRewardedVideo(CBLocation location) {
AddLog("didCacheRewardedVideo: " + location);
}
bool shouldDisplayRewardedVideo(CBLocation location) {
AddLog("shouldDisplayRewardedVideo @" + location + " : " + showRewardedVideo);
return showRewardedVideo;
}
void didCompleteRewardedVideo(CBLocation location, int reward) {
AddLog(string.Format("didCompleteRewardedVideo: reward {0} at location {1}", reward, location));
}
void didDisplayRewardedVideo(CBLocation location){
AddLog("didDisplayRewardedVideo: " + location);
}
void didCacheInPlay(CBLocation location) {
AddLog("didCacheInPlay called: "+location);
}
void didFailToLoadInPlay(CBLocation location, CBImpressionError error) {
AddLog(string.Format("didFailToLoadInPlay: {0} at location: {1}", error, location));
}
// SDK callback fired when a click is paused pending age confirmation;
// raises the modal age gate drawn by OnGUI/LayoutAgeGate.
void didPauseClickForConfirmation() {
AddLog("didPauseClickForConfirmation called");
activeAgeGate = true;
}
void willDisplayVideo(CBLocation location) {
AddLog("willDisplayVideo: " + location);
}
#if UNITY_IPHONE
void didCompleteAppStoreSheetFlow() {
AddLog("didCompleteAppStoreSheetFlow");
}
void TrackIAP() {
// The iOS receipt data from Unibill is already base64 encoded.
// Others store kit plugins may be different.
// This is a sample sandbox receipt.
string sampleReceipt = @"ewoJInNpZ25hdHVyZSIgPSAiQXBNVUJDODZBbHpOaWtWNVl0clpBTWlKUWJLOEVk
ZVhrNjNrV0JBWHpsQzhkWEd1anE0N1puSVlLb0ZFMW9OL0ZTOGNYbEZmcDlZWHQ5
aU1CZEwyNTBsUlJtaU5HYnloaXRyeVlWQVFvcmkzMlc5YVIwVDhML2FZVkJkZlcr
T3kvUXlQWkVtb05LeGhudDJXTlNVRG9VaFo4Wis0cFA3MHBlNWtVUWxiZElWaEFB
QURWekNDQTFNd2dnSTdvQU1DQVFJQ0NHVVVrVTNaV0FTMU1BMEdDU3FHU0liM0RR
RUJCUVVBTUg4eEN6QUpCZ05WQkFZVEFsVlRNUk13RVFZRFZRUUtEQXBCY0hCc1pT
QkpibU11TVNZd0pBWURWUVFMREIxQmNIQnNaU0JEWlhKMGFXWnBZMkYwYVc5dUlF
RjFkR2h2Y21sMGVURXpNREVHQTFVRUF3d3FRWEJ3YkdVZ2FWUjFibVZ6SUZOMGIz
SmxJRU5sY25ScFptbGpZWFJwYjI0Z1FYVjBhRzl5YVhSNU1CNFhEVEE1TURZeE5U
SXlNRFUxTmxvWERURTBNRFl4TkRJeU1EVTFObG93WkRFak1DRUdBMVVFQXd3YVVI
VnlZMmhoYzJWU1pXTmxhWEIwUTJWeWRHbG1hV05oZEdVeEd6QVpCZ05WQkFzTUVr
RndjR3hsSUdsVWRXNWxjeUJUZEc5eVpURVRNQkVHQTFVRUNnd0tRWEJ3YkdVZ1NX
NWpMakVMTUFrR0ExVUVCaE1DVlZNd2daOHdEUVlKS29aSWh2Y05BUUVCQlFBRGdZ
MEFNSUdKQW9HQkFNclJqRjJjdDRJclNkaVRDaGFJMGc4cHd2L2NtSHM4cC9Sd1Yv
cnQvOTFYS1ZoTmw0WElCaW1LalFRTmZnSHNEczZ5anUrK0RyS0pFN3VLc3BoTWRk
S1lmRkU1ckdYc0FkQkVqQndSSXhleFRldngzSExFRkdBdDFtb0t4NTA5ZGh4dGlJ
ZERnSnYyWWFWczQ5QjB1SnZOZHk2U01xTk5MSHNETHpEUzlvWkhBZ01CQUFHamNq
QndNQXdHQTFVZEV3RUIvd1FDTUFBd0h3WURWUjBqQkJnd0ZvQVVOaDNvNHAyQzBn
RVl0VEpyRHRkREM1RllRem93RGdZRFZSMFBBUUgvQkFRREFnZUFNQjBHQTFVZERn
UVdCQlNwZzRQeUdVakZQaEpYQ0JUTXphTittVjhrOVRBUUJnb3Foa2lHOTJOa0Jn
VUJCQUlGQURBTkJna3Foa2lHOXcwQkFRVUZBQU9DQVFFQUVhU2JQanRtTjRDL0lC
M1FFcEszMlJ4YWNDRFhkVlhBZVZSZVM1RmFaeGMrdDg4cFFQOTNCaUF4dmRXLzNl
VFNNR1k1RmJlQVlMM2V0cVA1Z204d3JGb2pYMGlreVZSU3RRKy9BUTBLRWp0cUIw
N2tMczlRVWU4Y3pSOFVHZmRNMUV1bVYvVWd2RGQ0TndOWXhMUU1nNFdUUWZna1FR
Vnk4R1had1ZIZ2JFL1VDNlk3MDUzcEdYQms1MU5QTTN3b3hoZDNnU1JMdlhqK2xv
SHNTdGNURXFlOXBCRHBtRzUrc2s0dHcrR0szR01lRU41LytlMVFUOW5wL0tsMW5q
K2FCdzdDMHhzeTBiRm5hQWQxY1NTNnhkb3J5L0NVdk02Z3RLc21uT09kcVRlc2Jw
MGJzOHNuNldxczBDOWRnY3hSSHVPTVoydG04bnBMVW03YXJnT1N6UT09IjsKCSJw
dXJjaGFzZS1pbmZvIiA9ICJld29KSW05eWFXZHBibUZzTFhCMWNtTm9ZWE5sTFdS
aGRHVXRjSE4wSWlBOUlDSXlNREV5TFRBMExUTXdJREE0T2pBMU9qVTFJRUZ0WlhK
cFkyRXZURzl6WDBGdVoyVnNaWE1pT3dvSkltOXlhV2RwYm1Gc0xYUnlZVzV6WVdO
MGFXOXVMV2xrSWlBOUlDSXhNREF3TURBd01EUTJNVGM0T0RFM0lqc0tDU0ppZG5K
eklpQTlJQ0l5TURFeU1EUXlOeUk3Q2draWRISmhibk5oWTNScGIyNHRhV1FpSUQw
Z0lqRXdNREF3TURBd05EWXhOemc0TVRjaU93b0pJbkYxWVc1MGFYUjVJaUE5SUNJ
eElqc0tDU0p2Y21sbmFXNWhiQzF3ZFhKamFHRnpaUzFrWVhSbExXMXpJaUE5SUNJ
eE16TTFOems0TXpVMU9EWTRJanNLQ1NKd2NtOWtkV04wTFdsa0lpQTlJQ0pqYjIw
dWJXbHVaRzF2WW1Gd2NDNWtiM2R1Ykc5aFpDSTdDZ2tpYVhSbGJTMXBaQ0lnUFNB
aU5USXhNVEk1T0RFeUlqc0tDU0ppYVdRaUlEMGdJbU52YlM1dGFXNWtiVzlpWVhC
d0xrMXBibVJOYjJJaU93b0pJbkIxY21Ob1lYTmxMV1JoZEdVdGJYTWlJRDBnSWpF
ek16VTNPVGd6TlRVNE5qZ2lPd29KSW5CMWNtTm9ZWE5sTFdSaGRHVWlJRDBnSWpJ
d01USXRNRFF0TXpBZ01UVTZNRFU2TlRVZ1JYUmpMMGROVkNJN0Nna2ljSFZ5WTJo
aGMyVXRaR0YwWlMxd2MzUWlJRDBnSWpJd01USXRNRFF0TXpBZ01EZzZNRFU2TlRV
Z1FXMWxjbWxqWVM5TWIzTmZRVzVuWld4bGN5STdDZ2tpYjNKcFoybHVZV3d0Y0hW
eVkyaGhjMlV0WkdGMFpTSWdQU0FpTWpBeE1pMHdOQzB6TUNBeE5Ub3dOVG8xTlNC
RmRHTXZSMDFVSWpzS2ZRPT0iOwoJImVudmlyb25tZW50IiA9ICJTYW5kYm94IjsK
CSJwb2QiID0gIjEwMCI7Cgkic2lnbmluZy1zdGF0dXMiID0gIjAiOwp9";
// Demonstrate Base64 encoding. Not necessary for the data above
// If the receipt was not base64 encoded, send encodedText not sampleReceipt
//byte[] bytesToEncode = Encoding.UTF8.GetBytes(sampleReceipt);
//string encodedText = Convert.ToBase64String(bytesToEncode);
// Send the receipt for track an In App Purchase Event
Chartboost.trackInAppAppleStorePurchaseEvent(sampleReceipt,
"sample product title", "sample product description", "1.99", "USD", "sample product identifier" );
//byte[] decodedText = Convert.FromBase64String(sampleReceipt);
//Debug.Log("Decoded: " + System.Text.Encoding.UTF8.GetString(decodedText));
//Debug.Log("Encoded: " + encodedText);
}
#elif UNITY_ANDROID
void TrackIAP() {
Debug.Log("TrackIAP");
// title, description, price, currency, productID, purchaseData, purchaseSignature
// This data should be sent after handling the results from the google store.
// This is fake data and doesn't represent a real or even imaginary purchase
Chartboost.trackInAppGooglePlayPurchaseEvent("SampleItem", "TestPurchase", "0.99", "USD", "ProductID", "PurchaseData", "PurchaseSignature");
// If you are using the Amazon store...
//Chartboost.trackInAppAmazonStorePurchaseEvent("SampleItem", "TestPurchase", "0.99", "ProductID", "UserId", "PurchaseToken");
}
#else
void TrackIAP() {
Debug.Log("TrackIAP on unsupported platform");
}
#endif
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Net;
using System.Net.Http;
using System.Net.WebSockets;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Testing;
using Microsoft.Net.Http.Headers;
using Xunit;
namespace Microsoft.AspNetCore.WebSockets.Test
{
public class WebSocketMiddlewareTests : LoggedTest
{
[Fact]
// A plain client WebSocket handshake against the middleware succeeds and the
// server sees the request as a WebSocket upgrade.
public async Task Connect_Success()
{
await using (var server = KestrelWebSocketHelpers.CreateServer(LoggerFactory, out var port, async context =>
{
Assert.True(context.WebSockets.IsWebSocketRequest);
var webSocket = await context.WebSockets.AcceptWebSocketAsync();
}))
{
using (var client = new ClientWebSocket())
{
await client.ConnectAsync(new Uri($"ws://127.0.0.1:{port}/"), CancellationToken.None);
}
}
}
[Fact]
// The client offers three sub-protocols; the server sees them as a single
// comma-joined header value and selects one, which the client observes
// (case-insensitively, see the comment below).
public async Task NegotiateSubProtocol_Success()
{
await using (var server = KestrelWebSocketHelpers.CreateServer(LoggerFactory, out var port, async context =>
{
Assert.True(context.WebSockets.IsWebSocketRequest);
Assert.Equal("alpha, bravo, charlie", context.Request.Headers["Sec-WebSocket-Protocol"]);
var webSocket = await context.WebSockets.AcceptWebSocketAsync("Bravo");
}))
{
using (var client = new ClientWebSocket())
{
client.Options.AddSubProtocol("alpha");
client.Options.AddSubProtocol("bravo");
client.Options.AddSubProtocol("charlie");
await client.ConnectAsync(new Uri($"ws://127.0.0.1:{port}/"), CancellationToken.None);
// The Windows version of ClientWebSocket uses the casing from the header (Bravo)
// However, the Managed version seems match the header against the list generated by
// the AddSubProtocol calls (case-insensitively) and then use the version from
// that list as the value for SubProtocol. This is fine, but means we need to ignore case here.
// We could update our AddSubProtocols above to the same case but I think it's better to
// ensure this behavior is codified by this test.
Assert.Equal("Bravo", client.SubProtocol, ignoreCase: true);
}
}
}
[Fact]
// Sending a zero-length binary frame is delivered as a complete, empty
// binary message on the server side.
public async Task SendEmptyData_Success()
{
    await using (var server = KestrelWebSocketHelpers.CreateServer(LoggerFactory, out var port, async context =>
    {
        Assert.True(context.WebSockets.IsWebSocketRequest);
        var webSocket = await context.WebSockets.AcceptWebSocketAsync();

        // Zero-length receive buffer is enough for an empty frame.
        var receiveBuffer = new byte[0];
        var result = await webSocket.ReceiveAsync(new ArraySegment<byte>(receiveBuffer), CancellationToken.None);

        Assert.True(result.EndOfMessage);
        Assert.Equal(0, result.Count);
        Assert.Equal(WebSocketMessageType.Binary, result.MessageType);
    }))
    {
        using (var client = new ClientWebSocket())
        {
            await client.ConnectAsync(new Uri($"ws://127.0.0.1:{port}/"), CancellationToken.None);
            var payload = new byte[0];
            await client.SendAsync(new ArraySegment<byte>(payload), WebSocketMessageType.Binary, true, CancellationToken.None);
        }
    }
}
[Fact]
// A short (single-frame) binary payload arrives intact: same length, same
// bytes, marked end-of-message.
public async Task SendShortData_Success()
{
var orriginalData = Encoding.UTF8.GetBytes("Hello World");
await using (var server = KestrelWebSocketHelpers.CreateServer(LoggerFactory, out var port, async context =>
{
Assert.True(context.WebSockets.IsWebSocketRequest);
var webSocket = await context.WebSockets.AcceptWebSocketAsync();
var serverBuffer = new byte[orriginalData.Length];
var result = await webSocket.ReceiveAsync(new ArraySegment<byte>(serverBuffer), CancellationToken.None);
Assert.True(result.EndOfMessage);
Assert.Equal(orriginalData.Length, result.Count);
Assert.Equal(WebSocketMessageType.Binary, result.MessageType);
Assert.Equal(orriginalData, serverBuffer);
}))
{
using (var client = new ClientWebSocket())
{
await client.ConnectAsync(new Uri($"ws://127.0.0.1:{port}/"), CancellationToken.None);
await client.SendAsync(new ArraySegment<byte>(orriginalData), WebSocketMessageType.Binary, true, CancellationToken.None);
}
}
}
[Fact]
public async Task SendMediumData_Success()
{
    // 130 bytes — presumably chosen to exceed the single-byte frame length
    // limit so a longer length encoding is exercised on the wire.
    var payload = Encoding.UTF8.GetBytes(new string('a', 130));
    await using var server = KestrelWebSocketHelpers.CreateServer(LoggerFactory, out var port, async context =>
    {
        Assert.True(context.WebSockets.IsWebSocketRequest);
        var webSocket = await context.WebSockets.AcceptWebSocketAsync();
        var receiveBuffer = new byte[payload.Length];
        var receiveResult = await webSocket.ReceiveAsync(new ArraySegment<byte>(receiveBuffer), CancellationToken.None);
        Assert.True(receiveResult.EndOfMessage);
        Assert.Equal(payload.Length, receiveResult.Count);
        Assert.Equal(WebSocketMessageType.Binary, receiveResult.MessageType);
        Assert.Equal(payload, receiveBuffer);
    });

    using var client = new ClientWebSocket();
    await client.ConnectAsync(new Uri($"ws://127.0.0.1:{port}/"), CancellationToken.None);
    await client.SendAsync(new ArraySegment<byte>(payload), WebSocketMessageType.Binary, true, CancellationToken.None);
}
[Fact]
public async Task SendLongData_Success()
{
    // 0x1FFFF bytes — large enough to require the widest frame length encoding.
    var serverDone = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously);
    var payload = Encoding.UTF8.GetBytes(new string('a', 0x1FFFF));
    await using var server = KestrelWebSocketHelpers.CreateServer(LoggerFactory, out var port, async context =>
    {
        Assert.True(context.WebSockets.IsWebSocketRequest);
        var webSocket = await context.WebSockets.AcceptWebSocketAsync();
        var receiveBuffer = new byte[payload.Length];
        var receiveResult = await webSocket.ReceiveAsync(new ArraySegment<byte>(receiveBuffer), CancellationToken.None);
        Assert.True(receiveResult.EndOfMessage);
        Assert.Equal(WebSocketMessageType.Binary, receiveResult.MessageType);
        Assert.Equal(payload, receiveBuffer);
        serverDone.SetResult();
    });

    using (var client = new ClientWebSocket())
    {
        await client.ConnectAsync(new Uri($"ws://127.0.0.1:{port}/"), CancellationToken.None);
        await client.SendAsync(new ArraySegment<byte>(payload), WebSocketMessageType.Binary, true, CancellationToken.None);
    }

    // Wait to close the server otherwise the app could throw if it takes
    // longer than the shutdown timeout.
    await serverDone.Task;
}
[Fact]
public async Task SendFragmentedData_Success()
{
// Sends "Hello World" as three binary fragments (2 + 2 + 7 bytes) and verifies the
// server observes each partial receive, with EndOfMessage set only on the last one.
var orriginalData = Encoding.UTF8.GetBytes("Hello World");
// Hand-off between client and server: the server completes the current TCS after each
// partial receive; the client awaits it and swaps in a fresh TCS before the next send.
// The lambda captures the *variable*, so it always sees the most recent TCS.
var tcs = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously);
await using (var server = KestrelWebSocketHelpers.CreateServer(LoggerFactory, out var port, async context =>
{
Assert.True(context.WebSockets.IsWebSocketRequest);
var webSocket = await context.WebSockets.AcceptWebSocketAsync();
var serverBuffer = new byte[orriginalData.Length];
var result = await webSocket.ReceiveAsync(new ArraySegment<byte>(serverBuffer), CancellationToken.None);
// First fragment: 2 bytes, message not yet complete.
Assert.False(result.EndOfMessage);
Assert.Equal(2, result.Count);
int totalReceived = result.Count;
Assert.Equal(WebSocketMessageType.Binary, result.MessageType);
tcs.SetResult();
result = await webSocket.ReceiveAsync(
new ArraySegment<byte>(serverBuffer, totalReceived, serverBuffer.Length - totalReceived), CancellationToken.None);
// Second fragment: 2 more bytes, still not complete.
Assert.False(result.EndOfMessage);
Assert.Equal(2, result.Count);
totalReceived += result.Count;
Assert.Equal(WebSocketMessageType.Binary, result.MessageType);
tcs.SetResult();
result = await webSocket.ReceiveAsync(
new ArraySegment<byte>(serverBuffer, totalReceived, serverBuffer.Length - totalReceived), CancellationToken.None);
// Final fragment: 7 bytes, completing the message.
Assert.True(result.EndOfMessage);
Assert.Equal(7, result.Count);
totalReceived += result.Count;
Assert.Equal(WebSocketMessageType.Binary, result.MessageType);
Assert.Equal(orriginalData, serverBuffer);
}))
{
using (var client = new ClientWebSocket())
{
await client.ConnectAsync(new Uri($"ws://127.0.0.1:{port}/"), CancellationToken.None);
await client.SendAsync(new ArraySegment<byte>(orriginalData, 0, 2), WebSocketMessageType.Binary, false, CancellationToken.None);
// Wait for the server to observe the fragment before replacing the TCS; the server
// only signals after its corresponding receive, so the swap cannot race it.
await tcs.Task;
tcs = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously);
await client.SendAsync(new ArraySegment<byte>(orriginalData, 2, 2), WebSocketMessageType.Binary, false, CancellationToken.None);
await tcs.Task;
tcs = new TaskCompletionSource(TaskCreationOptions.RunContinuationsAsynchronously);
await client.SendAsync(new ArraySegment<byte>(orriginalData, 4, 7), WebSocketMessageType.Binary, true, CancellationToken.None);
}
}
}
[Fact]
public async Task ReceiveShortData_Success()
{
    // Server pushes a short binary message; the client must read it in one receive.
    var payload = Encoding.UTF8.GetBytes("Hello World");
    await using var server = KestrelWebSocketHelpers.CreateServer(LoggerFactory, out var port, async context =>
    {
        Assert.True(context.WebSockets.IsWebSocketRequest);
        var webSocket = await context.WebSockets.AcceptWebSocketAsync();
        await webSocket.SendAsync(new ArraySegment<byte>(payload), WebSocketMessageType.Binary, true, CancellationToken.None);
    });

    using var client = new ClientWebSocket();
    await client.ConnectAsync(new Uri($"ws://127.0.0.1:{port}/"), CancellationToken.None);
    var clientBuffer = new byte[payload.Length];
    var result = await client.ReceiveAsync(new ArraySegment<byte>(clientBuffer), CancellationToken.None);
    Assert.True(result.EndOfMessage);
    Assert.Equal(payload.Length, result.Count);
    Assert.Equal(WebSocketMessageType.Binary, result.MessageType);
    Assert.Equal(payload, clientBuffer);
}
[Fact]
public async Task ReceiveMediumData_Success()
{
    // 130-byte server-to-client message received into an exactly-sized buffer.
    var payload = Encoding.UTF8.GetBytes(new string('a', 130));
    await using var server = KestrelWebSocketHelpers.CreateServer(LoggerFactory, out var port, async context =>
    {
        Assert.True(context.WebSockets.IsWebSocketRequest);
        var webSocket = await context.WebSockets.AcceptWebSocketAsync();
        await webSocket.SendAsync(new ArraySegment<byte>(payload), WebSocketMessageType.Binary, true, CancellationToken.None);
    });

    using var client = new ClientWebSocket();
    await client.ConnectAsync(new Uri($"ws://127.0.0.1:{port}/"), CancellationToken.None);
    var clientBuffer = new byte[payload.Length];
    var result = await client.ReceiveAsync(new ArraySegment<byte>(clientBuffer), CancellationToken.None);
    Assert.True(result.EndOfMessage);
    Assert.Equal(payload.Length, result.Count);
    Assert.Equal(WebSocketMessageType.Binary, result.MessageType);
    Assert.Equal(payload, clientBuffer);
}
[Fact]
public async Task ReceiveLongData()
{
    // A 0x1FFFF-byte message may arrive across several receives; accumulate until
    // EndOfMessage and verify the reassembled bytes.
    var payload = Encoding.UTF8.GetBytes(new string('a', 0x1FFFF));
    await using var server = KestrelWebSocketHelpers.CreateServer(LoggerFactory, out var port, async context =>
    {
        Assert.True(context.WebSockets.IsWebSocketRequest);
        var webSocket = await context.WebSockets.AcceptWebSocketAsync();
        await webSocket.SendAsync(new ArraySegment<byte>(payload), WebSocketMessageType.Binary, true, CancellationToken.None);
    });

    using var client = new ClientWebSocket();
    await client.ConnectAsync(new Uri($"ws://127.0.0.1:{port}/"), CancellationToken.None);
    var clientBuffer = new byte[payload.Length];
    var received = 0;
    WebSocketReceiveResult result;
    do
    {
        result = await client.ReceiveAsync(new ArraySegment<byte>(clientBuffer, received, clientBuffer.Length - received), CancellationToken.None);
        received += result.Count;
        Assert.Equal(WebSocketMessageType.Binary, result.MessageType);
    }
    while (!result.EndOfMessage);

    Assert.Equal(payload.Length, received);
    Assert.Equal(WebSocketMessageType.Binary, result.MessageType);
    Assert.Equal(payload, clientBuffer);
}
[Fact]
public async Task ReceiveFragmentedData_Success()
{
    // Server sends "Hello World" as three fragments (2 + 2 + 7 bytes); the client
    // must observe matching partial receives, EndOfMessage only on the last one.
    var payload = Encoding.UTF8.GetBytes("Hello World");
    await using var server = KestrelWebSocketHelpers.CreateServer(LoggerFactory, out var port, async context =>
    {
        Assert.True(context.WebSockets.IsWebSocketRequest);
        var webSocket = await context.WebSockets.AcceptWebSocketAsync();
        await webSocket.SendAsync(new ArraySegment<byte>(payload, 0, 2), WebSocketMessageType.Binary, false, CancellationToken.None);
        await webSocket.SendAsync(new ArraySegment<byte>(payload, 2, 2), WebSocketMessageType.Binary, false, CancellationToken.None);
        await webSocket.SendAsync(new ArraySegment<byte>(payload, 4, 7), WebSocketMessageType.Binary, true, CancellationToken.None);
    });

    using var client = new ClientWebSocket();
    await client.ConnectAsync(new Uri($"ws://127.0.0.1:{port}/"), CancellationToken.None);
    var clientBuffer = new byte[payload.Length];
    var fragmentSizes = new[] { 2, 2, 7 };
    var offset = 0;
    for (var i = 0; i < fragmentSizes.Length; i++)
    {
        var result = await client.ReceiveAsync(
            new ArraySegment<byte>(clientBuffer, offset, clientBuffer.Length - offset), CancellationToken.None);
        // Only the last fragment completes the message.
        Assert.Equal(i == fragmentSizes.Length - 1, result.EndOfMessage);
        Assert.Equal(fragmentSizes[i], result.Count);
        Assert.Equal(WebSocketMessageType.Binary, result.MessageType);
        offset += result.Count;
    }
    Assert.Equal(payload, clientBuffer);
}
[Fact]
public async Task SendClose_Success()
{
    // Client sends a close frame; the server should see a Close message carrying
    // the status and description, and the client transitions to CloseSent.
    const string closeDescription = "Test Closed";
    await using var server = KestrelWebSocketHelpers.CreateServer(LoggerFactory, out var port, async context =>
    {
        Assert.True(context.WebSockets.IsWebSocketRequest);
        var webSocket = await context.WebSockets.AcceptWebSocketAsync();
        var serverBuffer = new byte[1024];
        var result = await webSocket.ReceiveAsync(new ArraySegment<byte>(serverBuffer), CancellationToken.None);
        Assert.True(result.EndOfMessage);
        Assert.Equal(0, result.Count);
        Assert.Equal(WebSocketMessageType.Close, result.MessageType);
        Assert.Equal(WebSocketCloseStatus.NormalClosure, result.CloseStatus);
        Assert.Equal(closeDescription, result.CloseStatusDescription);
    });

    using var client = new ClientWebSocket();
    await client.ConnectAsync(new Uri($"ws://127.0.0.1:{port}/"), CancellationToken.None);
    // CloseOutputAsync sends our close frame without waiting for the peer's.
    await client.CloseOutputAsync(WebSocketCloseStatus.NormalClosure, closeDescription, CancellationToken.None);
    Assert.Equal(WebSocketState.CloseSent, client.State);
}
[Fact]
public async Task ReceiveClose_Success()
{
    // Server initiates the close; the client should observe a Close message with the
    // status/description and transition to CloseReceived.
    const string closeDescription = "Test Closed";
    await using var server = KestrelWebSocketHelpers.CreateServer(LoggerFactory, out var port, async context =>
    {
        Assert.True(context.WebSockets.IsWebSocketRequest);
        var webSocket = await context.WebSockets.AcceptWebSocketAsync();
        await webSocket.CloseOutputAsync(WebSocketCloseStatus.NormalClosure, closeDescription, CancellationToken.None);
    });

    using var client = new ClientWebSocket();
    await client.ConnectAsync(new Uri($"ws://127.0.0.1:{port}/"), CancellationToken.None);
    var clientBuffer = new byte[1024];
    var result = await client.ReceiveAsync(new ArraySegment<byte>(clientBuffer), CancellationToken.None);
    Assert.True(result.EndOfMessage);
    Assert.Equal(0, result.Count);
    Assert.Equal(WebSocketMessageType.Close, result.MessageType);
    Assert.Equal(WebSocketCloseStatus.NormalClosure, result.CloseStatus);
    Assert.Equal(closeDescription, result.CloseStatusDescription);
    Assert.Equal(WebSocketState.CloseReceived, client.State);
}
[Fact]
public async Task CloseFromOpen_Success()
{
    // Full close handshake initiated by the client from the Open state.
    const string closeDescription = "Test Closed";
    await using var server = KestrelWebSocketHelpers.CreateServer(LoggerFactory, out var port, async context =>
    {
        Assert.True(context.WebSockets.IsWebSocketRequest);
        var webSocket = await context.WebSockets.AcceptWebSocketAsync();
        var serverBuffer = new byte[1024];
        var result = await webSocket.ReceiveAsync(new ArraySegment<byte>(serverBuffer), CancellationToken.None);
        Assert.True(result.EndOfMessage);
        Assert.Equal(0, result.Count);
        Assert.Equal(WebSocketMessageType.Close, result.MessageType);
        Assert.Equal(WebSocketCloseStatus.NormalClosure, result.CloseStatus);
        Assert.Equal(closeDescription, result.CloseStatusDescription);
        // Echo the close back to complete the handshake from the server side.
        await webSocket.CloseAsync(result.CloseStatus.Value, result.CloseStatusDescription, CancellationToken.None);
    });

    using var client = new ClientWebSocket();
    await client.ConnectAsync(new Uri($"ws://127.0.0.1:{port}/"), CancellationToken.None);
    // CloseAsync sends our close frame and waits for the server's reply.
    await client.CloseAsync(WebSocketCloseStatus.NormalClosure, closeDescription, CancellationToken.None);
    Assert.Equal(WebSocketState.Closed, client.State);
}
[Fact]
public async Task CloseFromCloseSent_Success()
{
    // CloseOutputAsync first (CloseSent), then CloseAsync to finish the handshake.
    const string closeDescription = "Test Closed";
    await using var server = KestrelWebSocketHelpers.CreateServer(LoggerFactory, out var port, async context =>
    {
        Assert.True(context.WebSockets.IsWebSocketRequest);
        var webSocket = await context.WebSockets.AcceptWebSocketAsync();
        var serverBuffer = new byte[1024];
        var result = await webSocket.ReceiveAsync(new ArraySegment<byte>(serverBuffer), CancellationToken.None);
        Assert.True(result.EndOfMessage);
        Assert.Equal(0, result.Count);
        Assert.Equal(WebSocketMessageType.Close, result.MessageType);
        Assert.Equal(WebSocketCloseStatus.NormalClosure, result.CloseStatus);
        Assert.Equal(closeDescription, result.CloseStatusDescription);
        await webSocket.CloseAsync(result.CloseStatus.Value, result.CloseStatusDescription, CancellationToken.None);
    });

    using var client = new ClientWebSocket();
    await client.ConnectAsync(new Uri($"ws://127.0.0.1:{port}/"), CancellationToken.None);
    // First half of the handshake: our close frame is out, none received yet.
    await client.CloseOutputAsync(WebSocketCloseStatus.NormalClosure, closeDescription, CancellationToken.None);
    Assert.Equal(WebSocketState.CloseSent, client.State);
    // From CloseSent, CloseAsync waits for the server's answering close frame.
    await client.CloseAsync(WebSocketCloseStatus.NormalClosure, closeDescription, CancellationToken.None);
    Assert.Equal(WebSocketState.Closed, client.State);
}
[Fact]
public async Task CloseFromCloseReceived_Success()
{
    // Server initiates; the client reaches CloseReceived and then completes the
    // handshake with its own CloseAsync.
    const string closeDescription = "Test Closed";
    await using var server = KestrelWebSocketHelpers.CreateServer(LoggerFactory, out var port, async context =>
    {
        Assert.True(context.WebSockets.IsWebSocketRequest);
        var webSocket = await context.WebSockets.AcceptWebSocketAsync();
        // Send the close first, then wait for the client's answering frame.
        await webSocket.CloseOutputAsync(WebSocketCloseStatus.NormalClosure, closeDescription, CancellationToken.None);
        var serverBuffer = new byte[1024];
        var result = await webSocket.ReceiveAsync(new ArraySegment<byte>(serverBuffer), CancellationToken.None);
        Assert.True(result.EndOfMessage);
        Assert.Equal(0, result.Count);
        Assert.Equal(WebSocketMessageType.Close, result.MessageType);
        Assert.Equal(WebSocketCloseStatus.NormalClosure, result.CloseStatus);
        Assert.Equal(closeDescription, result.CloseStatusDescription);
    });

    using var client = new ClientWebSocket();
    await client.ConnectAsync(new Uri($"ws://127.0.0.1:{port}/"), CancellationToken.None);
    var clientBuffer = new byte[1024];
    var result = await client.ReceiveAsync(new ArraySegment<byte>(clientBuffer), CancellationToken.None);
    Assert.True(result.EndOfMessage);
    Assert.Equal(0, result.Count);
    Assert.Equal(WebSocketMessageType.Close, result.MessageType);
    Assert.Equal(WebSocketCloseStatus.NormalClosure, result.CloseStatus);
    Assert.Equal(closeDescription, result.CloseStatusDescription);
    Assert.Equal(WebSocketState.CloseReceived, client.State);
    // From CloseReceived, CloseAsync only needs to send our close frame.
    await client.CloseAsync(result.CloseStatus.Value, result.CloseStatusDescription, CancellationToken.None);
    Assert.Equal(WebSocketState.Closed, client.State);
}
[Theory]
[InlineData(HttpStatusCode.OK, null)]
[InlineData(HttpStatusCode.Forbidden, "")]
[InlineData(HttpStatusCode.Forbidden, "http://e.com")]
[InlineData(HttpStatusCode.OK, "http://e.com", "http://example.com")]
[InlineData(HttpStatusCode.OK, "*")]
[InlineData(HttpStatusCode.OK, "http://e.com", "*")]
[InlineData(HttpStatusCode.OK, "http://ExAmPLE.cOm")]
public async Task OriginIsValidatedForWebSocketRequests(HttpStatusCode expectedCode, params string[] origins)
{
// Every request below carries "Origin: http://example.com". Per the cases above:
// no allow-list configured => allowed; a list without a match => 403 Forbidden; a
// list containing the origin, "*", or a case-different match => allowed.
await using (var server = KestrelWebSocketHelpers.CreateServer(LoggerFactory, out var port, context =>
{
Assert.True(context.WebSockets.IsWebSocketRequest);
return Task.CompletedTask;
},
o =>
{
if (origins != null)
{
foreach (var origin in origins)
{
o.AllowedOrigins.Add(origin);
}
}
}))
{
// Raw HttpClient (not ClientWebSocket) so the upgrade headers can be shaped by hand.
using (var client = new HttpClient())
{
var uri = new UriBuilder(new Uri($"ws://127.0.0.1:{port}/"));
uri.Scheme = "http";
// Craft a valid WebSocket Upgrade request
using (var request = new HttpRequestMessage(HttpMethod.Get, uri.ToString()))
{
request.Headers.Connection.Clear();
request.Headers.Connection.Add("Upgrade");
request.Headers.Upgrade.Add(new System.Net.Http.Headers.ProductHeaderValue("websocket"));
request.Headers.Add(HeaderNames.SecWebSocketVersion, "13");
// SecWebSocketKey required to be 16 bytes
request.Headers.Add(HeaderNames.SecWebSocketKey, Convert.ToBase64String(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 }, Base64FormattingOptions.None));
request.Headers.Add(HeaderNames.Origin, "http://example.com");
var response = await client.SendAsync(request);
Assert.Equal(expectedCode, response.StatusCode);
}
}
}
}
[Fact]
public async Task OriginIsNotValidatedForNonWebSocketRequests()
{
    // A plain GET with no upgrade headers must bypass the origin allow-list entirely,
    // even though its Origin does not match the configured value.
    await using var server = KestrelWebSocketHelpers.CreateServer(LoggerFactory, out var port, context =>
    {
        Assert.False(context.WebSockets.IsWebSocketRequest);
        return Task.CompletedTask;
    },
    o => o.AllowedOrigins.Add("http://example.com"));

    using var client = new HttpClient();
    var uri = new UriBuilder(new Uri($"ws://127.0.0.1:{port}/"));
    uri.Scheme = "http";
    using var request = new HttpRequestMessage(HttpMethod.Get, uri.ToString());
    request.Headers.Add("Origin", "http://notexample.com");
    var response = await client.SendAsync(request);
    Assert.Equal(HttpStatusCode.OK, response.StatusCode);
}
[Fact]
public async Task CommonHeadersAreSetToInternedStrings()
{
// Asserts reference identity (Assert.Same) between the request's handshake header
// values and the shared constants — presumably so the handshake path reuses the
// known constant instances instead of allocating new strings per request;
// NOTE(review): confirm against the middleware implementation.
await using (var server = KestrelWebSocketHelpers.CreateServer(LoggerFactory, out var port, async context =>
{
Assert.True(context.WebSockets.IsWebSocketRequest);
var webSocket = await context.WebSockets.AcceptWebSocketAsync();
// Use ReferenceEquals and test against the constants
Assert.Same(HeaderNames.Upgrade, context.Request.Headers.Connection.ToString());
Assert.Same(Constants.Headers.UpgradeWebSocket, context.Request.Headers.Upgrade.ToString());
Assert.Same(Constants.Headers.SupportedVersion, context.Request.Headers.SecWebSocketVersion.ToString());
}))
{
using (var client = new ClientWebSocket())
{
await client.ConnectAsync(new Uri($"ws://127.0.0.1:{port}/"), CancellationToken.None);
}
}
}
[Fact]
public async Task MultipleValueHeadersNotOverridden()
{
// When Connection/Upgrade carry extra values ("keep-alive", "example") alongside the
// WebSocket ones, the handshake must preserve the full comma-separated lists rather
// than replacing them with the canonical single values.
await using (var server = KestrelWebSocketHelpers.CreateServer(LoggerFactory, out var port, async context =>
{
Assert.True(context.WebSockets.IsWebSocketRequest);
var webSocket = await context.WebSockets.AcceptWebSocketAsync();
Assert.Equal("Upgrade, keep-alive", context.Request.Headers.Connection.ToString());
Assert.Equal("websocket, example", context.Request.Headers.Upgrade.ToString());
}))
{
// Raw HttpClient so multiple Connection/Upgrade values can be set explicitly.
using (var client = new HttpClient())
{
var uri = new UriBuilder(new Uri($"ws://127.0.0.1:{port}/"));
uri.Scheme = "http";
// Craft a valid WebSocket Upgrade request
using (var request = new HttpRequestMessage(HttpMethod.Get, uri.ToString()))
{
request.Headers.Connection.Clear();
request.Headers.Connection.Add("Upgrade");
request.Headers.Connection.Add("keep-alive");
request.Headers.Upgrade.Add(new System.Net.Http.Headers.ProductHeaderValue("websocket"));
request.Headers.Upgrade.Add(new System.Net.Http.Headers.ProductHeaderValue("example"));
request.Headers.Add(HeaderNames.SecWebSocketVersion, "13");
// SecWebSocketKey required to be 16 bytes
request.Headers.Add(HeaderNames.SecWebSocketKey, Convert.ToBase64String(new byte[] { 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16 }, Base64FormattingOptions.None));
var response = await client.SendAsync(request);
Assert.Equal(HttpStatusCode.SwitchingProtocols, response.StatusCode);
}
}
}
}
}
}
| |
using System;
using System.CodeDom.Compiler;
using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;
using System.Globalization;
using System.Linq;
using System.Text;
namespace EduHub.Data.Entities
{
/// <summary>
/// Debtor Notes Data Set
/// </summary>
/// <remarks>
/// Generated code ("EduHub Data" 0.9): prefer regenerating over hand-editing.
/// Provides CSV deserialization, delta merging, lazily-built lookup indexes and
/// SQL Server integration for the KNOTE_DR (Debtor Notes) table.
/// </remarks>
[GeneratedCode("EduHub Data", "0.9")]
public sealed partial class KNOTE_DRDataSet : EduHubDataSet<KNOTE_DR>
{
/// <inheritdoc />
public override string Name { get { return "KNOTE_DR"; } }
/// <inheritdoc />
public override bool SupportsEntityLastModified { get { return true; } }
/// <summary>
/// Creates the KNOTE_DR data set; indexes are built lazily on first lookup.
/// </summary>
/// <param name="Context">The owning <see cref="EduHubContext"/></param>
internal KNOTE_DRDataSet(EduHubContext Context)
: base(Context)
{
// CODE is non-unique (grouped lookup); TID is the unique primary key.
Index_CODE = new Lazy<Dictionary<string, IReadOnlyList<KNOTE_DR>>>(() => this.ToGroupedDictionary(i => i.CODE));
Index_TID = new Lazy<Dictionary<int, KNOTE_DR>>(() => this.ToDictionary(i => i.TID));
}
/// <summary>
/// Matches CSV file headers to actions, used to deserialize <see cref="KNOTE_DR" />
/// </summary>
/// <param name="Headers">The CSV column headers</param>
/// <returns>An array of actions which deserialize <see cref="KNOTE_DR" /> fields for each CSV column header</returns>
internal override Action<KNOTE_DR, string>[] BuildMapper(IReadOnlyList<string> Headers)
{
var mapper = new Action<KNOTE_DR, string>[Headers.Count];
for (var i = 0; i < Headers.Count; i++) {
switch (Headers[i]) {
case "TID":
mapper[i] = (e, v) => e.TID = int.Parse(v);
break;
case "CODE":
mapper[i] = (e, v) => e.CODE = v;
break;
case "NOTE_DATE":
// Dates are exported as "d/MM/yyyy h:mm:ss tt"; parsed culture-invariantly.
mapper[i] = (e, v) => e.NOTE_DATE = v == null ? (DateTime?)null : DateTime.ParseExact(v, "d/MM/yyyy h:mm:ss tt", CultureInfo.InvariantCulture);
break;
case "NOTE_MEMO":
mapper[i] = (e, v) => e.NOTE_MEMO = v;
break;
case "LW_DATE":
mapper[i] = (e, v) => e.LW_DATE = v == null ? (DateTime?)null : DateTime.ParseExact(v, "d/MM/yyyy h:mm:ss tt", CultureInfo.InvariantCulture);
break;
case "LW_TIME":
mapper[i] = (e, v) => e.LW_TIME = v == null ? (short?)null : short.Parse(v);
break;
case "LW_USER":
mapper[i] = (e, v) => e.LW_USER = v;
break;
default:
// Unrecognised columns are silently ignored.
mapper[i] = MapperNoOp;
break;
}
}
return mapper;
}
/// <summary>
/// Merges <see cref="KNOTE_DR" /> delta entities
/// </summary>
/// <param name="Entities">Iterator for base <see cref="KNOTE_DR" /> entities</param>
/// <param name="DeltaEntities">List of delta <see cref="KNOTE_DR" /> entities</param>
/// <returns>A merged <see cref="IEnumerable{KNOTE_DR}"/> of entities</returns>
internal override IEnumerable<KNOTE_DR> ApplyDeltaEntities(IEnumerable<KNOTE_DR> Entities, List<KNOTE_DR> DeltaEntities)
{
// Base entities whose TID appears in the delta are superseded and skipped below.
HashSet<int> Index_TID = new HashSet<int>(DeltaEntities.Select(i => i.TID));
using (var deltaIterator = DeltaEntities.GetEnumerator())
{
using (var entityIterator = Entities.GetEnumerator())
{
while (deltaIterator.MoveNext())
{
// Merge on the clustered key (CODE): emit base entities up to and including the
// delta's position, then the delta entity itself.
var deltaClusteredKey = deltaIterator.Current.CODE;
bool yieldEntity = false;
while (entityIterator.MoveNext())
{
var entity = entityIterator.Current;
bool overwritten = Index_TID.Remove(entity.TID);
if (entity.CODE.CompareTo(deltaClusteredKey) <= 0)
{
if (!overwritten)
{
yield return entity;
}
}
else
{
// Passed the insertion point; remember whether the current base entity
// still needs to be emitted after the delta entity.
yieldEntity = !overwritten;
break;
}
}
yield return deltaIterator.Current;
if (yieldEntity)
{
yield return entityIterator.Current;
}
}
// Emit any base entities remaining after the last delta.
while (entityIterator.MoveNext())
{
yield return entityIterator.Current;
}
}
}
}
#region Index Fields
private Lazy<Dictionary<string, IReadOnlyList<KNOTE_DR>>> Index_CODE;
private Lazy<Dictionary<int, KNOTE_DR>> Index_TID;
#endregion
#region Index Methods
/// <summary>
/// Find KNOTE_DR by CODE field
/// </summary>
/// <param name="CODE">CODE value used to find KNOTE_DR</param>
/// <returns>List of related KNOTE_DR entities</returns>
/// <exception cref="KeyNotFoundException">No match was found</exception>
public IReadOnlyList<KNOTE_DR> FindByCODE(string CODE)
{
return Index_CODE.Value[CODE];
}
/// <summary>
/// Attempt to find KNOTE_DR by CODE field
/// </summary>
/// <param name="CODE">CODE value used to find KNOTE_DR</param>
/// <param name="Value">List of related KNOTE_DR entities</param>
/// <returns>True if the list of related KNOTE_DR entities is found</returns>
public bool TryFindByCODE(string CODE, out IReadOnlyList<KNOTE_DR> Value)
{
return Index_CODE.Value.TryGetValue(CODE, out Value);
}
/// <summary>
/// Attempt to find KNOTE_DR by CODE field
/// </summary>
/// <param name="CODE">CODE value used to find KNOTE_DR</param>
/// <returns>List of related KNOTE_DR entities, or null if not found</returns>
public IReadOnlyList<KNOTE_DR> TryFindByCODE(string CODE)
{
IReadOnlyList<KNOTE_DR> value;
if (Index_CODE.Value.TryGetValue(CODE, out value))
{
return value;
}
else
{
return null;
}
}
/// <summary>
/// Find KNOTE_DR by TID field
/// </summary>
/// <param name="TID">TID value used to find KNOTE_DR</param>
/// <returns>Related KNOTE_DR entity</returns>
/// <exception cref="KeyNotFoundException">No match was found</exception>
public KNOTE_DR FindByTID(int TID)
{
return Index_TID.Value[TID];
}
/// <summary>
/// Attempt to find KNOTE_DR by TID field
/// </summary>
/// <param name="TID">TID value used to find KNOTE_DR</param>
/// <param name="Value">Related KNOTE_DR entity</param>
/// <returns>True if the related KNOTE_DR entity is found</returns>
public bool TryFindByTID(int TID, out KNOTE_DR Value)
{
return Index_TID.Value.TryGetValue(TID, out Value);
}
/// <summary>
/// Attempt to find KNOTE_DR by TID field
/// </summary>
/// <param name="TID">TID value used to find KNOTE_DR</param>
/// <returns>Related KNOTE_DR entity, or null if not found</returns>
public KNOTE_DR TryFindByTID(int TID)
{
KNOTE_DR value;
if (Index_TID.Value.TryGetValue(TID, out value))
{
return value;
}
else
{
return null;
}
}
#endregion
#region SQL Integration
/// <summary>
/// Returns a <see cref="SqlCommand"/> which checks for the existence of a KNOTE_DR table, and if not found, creates the table and associated indexes.
/// </summary>
/// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
public override SqlCommand GetSqlCreateTableCommand(SqlConnection SqlConnection)
{
return new SqlCommand(
connection: SqlConnection,
cmdText:
@"IF NOT EXISTS (SELECT * FROM dbo.sysobjects WHERE id = OBJECT_ID(N'[dbo].[KNOTE_DR]') AND OBJECTPROPERTY(id, N'IsUserTable') = 1)
BEGIN
CREATE TABLE [dbo].[KNOTE_DR](
[TID] int IDENTITY NOT NULL,
[CODE] varchar(10) NOT NULL,
[NOTE_DATE] datetime NULL,
[NOTE_MEMO] varchar(MAX) NULL,
[LW_DATE] datetime NULL,
[LW_TIME] smallint NULL,
[LW_USER] varchar(128) NULL,
CONSTRAINT [KNOTE_DR_Index_TID] PRIMARY KEY NONCLUSTERED (
[TID] ASC
)
);
CREATE CLUSTERED INDEX [KNOTE_DR_Index_CODE] ON [dbo].[KNOTE_DR]
(
[CODE] ASC
);
END");
}
/// <summary>
/// Returns a <see cref="SqlCommand"/> which disables all non-clustered table indexes.
/// Typically called before <see cref="SqlBulkCopy"/> to improve performance.
/// <see cref="GetSqlRebuildIndexesCommand(SqlConnection)"/> should be called to rebuild and enable indexes after performance sensitive work is completed.
/// </summary>
/// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
/// <returns>A <see cref="SqlCommand"/> which (when executed) will disable all non-clustered table indexes</returns>
public override SqlCommand GetSqlDisableIndexesCommand(SqlConnection SqlConnection)
{
return new SqlCommand(
connection: SqlConnection,
cmdText:
@"IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[KNOTE_DR]') AND name = N'KNOTE_DR_Index_TID')
    ALTER INDEX [KNOTE_DR_Index_TID] ON [dbo].[KNOTE_DR] DISABLE;
");
}
/// <summary>
/// Returns a <see cref="SqlCommand"/> which rebuilds and enables all non-clustered table indexes.
/// </summary>
/// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
/// <returns>A <see cref="SqlCommand"/> which (when executed) will rebuild and enable all non-clustered table indexes</returns>
public override SqlCommand GetSqlRebuildIndexesCommand(SqlConnection SqlConnection)
{
return new SqlCommand(
connection: SqlConnection,
cmdText:
@"IF EXISTS (SELECT * FROM dbo.sysindexes WHERE id = OBJECT_ID(N'[dbo].[KNOTE_DR]') AND name = N'KNOTE_DR_Index_TID')
    ALTER INDEX [KNOTE_DR_Index_TID] ON [dbo].[KNOTE_DR] REBUILD PARTITION = ALL;
");
}
/// <summary>
/// Returns a <see cref="SqlCommand"/> which deletes the <see cref="KNOTE_DR"/> entities passed
/// </summary>
/// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
/// <param name="Entities">The <see cref="KNOTE_DR"/> entities to be deleted</param>
public override SqlCommand GetSqlDeleteCommand(SqlConnection SqlConnection, IEnumerable<KNOTE_DR> Entities)
{
SqlCommand command = new SqlCommand();
int parameterIndex = 0;
StringBuilder builder = new StringBuilder();
List<int> Index_TID = new List<int>();
foreach (var entity in Entities)
{
Index_TID.Add(entity.TID);
}
// Parameterized IN (...) list over the primary key.
builder.AppendLine("DELETE [dbo].[KNOTE_DR] WHERE");
// Index_TID
builder.Append("[TID] IN (");
for (int index = 0; index < Index_TID.Count; index++)
{
if (index != 0)
builder.Append(", ");
// TID
var parameterTID = $"@p{parameterIndex++}";
builder.Append(parameterTID);
command.Parameters.Add(parameterTID, SqlDbType.Int).Value = Index_TID[index];
}
builder.Append(");");
command.Connection = SqlConnection;
command.CommandText = builder.ToString();
return command;
}
/// <summary>
/// Provides a <see cref="IDataReader"/> for the KNOTE_DR data set
/// </summary>
/// <returns>A <see cref="IDataReader"/> for the KNOTE_DR data set</returns>
public override EduHubDataSetDataReader<KNOTE_DR> GetDataSetDataReader()
{
return new KNOTE_DRDataReader(Load());
}
/// <summary>
/// Provides a <see cref="IDataReader"/> for the KNOTE_DR data set
/// </summary>
/// <returns>A <see cref="IDataReader"/> for the KNOTE_DR data set</returns>
public override EduHubDataSetDataReader<KNOTE_DR> GetDataSetDataReader(List<KNOTE_DR> Entities)
{
return new KNOTE_DRDataReader(new EduHubDataSetLoadedReader<KNOTE_DR>(this, Entities));
}
// Modest implementation to primarily support SqlBulkCopy
private class KNOTE_DRDataReader : EduHubDataSetDataReader<KNOTE_DR>
{
public KNOTE_DRDataReader(IEduHubDataSetReader<KNOTE_DR> Reader)
: base (Reader)
{
}
// Column ordinals match the CREATE TABLE column order above.
public override int FieldCount { get { return 7; } }
public override object GetValue(int i)
{
switch (i)
{
case 0: // TID
return Current.TID;
case 1: // CODE
return Current.CODE;
case 2: // NOTE_DATE
return Current.NOTE_DATE;
case 3: // NOTE_MEMO
return Current.NOTE_MEMO;
case 4: // LW_DATE
return Current.LW_DATE;
case 5: // LW_TIME
return Current.LW_TIME;
case 6: // LW_USER
return Current.LW_USER;
default:
throw new ArgumentOutOfRangeException(nameof(i));
}
}
public override bool IsDBNull(int i)
{
switch (i)
{
case 2: // NOTE_DATE
return Current.NOTE_DATE == null;
case 3: // NOTE_MEMO
return Current.NOTE_MEMO == null;
case 4: // LW_DATE
return Current.LW_DATE == null;
case 5: // LW_TIME
return Current.LW_TIME == null;
case 6: // LW_USER
return Current.LW_USER == null;
default:
// TID and CODE are NOT NULL columns.
return false;
}
}
public override string GetName(int ordinal)
{
switch (ordinal)
{
case 0: // TID
return "TID";
case 1: // CODE
return "CODE";
case 2: // NOTE_DATE
return "NOTE_DATE";
case 3: // NOTE_MEMO
return "NOTE_MEMO";
case 4: // LW_DATE
return "LW_DATE";
case 5: // LW_TIME
return "LW_TIME";
case 6: // LW_USER
return "LW_USER";
default:
throw new ArgumentOutOfRangeException(nameof(ordinal));
}
}
public override int GetOrdinal(string name)
{
switch (name)
{
case "TID":
return 0;
case "CODE":
return 1;
case "NOTE_DATE":
return 2;
case "NOTE_MEMO":
return 3;
case "LW_DATE":
return 4;
case "LW_TIME":
return 5;
case "LW_USER":
return 6;
default:
throw new ArgumentOutOfRangeException(nameof(name));
}
}
}
#endregion
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.Editor.Shared.Utilities;
using Microsoft.CodeAnalysis.FindSymbols;
using Microsoft.CodeAnalysis.LanguageServices;
using Microsoft.CodeAnalysis.Rename;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Shared.TestHooks;
using Microsoft.CodeAnalysis.Text;
using Microsoft.VisualStudio.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.Editor.Implementation.RenameTracking
{
internal sealed partial class RenameTrackingTaggerProvider
{
        /// <summary>
        /// Classification of the token a rename tracking session was triggered on.
        /// </summary>
        internal enum TriggerIdentifierKind
        {
            // The token is not an identifier that can be renamed.
            NotRenamable,
            // The token is the declaration of a renamable symbol.
            RenamableDeclaration,
            // The token is a reference to a renamable symbol.
            RenamableReference,
        }
        /// <summary>
        /// Determines whether the original token was a renameable identifier on a background thread
        /// </summary>
        private class TrackingSession : ForegroundThreadAffinitizedObject
        {
            // Shared, already-completed task for the common "not renamable" answer.
            private static readonly Task<TriggerIdentifierKind> s_notRenamableTask = Task.FromResult(TriggerIdentifierKind.NotRenamable);
            // Background classification of the token this session started on.
            private readonly Task<TriggerIdentifierKind> _isRenamableIdentifierTask;
            private readonly CancellationTokenSource _cancellationTokenSource;
            private readonly CancellationToken _cancellationToken;
            private readonly IAsynchronousOperationListener _asyncListener;
            // Whether the edited identifier binds to an existing reference; updated by CheckNewIdentifier.
            private Task<bool> _newIdentifierBindsTask = SpecializedTasks.False;
            private readonly string _originalName;
            // The identifier text captured when the session began.
            public string OriginalName { get { return _originalName; } }
            private readonly ITrackingSpan _trackingSpan;
            // Span that follows the identifier as the buffer changes.
            public ITrackingSpan TrackingSpan { get { return _trackingSpan; } }
            private bool _forceRenameOverloads;
            // Set when the trigger token resolved to a member group (nameof), so overloads rename together.
            public bool ForceRenameOverloads { get { return _forceRenameOverloads; } }
            public TrackingSession(StateMachine stateMachine, SnapshotSpan snapshotSpan, IAsynchronousOperationListener asyncListener)
            {
                AssertIsForeground();
                _asyncListener = asyncListener;
                _trackingSpan = snapshotSpan.Snapshot.CreateTrackingSpan(snapshotSpan.Span, SpanTrackingMode.EdgeInclusive);
                _cancellationTokenSource = new CancellationTokenSource();
                _cancellationToken = _cancellationTokenSource.Token;
                if (snapshotSpan.Length > 0)
                {
                    // If the snapshotSpan is nonempty, then the session began with a change that
                    // was touching a word. Asynchronously determine whether that word was a
                    // renameable identifier. If it is, alert the state machine so it can trigger
                    // tagging.
                    _originalName = snapshotSpan.GetText();
                    _isRenamableIdentifierTask = Task.Factory.SafeStartNewFromAsync(
                        () => DetermineIfRenamableIdentifierAsync(snapshotSpan, initialCheck: true),
                        _cancellationToken,
                        TaskScheduler.Default);
                    var asyncToken = _asyncListener.BeginAsyncOperation(GetType().Name + ".UpdateTrackingSessionAfterIsRenamableIdentifierTask");
                    _isRenamableIdentifierTask.SafeContinueWith(
                        t => stateMachine.UpdateTrackingSessionIfRenamable(),
                        _cancellationToken,
                        TaskContinuationOptions.OnlyOnRanToCompletion,
                        ForegroundTaskScheduler).CompletesAsyncOperation(asyncToken);
                    QueueUpdateToStateMachine(stateMachine, _isRenamableIdentifierTask);
                }
                else
                {
                    // If the snapshotSpan is empty, that means text was added in a location that is
                    // not touching an existing word, which happens a fair amount when writing new
                    // code. In this case we already know that the user is not renaming an
                    // identifier.
                    _isRenamableIdentifierTask = s_notRenamableTask;
                }
            }
            /// <summary>
            /// After <paramref name="task"/> completes, notifies the state machine on the
            /// foreground thread if the session's token turned out to be renamable.
            /// </summary>
            private void QueueUpdateToStateMachine(StateMachine stateMachine, Task task)
            {
                var asyncToken = _asyncListener.BeginAsyncOperation($"{GetType().Name}.{nameof(QueueUpdateToStateMachine)}");
                task.SafeContinueWith(t =>
                {
                    AssertIsForeground();
                    if (_isRenamableIdentifierTask.Result != TriggerIdentifierKind.NotRenamable)
                    {
                        stateMachine.OnTrackingSessionUpdated(this);
                    }
                },
                _cancellationToken,
                TaskContinuationOptions.OnlyOnRanToCompletion,
                ForegroundTaskScheduler).CompletesAsyncOperation(asyncToken);
            }
            /// <summary>
            /// Starts a background check of whether the current (edited) identifier text binds
            /// to an existing renamable reference at the tracked span; result lands in
            /// _newIdentifierBindsTask and is reported to the state machine.
            /// </summary>
            internal void CheckNewIdentifier(StateMachine stateMachine, ITextSnapshot snapshot)
            {
                AssertIsForeground();
                _newIdentifierBindsTask = _isRenamableIdentifierTask.SafeContinueWithFromAsync(
                    async t => t.Result != TriggerIdentifierKind.NotRenamable &&
                               TriggerIdentifierKind.RenamableReference ==
                                    await DetermineIfRenamableIdentifierAsync(
                                        TrackingSpan.GetSpan(snapshot),
                                        initialCheck: false).ConfigureAwait(false),
                    _cancellationToken,
                    TaskContinuationOptions.OnlyOnRanToCompletion,
                    TaskScheduler.Default);
                QueueUpdateToStateMachine(stateMachine, _newIdentifierBindsTask);
            }
            /// <summary>
            /// Non-blocking check: true only if the classification already completed as renamable.
            /// </summary>
            internal bool IsDefinitelyRenamableIdentifier()
            {
                // This needs to be able to run on a background thread for the CodeFix
                return IsRenamableIdentifier(_isRenamableIdentifierTask, waitForResult: false, cancellationToken: CancellationToken.None);
            }
            // Cancels all outstanding background classification work for this session.
            public void Cancel()
            {
                AssertIsForeground();
                _cancellationTokenSource.Cancel();
            }
            /// <summary>
            /// Classifies the token at <paramref name="snapshotSpan"/>: not renamable, a renamable
            /// declaration, or a renamable reference.
            /// </summary>
            private async Task<TriggerIdentifierKind> DetermineIfRenamableIdentifierAsync(SnapshotSpan snapshotSpan, bool initialCheck)
            {
                AssertIsBackground();
                var document = snapshotSpan.Snapshot.GetOpenDocumentInCurrentContextWithChanges();
                if (document != null)
                {
                    var syntaxFactsService = document.Project.LanguageServices.GetService<ISyntaxFactsService>();
                    var syntaxTree = await document.GetSyntaxTreeAsync(_cancellationToken).ConfigureAwait(false);
                    var token = await syntaxTree.GetTouchingWordAsync(snapshotSpan.Start.Position, syntaxFactsService, _cancellationToken).ConfigureAwait(false);
                    // The OriginalName is determined with a simple textual check, so for a
                    // statement such as "Dim [x = 1" the textual check will return a name of "[x".
                    // The token found for "[x" is an identifier token, but only due to error
                    // recovery (the "[x" is actually in the trailing trivia). If the OriginalName
                    // found through the textual check has a different length than the span of the
                    // touching word, then we cannot perform a rename.
                    if (initialCheck && token.Span.Length != this.OriginalName.Length)
                    {
                        return TriggerIdentifierKind.NotRenamable;
                    }
                    var languageHeuristicsService = document.Project.LanguageServices.GetService<IRenameTrackingLanguageHeuristicsService>();
                    if (syntaxFactsService.IsIdentifier(token) && languageHeuristicsService.IsIdentifierValidForRenameTracking(token.Text))
                    {
                        var semanticModel = await document.GetSemanticModelForNodeAsync(token.Parent, _cancellationToken).ConfigureAwait(false);
                        var semanticFacts = document.GetLanguageService<ISemanticFactsService>();
                        var renameSymbolInfo = RenameUtilities.GetTokenRenameInfo(semanticFacts, semanticModel, token, _cancellationToken);
                        if (!renameSymbolInfo.HasSymbols)
                        {
                            return TriggerIdentifierKind.NotRenamable;
                        }
                        if (renameSymbolInfo.IsMemberGroup)
                        {
                            // This is a reference from a nameof expression. Allow the rename but set the RenameOverloads option
                            _forceRenameOverloads = true;
                            return await DetermineIfRenamableSymbolsAsync(renameSymbolInfo.Symbols, document, token).ConfigureAwait(false);
                        }
                        else
                        {
                            // We do not yet support renaming (inline rename or rename tracking) on
                            // named tuple elements.
                            if (renameSymbolInfo.Symbols.Single().ContainingType?.IsTupleType() == true)
                            {
                                return TriggerIdentifierKind.NotRenamable;
                            }
                            return await DetermineIfRenamableSymbolAsync(renameSymbolInfo.Symbols.Single(), document, token).ConfigureAwait(false);
                        }
                    }
                }
                return TriggerIdentifierKind.NotRenamable;
            }
            /// <summary>
            /// A member group is renamable only if every symbol in the group is defined in source.
            /// </summary>
            private async Task<TriggerIdentifierKind> DetermineIfRenamableSymbolsAsync(IEnumerable<ISymbol> symbols, Document document, SyntaxToken token)
            {
                foreach (var symbol in symbols)
                {
                    // Get the source symbol if possible
                    var sourceSymbol = await SymbolFinder.FindSourceDefinitionAsync(symbol, document.Project.Solution, _cancellationToken).ConfigureAwait(false) ?? symbol;
                    if (!sourceSymbol.IsFromSource())
                    {
                        return TriggerIdentifierKind.NotRenamable;
                    }
                }
                return TriggerIdentifierKind.RenamableReference;
            }
            /// <summary>
            /// A single symbol is renamable if it is defined in source (and is not an implicit
            /// tuple field); the token's location distinguishes declaration from reference.
            /// </summary>
            private async Task<TriggerIdentifierKind> DetermineIfRenamableSymbolAsync(ISymbol symbol, Document document, SyntaxToken token)
            {
                // Get the source symbol if possible
                var sourceSymbol = await SymbolFinder.FindSourceDefinitionAsync(symbol, document.Project.Solution, _cancellationToken).ConfigureAwait(false) ?? symbol;
                if (sourceSymbol.Kind == SymbolKind.Field &&
                    ((IFieldSymbol)sourceSymbol).ContainingType.IsTupleType &&
                    sourceSymbol.IsImplicitlyDeclared)
                {
                    // should not rename Item1, Item2...
                    // when user did not declare them in source.
                    return TriggerIdentifierKind.NotRenamable;
                }
                if (!sourceSymbol.IsFromSource())
                {
                    return TriggerIdentifierKind.NotRenamable;
                }
                return sourceSymbol.Locations.Any(loc => loc == token.GetLocation())
                    ? TriggerIdentifierKind.RenamableDeclaration
                    : TriggerIdentifierKind.RenamableReference;
            }
            /// <summary>
            /// Determines whether a rename may proceed for this session by comparing the edited
            /// text against the original identifier. Case sensitivity follows the language unless
            /// renaming a declaration, which always compares ordinally.
            /// </summary>
            internal bool CanInvokeRename(
                ISyntaxFactsService syntaxFactsService,
                IRenameTrackingLanguageHeuristicsService languageHeuristicsService,
                bool isSmartTagCheck,
                bool waitForResult,
                CancellationToken cancellationToken)
            {
                if (IsRenamableIdentifier(_isRenamableIdentifierTask, waitForResult, cancellationToken))
                {
                    var isRenamingDeclaration = _isRenamableIdentifierTask.Result == TriggerIdentifierKind.RenamableDeclaration;
                    var newName = TrackingSpan.GetText(TrackingSpan.TextBuffer.CurrentSnapshot);
                    var comparison = isRenamingDeclaration || syntaxFactsService.IsCaseSensitive ? StringComparison.Ordinal : StringComparison.OrdinalIgnoreCase;
                    if (!string.Equals(OriginalName, newName, comparison) &&
                        syntaxFactsService.IsValidIdentifier(newName) &&
                        languageHeuristicsService.IsIdentifierValidForRenameTracking(newName))
                    {
                        // At this point, we want to allow renaming if the user invoked Ctrl+. explicitly, but we
                        // want to avoid showing a smart tag if we're renaming a reference that binds to an existing
                        // symbol.
                        if (!isSmartTagCheck || isRenamingDeclaration || !NewIdentifierDefinitelyBindsToReference())
                        {
                            return true;
                        }
                    }
                }
                return false;
            }
            // True only when the background bind check finished successfully and answered yes.
            private bool NewIdentifierDefinitelyBindsToReference()
            {
                return _newIdentifierBindsTask.Status == TaskStatus.RanToCompletion && _newIdentifierBindsTask.Result;
            }
        }
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Buffers;
using System.Diagnostics;
using System.Runtime.CompilerServices;
using System.Threading;
namespace System.IO.Pipelines
{
    /// <summary>
    /// Default <see cref="IPipeWriter"/> and <see cref="IPipeReader"/> implementation.
    /// </summary>
    public class Pipe : IPipe, IPipeReader, IPipeWriter, IReadableBufferAwaiter, IWritableBufferAwaiter
    {
        // Maximum number of BufferSegment instances kept for reuse.
        private const int SegmentPoolSize = 16;
        // Cached delegates so scheduling does not allocate per call.
        private static readonly Action<object> _signalReaderAwaitable = state => ((Pipe)state).ReaderCancellationRequested();
        private static readonly Action<object> _signalWriterAwaitable = state => ((Pipe)state).WriterCancellationRequested();
        private static readonly Action<object> _invokeCompletionCallbacks = state => ((PipeCompletionCallbacks)state).Execute();
        private static readonly Action<object> _scheduleContinuation = o => ((Action)o)();
        // This sync objects protects the following state:
        // 1. _commitHead & _commitHeadIndex
        // 2. _length
        // 3. _readerAwaitable & _writerAwaitable
        private readonly object _sync = new object();
        private readonly BufferPool _pool;
        // Backpressure thresholds: writer pauses at/above the high watermark and
        // resumes once the unconsumed length drops below the low watermark.
        private readonly long _maximumSizeHigh;
        private readonly long _maximumSizeLow;
        private readonly IScheduler _readerScheduler;
        private readonly IScheduler _writerScheduler;
        // Number of committed-but-unconsumed bytes in the pipe.
        private long _length;
        private long _currentWriteLength;
        private int _pooledSegmentCount;
        private PipeAwaitable _readerAwaitable;
        private PipeAwaitable _writerAwaitable;
        private PipeCompletion _writerCompletion;
        private PipeCompletion _readerCompletion;
        private BufferSegment[] _bufferSegmentPool;
        // The read head which is the extent of the IPipelineReader's consumed bytes
        private BufferSegment _readHead;
        // The commit head which is the extent of the bytes available to the IPipelineReader to consume
        private BufferSegment _commitHead;
        private int _commitHeadIndex;
        // The write head which is the extent of the IPipelineWriter's written bytes
        private BufferSegment _writingHead;
        private PipeOperationState _readingState;
        private PipeOperationState _writingState;
        private bool _disposed;
        internal long Length => _length;
        /// <summary>
        /// Initializes the <see cref="Pipe"/> with the specified <see cref="PipeOptions"/>.
        /// </summary>
        /// <param name="options">Pooling, scheduling and backpressure configuration.</param>
        public Pipe(PipeOptions options)
        {
            if (options == null)
            {
                throw new ArgumentNullException(nameof(options));
            }
            if (options.MaximumSizeLow < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(options.MaximumSizeLow));
            }
            if (options.MaximumSizeHigh < 0)
            {
                throw new ArgumentOutOfRangeException(nameof(options.MaximumSizeHigh));
            }
            if (options.MaximumSizeLow > options.MaximumSizeHigh)
            {
                throw new ArgumentException(nameof(options.MaximumSizeHigh) + " should be greater or equal to " + nameof(options.MaximumSizeLow), nameof(options.MaximumSizeHigh));
            }
            _bufferSegmentPool = new BufferSegment[SegmentPoolSize];
            _pool = options.BufferPool;
            _maximumSizeHigh = options.MaximumSizeHigh;
            _maximumSizeLow = options.MaximumSizeLow;
            _readerScheduler = options.ReaderScheduler ?? InlineScheduler.Default;
            _writerScheduler = options.WriterScheduler ?? InlineScheduler.Default;
            // Reader starts blocked (no data yet); writer starts unblocked.
            _readerAwaitable = new PipeAwaitable(completed: false);
            _writerAwaitable = new PipeAwaitable(completed: true);
        }
        // Re-arms completion state and counters so the instance can be reused after Reset().
        private void ResetState()
        {
            _readerCompletion.Reset();
            _writerCompletion.Reset();
            _commitHeadIndex = 0;
            _currentWriteLength = 0;
            _length = 0;
        }
        // Remaining writable space in the current write segment; empty when no write is in progress.
        internal Memory<byte> Buffer => _writingHead?.Buffer.Slice(_writingHead.End, _writingHead.WritableBytes) ?? Memory<byte>.Empty;
        /// <summary>
        /// Allocates memory from the pipeline to write into.
        /// </summary>
        /// <param name="minimumSize">The minimum size buffer to allocate</param>
        /// <returns>A <see cref="WritableBuffer"/> that can be written to.</returns>
        WritableBuffer IPipeWriter.Alloc(int minimumSize)
        {
            if (_writerCompletion.IsCompleted)
            {
                PipelinesThrowHelper.ThrowInvalidOperationException(ExceptionResource.NoWritingAllowed, _writerCompletion.Location);
            }
            if (minimumSize < 0)
            {
                PipelinesThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.minimumSize);
            }
            lock (_sync)
            {
                // CompareExchange not required as its setting to current value if test fails
                _writingState.Begin(ExceptionResource.AlreadyWriting);
                if (minimumSize > 0)
                {
                    try
                    {
                        AllocateWriteHeadUnsynchronized(minimumSize);
                    }
                    catch (Exception)
                    {
                        // Reset producing state if allocation failed
                        _writingState.End(ExceptionResource.NoWriteToComplete);
                        throw;
                    }
                }
                _currentWriteLength = 0;
            }
            return new WritableBuffer(this);
        }
        /// <summary>
        /// Ensures the write head has at least <paramref name="count"/> writable bytes,
        /// appending a freshly rented segment when the current one is full or read-only.
        /// </summary>
        internal void Ensure(int count)
        {
            EnsureAlloc();
            var segment = _writingHead;
            if (segment == null)
            {
                // Changing commit head shared with Reader
                lock (_sync)
                {
                    segment = AllocateWriteHeadUnsynchronized(count);
                }
            }
            var bytesLeftInBuffer = segment.WritableBytes;
            // If inadequate bytes left or if the segment is readonly
            if (bytesLeftInBuffer == 0 || bytesLeftInBuffer < count || segment.ReadOnly)
            {
                BufferSegment nextSegment;
                lock (_sync)
                {
                    nextSegment = CreateSegmentUnsynchronized();
                }
                nextSegment.SetMemory(_pool.Rent(count));
                segment.SetNext(nextSegment);
                _writingHead = nextSegment;
            }
        }
        // Finds (reusing the commit head's tail space) or allocates a segment to write into.
        // Caller must hold _sync.
        private BufferSegment AllocateWriteHeadUnsynchronized(int count)
        {
            BufferSegment segment = null;
            if (_commitHead != null && !_commitHead.ReadOnly)
            {
                // Try to return the tail so the calling code can append to it
                int remaining = _commitHead.WritableBytes;
                if (count <= remaining)
                {
                    // Free tail space of the right amount, use that
                    segment = _commitHead;
                }
            }
            if (segment == null)
            {
                // No free tail space, allocate a new segment
                segment = CreateSegmentUnsynchronized();
                segment.SetMemory(_pool.Rent(count));
            }
            if (_commitHead == null)
            {
                // No previous writes have occurred
                _commitHead = segment;
            }
            else if (segment != _commitHead && _commitHead.Next == null)
            {
                // Append the segment to the commit head if writes have been committed
                // and it isn't the same segment (unused tail space)
                _commitHead.SetNext(segment);
            }
            // Set write head to assigned segment
            _writingHead = segment;
            return segment;
        }
        // Takes a segment from the pool or allocates a new one. Caller must hold _sync.
        private BufferSegment CreateSegmentUnsynchronized()
        {
            if (_pooledSegmentCount > 0)
            {
                _pooledSegmentCount--;
                return _bufferSegmentPool[_pooledSegmentCount];
            }
            return new BufferSegment();
        }
        // Returns a segment to the pool when there is room; otherwise drops it. Caller must hold _sync.
        private void ReturnSegmentUnsynchronized(BufferSegment segment)
        {
            if (_pooledSegmentCount < _bufferSegmentPool.Length)
            {
                _bufferSegmentPool[_pooledSegmentCount] = segment;
                _pooledSegmentCount++;
            }
        }
        /// <summary>
        /// Appends a cloned view of <paramref name="buffer"/>'s segments to the pipe
        /// without copying the underlying bytes.
        /// </summary>
        internal void Append(ReadableBuffer buffer)
        {
            if (buffer.IsEmpty)
            {
                return; // nothing to do
            }
            EnsureAlloc();
            BufferSegment clonedEnd;
            var clonedBegin = BufferSegment.Clone(buffer.Start, buffer.End, out clonedEnd);
            if (_writingHead == null)
            {
                // No active write
                lock (_sync)
                {
                    if (_commitHead == null)
                    {
                        // No allocated buffers yet, not locking as _readHead will be null
                        _commitHead = clonedBegin;
                    }
                    else
                    {
                        Debug.Assert(_commitHead.Next == null);
                        // Allocated buffer, append as next segment
                        _commitHead.SetNext(clonedBegin);
                    }
                }
            }
            else
            {
                Debug.Assert(_writingHead.Next == null);
                // Active write, append as next segment
                _writingHead.SetNext(clonedBegin);
            }
            // Move write head to end of buffer
            _writingHead = clonedEnd;
            _currentWriteLength += buffer.Length;
        }
        // Throws if no write operation is in progress (Alloc was not called).
        private void EnsureAlloc()
        {
            if (!_writingState.IsActive)
            {
                PipelinesThrowHelper.ThrowInvalidOperationException(ExceptionResource.NotWritingNoAlloc);
            }
        }
        // Publishes written bytes to the reader under the lock.
        internal void Commit()
        {
            // Changing commit head shared with Reader
            lock (_sync)
            {
                CommitUnsynchronized();
            }
        }
        // Moves the commit head to the write head, making written bytes visible to the reader,
        // and arms writer backpressure when over the high watermark. Caller must hold _sync.
        internal void CommitUnsynchronized()
        {
            _writingState.End(ExceptionResource.NoWriteToComplete);
            if (_writingHead == null)
            {
                // Nothing written to commit
                return;
            }
            if (_readHead == null)
            {
                // Update the head to point to the head of the buffer.
                // This happens if we called alloc(0) then write
                _readHead = _commitHead;
            }
            // Always move the commit head to the write head
            _commitHead = _writingHead;
            _commitHeadIndex = _writingHead.End;
            _length += _currentWriteLength;
            // Do not reset if reader is complete
            if (_maximumSizeHigh > 0 &&
                _length >= _maximumSizeHigh &&
                !_readerCompletion.IsCompleted)
            {
                _writerAwaitable.Reset();
            }
            // Clear the writing state
            _writingHead = null;
        }
        /// <summary>
        /// Records that <paramref name="bytesWritten"/> bytes were written into the current
        /// write head's buffer; zero is a no-op and negative values throw.
        /// </summary>
        [MethodImpl(MethodImplOptions.AggressiveInlining)]
        internal void Advance(int bytesWritten)
        {
            EnsureAlloc();
            if (bytesWritten > 0)
            {
                if (_writingHead == null)
                {
                    PipelinesThrowHelper.ThrowInvalidOperationException(ExceptionResource.AdvancingWithNoBuffer);
                }
                Debug.Assert(!_writingHead.ReadOnly);
                Debug.Assert(_writingHead.Next == null);
                var buffer = _writingHead.Buffer;
                var bufferIndex = _writingHead.End + bytesWritten;
                if (bufferIndex > buffer.Length)
                {
                    PipelinesThrowHelper.ThrowInvalidOperationException(ExceptionResource.AdvancingPastBufferSize);
                }
                _writingHead.End = bufferIndex;
                _currentWriteLength += bytesWritten;
            }
            else if (bytesWritten < 0)
            {
                PipelinesThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.bytesWritten);
            } // and if zero, just do nothing; don't need to validate tail etc
        }
        // Commits any in-progress write, wakes the reader, and returns an awaitable
        // that completes once backpressure (if armed) is released.
        internal WritableBufferAwaitable FlushAsync(CancellationToken cancellationToken)
        {
            Action awaitable;
            CancellationTokenRegistration cancellationTokenRegistration;
            lock (_sync)
            {
                if (_writingState.IsActive)
                {
                    // Commit the data as not already committed
                    CommitUnsynchronized();
                }
                awaitable = _readerAwaitable.Complete();
                cancellationTokenRegistration = _writerAwaitable.AttachToken(cancellationToken, _signalWriterAwaitable, this);
            }
            // NOTE(review): the registration returned by AttachToken (presumably the previously
            // attached one) is disposed outside the lock so its callback cannot run under _sync
            // — confirm against PipeAwaitable.AttachToken semantics.
            cancellationTokenRegistration.Dispose();
            TrySchedule(_readerScheduler, awaitable);
            return new WritableBufferAwaitable(this);
        }
        // Snapshot of all committed-but-unconsumed bytes plus the current uncommitted write.
        internal ReadableBuffer AsReadableBuffer()
        {
            if (_writingHead == null)
            {
                return new ReadableBuffer(); // Nothing written return empty
            }
            ReadCursor readStart;
            lock (_sync)
            {
                readStart = new ReadCursor(_commitHead, _commitHeadIndex);
            }
            return new ReadableBuffer(readStart, new ReadCursor(_writingHead, _writingHead.End));
        }
        /// <summary>
        /// Marks the pipeline as being complete, meaning no more items will be written to it.
        /// </summary>
        /// <param name="exception">Optional Exception indicating a failure that's causing the pipeline to complete.</param>
        void IPipeWriter.Complete(Exception exception)
        {
            if (_writingState.IsActive)
            {
                PipelinesThrowHelper.ThrowInvalidOperationException(ExceptionResource.CompleteWriterActiveWriter, _writingState.Location);
            }
            Action awaitable;
            PipeCompletionCallbacks completionCallbacks;
            bool readerCompleted;
            lock (_sync)
            {
                completionCallbacks = _writerCompletion.TryComplete(exception);
                awaitable = _readerAwaitable.Complete();
                readerCompleted = _readerCompletion.IsCompleted;
            }
            if (completionCallbacks != null)
            {
                TrySchedule(_readerScheduler, _invokeCompletionCallbacks, completionCallbacks);
            }
            TrySchedule(_readerScheduler, awaitable);
            // Only when both sides are complete can the pipe be torn down.
            if (readerCompleted)
            {
                CompletePipe();
            }
        }
        // Reading
        /// <summary>
        /// Marks bytes up to <paramref name="consumed"/> as consumed and up to
        /// <paramref name="examined"/> as examined, returning fully consumed segments to the
        /// pool and releasing writer backpressure below the low watermark.
        /// </summary>
        void IPipeReader.Advance(ReadCursor consumed, ReadCursor examined)
        {
            BufferSegment returnStart = null;
            BufferSegment returnEnd = null;
            // Reading commit head shared with writer
            Action continuation = null;
            lock (_sync)
            {
                var examinedEverything = examined.Segment == _commitHead && examined.Index == _commitHeadIndex;
                if (!consumed.IsDefault)
                {
                    if (_readHead == null)
                    {
                        PipelinesThrowHelper.ThrowInvalidOperationException(ExceptionResource.AdvanceToInvalidCursor);
                        return;
                    }
                    returnStart = _readHead;
                    returnEnd = consumed.Segment;
                    // Check if we crossed _maximumSizeLow and complete backpressure
                    var consumedBytes = ReadCursor.GetLength(returnStart, returnStart.Start, consumed.Segment, consumed.Index);
                    var oldLength = _length;
                    _length -= consumedBytes;
                    if (oldLength >= _maximumSizeLow &&
                        _length < _maximumSizeLow)
                    {
                        continuation = _writerAwaitable.Complete();
                    }
                    // Check if we consumed entire last segment
                    // if we are going to return commit head
                    // we need to check that there is no writing operation that
                    // might be using tailspace
                    if (consumed.Index == returnEnd.End &&
                        !(_commitHead == returnEnd && _writingState.IsActive))
                    {
                        var nextBlock = returnEnd.Next;
                        if (_commitHead == returnEnd)
                        {
                            _commitHead = nextBlock;
                            _commitHeadIndex = nextBlock?.Start ?? 0;
                        }
                        _readHead = nextBlock;
                        returnEnd = nextBlock;
                    }
                    else
                    {
                        _readHead = consumed.Segment;
                        _readHead.Start = consumed.Index;
                    }
                }
                // We reset the awaitable to not completed if we've examined everything the producer produced so far
                // but only if writer is not completed yet
                if (examinedEverything && !_writerCompletion.IsCompleted)
                {
                    // Prevent deadlock where reader awaits new data and writer await backpressure
                    if (!_writerAwaitable.IsCompleted)
                    {
                        PipelinesThrowHelper.ThrowInvalidOperationException(ExceptionResource.BackpressureDeadlock);
                    }
                    _readerAwaitable.Reset();
                }
                _readingState.End(ExceptionResource.NoReadToComplete);
                // Recycle all fully consumed segments (returnEnd itself is excluded).
                while (returnStart != null && returnStart != returnEnd)
                {
                    returnStart.ResetMemory();
                    ReturnSegmentUnsynchronized(returnStart);
                    returnStart = returnStart.Next;
                }
            }
            TrySchedule(_writerScheduler, continuation);
        }
        /// <summary>
        /// Signal to the producer that the consumer is done reading.
        /// </summary>
        /// <param name="exception">Optional Exception indicating a failure that's causing the pipeline to complete.</param>
        void IPipeReader.Complete(Exception exception)
        {
            if (_readingState.IsActive)
            {
                PipelinesThrowHelper.ThrowInvalidOperationException(ExceptionResource.CompleteReaderActiveReader, _readingState.Location);
            }
            PipeCompletionCallbacks completionCallbacks;
            Action awaitable;
            bool writerCompleted;
            lock (_sync)
            {
                completionCallbacks = _readerCompletion.TryComplete(exception);
                awaitable = _writerAwaitable.Complete();
                writerCompleted = _writerCompletion.IsCompleted;
            }
            if (completionCallbacks != null)
            {
                TrySchedule(_writerScheduler, _invokeCompletionCallbacks, completionCallbacks);
            }
            TrySchedule(_writerScheduler, awaitable);
            // Only when both sides are complete can the pipe be torn down.
            if (writerCompleted)
            {
                CompletePipe();
            }
        }
        // Registers a callback that runs (on the reader scheduler) once the writer completes;
        // runs immediately if the writer already completed.
        void IPipeReader.OnWriterCompleted(Action<Exception, object> callback, object state)
        {
            if (callback == null)
            {
                throw new ArgumentNullException(nameof(callback));
            }
            PipeCompletionCallbacks completionCallbacks;
            lock (_sync)
            {
                completionCallbacks = _writerCompletion.AddCallback(callback, state);
            }
            if (completionCallbacks != null)
            {
                TrySchedule(_readerScheduler, _invokeCompletionCallbacks, completionCallbacks);
            }
        }
        /// <summary>
        /// Cancel to currently pending call to <see cref="ReadAsync"/> without completing the <see cref="IPipeReader"/>.
        /// </summary>
        void IPipeReader.CancelPendingRead()
        {
            Action awaitable;
            lock (_sync)
            {
                awaitable = _readerAwaitable.Cancel();
            }
            TrySchedule(_readerScheduler, awaitable);
        }
        /// <summary>
        /// Cancel to currently pending call to <see cref="WritableBuffer.FlushAsync"/> without completing the <see cref="IPipeWriter"/>.
        /// </summary>
        void IPipeWriter.CancelPendingFlush()
        {
            Action awaitable;
            lock (_sync)
            {
                awaitable = _writerAwaitable.Cancel();
            }
            TrySchedule(_writerScheduler, awaitable);
        }
        // Registers a callback that runs (on the writer scheduler) once the reader completes;
        // runs immediately if the reader already completed.
        void IPipeWriter.OnReaderCompleted(Action<Exception, object> callback, object state)
        {
            if (callback == null)
            {
                throw new ArgumentNullException(nameof(callback));
            }
            PipeCompletionCallbacks completionCallbacks;
            lock (_sync)
            {
                completionCallbacks = _readerCompletion.AddCallback(callback, state);
            }
            if (completionCallbacks != null)
            {
                TrySchedule(_writerScheduler, _invokeCompletionCallbacks, completionCallbacks);
            }
        }
        // Returns an awaitable that completes when committed data (or cancellation/completion)
        // is available to the reader.
        ReadableBufferAwaitable IPipeReader.ReadAsync(CancellationToken token)
        {
            CancellationTokenRegistration cancellationTokenRegistration;
            if (_readerCompletion.IsCompleted)
            {
                PipelinesThrowHelper.ThrowInvalidOperationException(ExceptionResource.NoReadingAllowed, _readerCompletion.Location);
            }
            lock (_sync)
            {
                cancellationTokenRegistration = _readerAwaitable.AttachToken(token, _signalReaderAwaitable, this);
            }
            // NOTE(review): as in FlushAsync, the registration returned by AttachToken is
            // disposed outside the lock — confirm it refers to the previous registration.
            cancellationTokenRegistration.Dispose();
            return new ReadableBufferAwaitable(this);
        }
        // Synchronous read attempt: succeeds only if data is already available (or the
        // reader awaitable is already completed); never blocks.
        bool IPipeReader.TryRead(out ReadResult result)
        {
            lock (_sync)
            {
                if (_readerCompletion.IsCompleted)
                {
                    PipelinesThrowHelper.ThrowInvalidOperationException(ExceptionResource.NoReadingAllowed, _readerCompletion.Location);
                }
                result = new ReadResult();
                if (_length > 0 || _readerAwaitable.IsCompleted)
                {
                    GetResult(ref result);
                    return true;
                }
                if (_readerAwaitable.HasContinuation)
                {
                    PipelinesThrowHelper.ThrowInvalidOperationException(ExceptionResource.AlreadyReading);
                }
                return false;
            }
        }
        // Schedules the continuation if there is one; a null action is a no-op.
        private static void TrySchedule(IScheduler scheduler, Action action)
        {
            if (action != null)
            {
                scheduler.Schedule(_scheduleContinuation, action);
            }
        }
        private static void TrySchedule(IScheduler scheduler, Action<object> action, object state)
        {
            if (action != null)
            {
                scheduler.Schedule(action, state);
            }
        }
        // Tears the pipe down once both sides have completed; idempotent via _disposed.
        private void CompletePipe()
        {
            lock (_sync)
            {
                if (_disposed)
                {
                    return;
                }
                _disposed = true;
                // Return all segments
                var segment = _readHead;
                while (segment != null)
                {
                    var returnSegment = segment;
                    segment = segment.Next;
                    returnSegment.ResetMemory();
                }
                _readHead = null;
                _commitHead = null;
            }
        }
        // IReadableBufferAwaiter members
        bool IReadableBufferAwaiter.IsCompleted => _readerAwaitable.IsCompleted;
        void IReadableBufferAwaiter.OnCompleted(Action continuation)
        {
            Action awaitable;
            bool doubleCompletion;
            lock (_sync)
            {
                awaitable = _readerAwaitable.OnCompleted(continuation, out doubleCompletion);
            }
            // Two concurrent reads are not allowed; fault the writer so the error surfaces.
            if (doubleCompletion)
            {
                Writer.Complete(PipelinesThrowHelper.GetInvalidOperationException(ExceptionResource.NoConcurrentOperation));
            }
            TrySchedule(_readerScheduler, awaitable);
        }
        ReadResult IReadableBufferAwaiter.GetResult()
        {
            if (!_readerAwaitable.IsCompleted)
            {
                PipelinesThrowHelper.ThrowInvalidOperationException(ExceptionResource.GetResultNotCompleted);
            }
            var result = new ReadResult();
            lock (_sync)
            {
                GetResult(ref result);
            }
            return result;
        }
        // Populates a ReadResult from current pipe state and begins the read operation.
        // Caller must hold _sync.
        private void GetResult(ref ReadResult result)
        {
            if (_writerCompletion.IsCompletedOrThrow())
            {
                result.ResultFlags |= ResultFlags.Completed;
            }
            var isCancelled = _readerAwaitable.ObserveCancelation();
            if (isCancelled)
            {
                result.ResultFlags |= ResultFlags.Cancelled;
            }
            // No need to read end if there is no head
            var head = _readHead;
            if (head != null)
            {
                // Reading commit head shared with writer
                result.ResultBuffer.BufferEnd.Segment = _commitHead;
                result.ResultBuffer.BufferEnd.Index = _commitHeadIndex;
                result.ResultBuffer.BufferLength = _length;
                result.ResultBuffer.BufferStart.Segment = head;
                result.ResultBuffer.BufferStart.Index = head.Start;
            }
            if (isCancelled)
            {
                _readingState.BeginTentative(ExceptionResource.AlreadyReading);
            }
            else
            {
                _readingState.Begin(ExceptionResource.AlreadyReading);
            }
        }
        // IWritableBufferAwaiter members
        bool IWritableBufferAwaiter.IsCompleted => _writerAwaitable.IsCompleted;
        FlushResult IWritableBufferAwaiter.GetResult()
        {
            var result = new FlushResult();
            lock (_sync)
            {
                if (!_writerAwaitable.IsCompleted)
                {
                    PipelinesThrowHelper.ThrowInvalidOperationException(ExceptionResource.GetResultNotCompleted);
                }
                // Change the state from to be cancelled -> observed
                if (_writerAwaitable.ObserveCancelation())
                {
                    result.ResultFlags |= ResultFlags.Cancelled;
                }
                if (_readerCompletion.IsCompletedOrThrow())
                {
                    result.ResultFlags |= ResultFlags.Completed;
                }
            }
            return result;
        }
        void IWritableBufferAwaiter.OnCompleted(Action continuation)
        {
            Action awaitable;
            bool doubleCompletion;
            lock (_sync)
            {
                awaitable = _writerAwaitable.OnCompleted(continuation, out doubleCompletion);
            }
            // Two concurrent flushes are not allowed; fault the reader so the error surfaces.
            if (doubleCompletion)
            {
                Reader.Complete(PipelinesThrowHelper.GetInvalidOperationException(ExceptionResource.NoConcurrentOperation));
            }
            TrySchedule(_writerScheduler, awaitable);
        }
        // Invoked by the CancellationToken attached in ReadAsync.
        private void ReaderCancellationRequested()
        {
            Action action;
            lock (_sync)
            {
                action = _readerAwaitable.Cancel();
            }
            TrySchedule(_readerScheduler, action);
        }
        // Invoked by the CancellationToken attached in FlushAsync.
        private void WriterCancellationRequested()
        {
            Action action;
            lock (_sync)
            {
                action = _writerAwaitable.Cancel();
            }
            TrySchedule(_writerScheduler, action);
        }
        // The pipe itself implements both ends.
        public IPipeReader Reader => this;
        public IPipeWriter Writer => this;
        // Allows reuse of a fully completed pipe; throws if either side is still active.
        public void Reset()
        {
            lock (_sync)
            {
                if (!_disposed)
                {
                    throw new InvalidOperationException("Both reader and writer need to be completed to be able to reset ");
                }
                _disposed = false;
                ResetState();
            }
        }
    }
}
| |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="PlotView.cs" company="OxyPlot">
// Copyright (c) 2014 OxyPlot contributors
// </copyright>
// <summary>
// Provides a view that can show a <see cref="PlotModel" />.
// </summary>
// --------------------------------------------------------------------------------------------------------------------
namespace OxyPlot.Xamarin.iOS
{
using Foundation;
using OxyPlot;
using System;
using System.Collections.Generic;
using System.Linq;
using UIKit;
/// <summary>
/// Provides a view that can show a <see cref="PlotModel" />.
/// </summary>
[Register("PlotView")]
public class PlotView : UIView, IPlotView
{
        /// <summary>
        /// The current plot model.
        /// </summary>
        private PlotModel model;
        /// <summary>
        /// The default plot controller.
        /// </summary>
        private IPlotController defaultController;
        /// <summary>
        /// Gesture recognizer that handles pan/zoom touches; wired up in Initialize.
        /// </summary>
        private PanZoomGestureRecognizer panZoomGesture = new PanZoomGestureRecognizer();
        /// <summary>
        /// Initializes a new instance of the <see cref="OxyPlot.Xamarin.iOS.PlotView"/> class.
        /// </summary>
        public PlotView()
        {
            // All constructors funnel through the shared Initialize method.
            this.Initialize ();
        }
        /// <summary>
        /// Initializes a new instance of the <see cref="OxyPlot.Xamarin.iOS.PlotView"/> class.
        /// </summary>
        /// <param name="frame">The initial frame.</param>
        public PlotView(CoreGraphics.CGRect frame) : base(frame)
        {
            // All constructors funnel through the shared Initialize method.
            this.Initialize ();
        }
        /// <summary>
        /// Initializes a new instance of the <see cref="OxyPlot.Xamarin.iOS.PlotView"/> class.
        /// The initWithCoder: export makes this the initializer UIKit uses when the view
        /// is deserialized (e.g. from a storyboard/nib).
        /// </summary>
        /// <param name="coder">Coder.</param>
        [Export ("initWithCoder:")]
        public PlotView(NSCoder coder) : base (coder)
        {
            this.Initialize ();
        }
        /// <summary>
        /// Uses the new layout.
        /// </summary>
        /// <returns><c>true</c>, if new layout was used, <c>false</c> otherwise.</returns>
        /// <remarks>
        /// Exported to Objective-C so UIKit sees this class as supporting
        /// constraint-based (Auto Layout) layout.
        /// </remarks>
        [Export ("requiresConstraintBasedLayout")]
        bool UseNewLayout ()
        {
            return true;
        }
/// <summary>
/// Initialize the view.
/// </summary>
private void Initialize() {
this.UserInteractionEnabled = true;
this.MultipleTouchEnabled = true;
this.BackgroundColor = UIColor.White;
this.KeepAspectRatioWhenPinching = true;
this.panZoomGesture.AddTarget(HandlePanZoomGesture);
// Do not intercept touches on overlapping views
this.panZoomGesture.ShouldReceiveTouch += (recognizer, touch) => touch.View == this;
}
/// <summary>
/// Gets or sets the <see cref="PlotModel"/> to show in the view.
/// </summary>
/// <value>The <see cref="PlotModel"/>.</value>
public PlotModel Model
{
get
{
return this.model;
}
set
{
if (this.model != value)
{
if (this.model != null)
{
((IPlotModel)this.model).AttachPlotView(null);
this.model = null;
}
if (value != null)
{
((IPlotModel)value).AttachPlotView(this);
this.model = value;
}
this.InvalidatePlot();
}
}
}
/// <summary>
/// Gets or sets the <see cref="IPlotController"/> that handles input events.
/// </summary>
/// <value>The <see cref="IPlotController"/>.</value>
public IPlotController Controller { get; set; }
/// <summary>
/// Gets the actual model in the view.
/// </summary>
/// <value>
/// The actual model.
/// </value>
Model IView.ActualModel
{
get
{
return this.Model;
}
}
/// <summary>
/// Gets the actual <see cref="PlotModel"/> to show.
/// </summary>
/// <value>The actual model.</value>
public PlotModel ActualModel
{
get
{
return this.Model;
}
}
/// <summary>
/// Gets the actual controller.
/// </summary>
/// <value>
/// The actual <see cref="IController" />.
/// </value>
IController IView.ActualController
{
get
{
return this.ActualController;
}
}
/// <summary>
/// Gets the coordinates of the client area of the view.
/// </summary>
public OxyRect ClientArea
{
get
{
// TODO
return new OxyRect(0, 0, 100, 100);
}
}
/// <summary>
/// Gets the actual <see cref="IPlotController"/>.
/// </summary>
/// <value>The actual plot controller.</value>
public IPlotController ActualController
{
get
{
return this.Controller ?? (this.defaultController ?? (this.defaultController = new PlotController()));
}
}
/// <summary>
/// Gets or sets a value indicating whether this <see cref="OxyPlot.Xamarin.iOS.PlotView"/> keeps the aspect ratio when pinching.
/// </summary>
/// <value><c>true</c> if keep aspect ratio when pinching; otherwise, <c>false</c>.</value>
public bool KeepAspectRatioWhenPinching
{
get { return this.panZoomGesture.KeepAspectRatioWhenPinching; }
set { this.panZoomGesture.KeepAspectRatioWhenPinching = value; }
}
/// <summary>
/// How far apart touch points must be on a certain axis to enable scaling that axis.
/// (only applies if KeepAspectRatioWhenPinching == false)
/// </summary>
public double ZoomThreshold
{
get { return this.panZoomGesture.ZoomThreshold; }
set { this.panZoomGesture.ZoomThreshold = value; }
}
/// <summary>
/// If <c>true</c>, and KeepAspectRatioWhenPinching is <c>false</c>, a zoom-out gesture
/// can turn into a zoom-in gesture if the fingers cross. Setting to <c>false</c> will
/// instead simply stop the zoom at that point.
/// </summary>
public bool AllowPinchPastZero
{
get { return this.panZoomGesture.AllowPinchPastZero; }
set { this.panZoomGesture.AllowPinchPastZero = value; }
}
/// <summary>
/// Hides the tracker.
/// </summary>
public void HideTracker()
{
}
/// <summary>
/// Hides the zoom rectangle.
/// </summary>
public void HideZoomRectangle()
{
}
/// <summary>
/// Invalidates the plot (not blocking the UI thread)
/// </summary>
/// <param name="updateData">If set to <c>true</c> update data.</param>
public void InvalidatePlot(bool updateData = true)
{
var actualModel = this.model;
if (actualModel != null)
{
// TODO: update the model on a background thread
((IPlotModel)actualModel).Update(updateData);
}
if (actualModel != null && !actualModel.Background.IsUndefined())
{
this.BackgroundColor = actualModel.Background.ToUIColor();
}
else
{
// Use white as default background color
this.BackgroundColor = UIColor.White;
}
this.SetNeedsDisplay();
}
/// <summary>
/// Sets the cursor type.
/// </summary>
/// <param name="cursorType">The cursor type.</param>
public void SetCursorType(CursorType cursorType)
{
// No cursor on iOS
}
/// <summary>
/// Shows the tracker.
/// </summary>
/// <param name="trackerHitResult">The tracker data.</param>
public void ShowTracker(TrackerHitResult trackerHitResult)
{
// TODO: how to show a tracker on iOS
// the tracker must be moved away from the finger...
}
/// <summary>
/// Shows the zoom rectangle.
/// </summary>
/// <param name="rectangle">The rectangle.</param>
public void ShowZoomRectangle(OxyRect rectangle)
{
// Not needed - better with pinch events on iOS?
}
/// <summary>
/// Stores text on the clipboard.
/// </summary>
/// <param name="text">The text.</param>
public void SetClipboardText(string text)
{
UIPasteboard.General.SetValue(new NSString(text), "public.utf8-plain-text");
}
/// <summary>
/// Draws the content of the view.
/// </summary>
/// <param name="rect">The rectangle to draw.</param>
public override void Draw(CoreGraphics.CGRect rect)
{
if (this.model != null)
{
using (var renderer = new CoreGraphicsRenderContext(UIGraphics.GetCurrentContext()))
{
((IPlotModel)this.model).Render(renderer, rect.Width, rect.Height);
}
}
}
/// <summary>
/// Method invoked when a motion (a shake) has started.
/// </summary>
/// <param name="motion">The motion subtype.</param>
/// <param name="evt">The event arguments.</param>
public override void MotionBegan(UIEventSubtype motion, UIEvent evt)
{
base.MotionBegan(motion, evt);
if (motion == UIEventSubtype.MotionShake)
{
this.ActualController.HandleGesture(this, new OxyShakeGesture(), new OxyKeyEventArgs());
}
}
/// <summary>
/// Used to add/remove the gesture recognizer so that it
/// doesn't prevent the PlotView from being garbage-collected.
/// </summary>
/// <param name="newsuper">New superview</param>
public override void WillMoveToSuperview (UIView newsuper)
{
if (newsuper == null)
{
this.RemoveGestureRecognizer (this.panZoomGesture);
}
else if (this.Superview == null)
{
this.AddGestureRecognizer (this.panZoomGesture);
}
base.WillMoveToSuperview (newsuper);
}
private void HandlePanZoomGesture()
{
switch (panZoomGesture.State)
{
case UIGestureRecognizerState.Began:
ActualController.HandleTouchStarted(this, panZoomGesture.TouchEventArgs);
break;
case UIGestureRecognizerState.Changed:
ActualController.HandleTouchDelta(this, panZoomGesture.TouchEventArgs);
break;
case UIGestureRecognizerState.Ended:
case UIGestureRecognizerState.Cancelled:
ActualController.HandleTouchCompleted(this, panZoomGesture.TouchEventArgs);
break;
}
}
}
}
| |
//
// Copyright (c) 2006-2018 Erik Ylvisaker
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
//
using AgateLib.Mathematics.Geometry;
using AgateLib.Quality;
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Graphics;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
namespace AgateLib.Display.BitmapFont
{
    /// <summary>
    /// Core contract for a font: exposes the per-settings font textures and
    /// performs height queries, drawing and measurement for a given font state.
    /// </summary>
    public interface IFontCore
    {
        /// <summary>
        /// Gets the name of the font.
        /// </summary>
        string Name { get; }

        /// <summary>
        /// Gets the available font textures, keyed by the settings they were created for.
        /// </summary>
        IReadOnlyDictionary<FontSettings, IFontTexture> FontItems { get; }

        /// <summary>
        /// Returns the height, in pixels, of the font for the given state.
        /// </summary>
        int FontHeight(FontState state);

        /// <summary>
        /// Draws <paramref name="text"/> at <paramref name="dest"/> using the given sprite batch.
        /// </summary>
        void DrawText(FontState state, SpriteBatch spriteBatch, Vector2 dest, string text);

        /// <summary>
        /// Measures the rendered size of <paramref name="text"/> for the given state.
        /// </summary>
        Size MeasureString(FontState state, string text);

        /// <summary>
        /// Returns the registered settings that most closely match the requested settings.
        /// </summary>
        FontSettings GetClosestFontSettings(FontSettings settings);

        /// <summary>
        /// Returns the font texture that best matches the state's settings.
        /// </summary>
        IFontTexture FontSurface(FontState fontState);
    }
public class FontCore : IFontCore
{
private Dictionary<FontSettings, IFontTexture> fontTextures = new Dictionary<FontSettings, IFontTexture>();
public FontCore(string name)
{
Name = name;
}
public string Name { get; set; }
public IReadOnlyDictionary<FontSettings, IFontTexture> FontItems => fontTextures;
public void AddFontTexture(FontSettings settings, IFontTexture fontSurface)
{
Require.ArgumentNotNull(fontSurface, nameof(fontSurface));
fontTextures[settings] = fontSurface;
}
public IFontTexture GetFontSurface(int size, FontStyles fontStyles)
{
return GetFontSurface(new FontSettings(size, fontStyles));
}
public IFontTexture GetFontSurface(FontSettings settings)
{
return fontTextures[settings];
}
public int FontHeight(FontState state)
{
var surface = FontSurface(state);
return surface.FontHeight(state);
}
private int MaxSize(FontStyles style)
{
var keys = fontTextures.Keys.Where(x => x.Style == style);
if (keys.Any())
{
return keys.Max(x => x.Size);
}
else
{
return -1;
}
}
#region --- Finding correctly sized font ---
public IFontTexture FontSurface(FontState state)
{
var settings = GetClosestFontSettings(state.Settings);
var result = fontTextures[settings];
var ratio = state.Settings.Size / (double)settings.Size;
state.ScaleHeight = ratio;
state.ScaleWidth = ratio;
return result;
}
internal FontSettings GetClosestFontSettings(int size, FontStyles style)
{
return GetClosestFontSettings(new FontSettings(size, style));
}
public FontSettings GetClosestFontSettings(FontSettings settings)
{
if (fontTextures.ContainsKey(settings))
{
return settings;
}
int maxSize = MaxSize(settings.Style);
// this happens if we have no font surfaces of this style.
if (maxSize <= 0)
{
FontStyles newStyle;
// OK remove styles until we find an actual font.
if (TryRemoveStyle(settings.Style, FontStyles.Strikeout, out newStyle))
{
return GetClosestFontSettings(settings.Size, newStyle);
}
if (TryRemoveStyle(settings.Style, FontStyles.Italic, out newStyle))
{
return GetClosestFontSettings(settings.Size, newStyle);
}
if (TryRemoveStyle(settings.Style, FontStyles.Underline, out newStyle))
{
return GetClosestFontSettings(settings.Size, newStyle);
}
if (TryRemoveStyle(settings.Style, FontStyles.Bold, out newStyle))
{
return GetClosestFontSettings(settings.Size, newStyle);
}
else
{
Debug.Assert(fontTextures.Count == 0);
throw new InvalidOperationException("There are no font styles defined.");
}
}
if (settings.Size > maxSize)
{
return GetClosestFontSettings(maxSize, settings.Style);
}
for (int i = settings.Size; i <= maxSize; i++)
{
settings.Size = i;
if (fontTextures.ContainsKey(settings))
{
return settings;
}
}
throw new InvalidOperationException("Could not find a valid font.");
}
#endregion
private bool TryRemoveStyle(FontStyles value, FontStyles remove, out FontStyles result)
{
if ((value & remove) == remove)
{
result = ~(~value | remove);
return true;
}
else
{
result = 0;
return false;
}
}
public void DrawText(FontState state, SpriteBatch spriteBatch, Vector2 dest, string text)
{
state.Location = dest;
state.Text = text;
FontSurface(state).DrawText(state, spriteBatch);
}
public Size MeasureString(FontState state, string text)
{
return FontSurface(state).MeasureString(state, text);
}
}
}
| |
#if false //deadcode
//------------------------------------------------------------------------------
// <copyright file="ScatterGatherStream.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------
namespace System.Web.Services.Protocols {
    using System;
    using System.IO;
    using System.Diagnostics;

    /// <summary>
    /// A growable in-memory stream backed by a singly linked list of byte-array
    /// chunks ("scatter/gather") instead of one contiguous buffer, so growing
    /// never reallocates or copies existing data.
    /// Invariants used throughout:
    /// - every chunk before the last one is completely full;
    /// - <c>endOffset</c> is the count of valid bytes in the LAST chunk;
    /// - <c>currentChunk</c>/<c>currentOffset</c> form the current position;
    /// - <c>currentChunkStartPos</c> is the absolute stream position of the
    ///   first byte of <c>currentChunk</c>.
    /// </summary>
    internal class ScatterGatherStream : Stream {
        private const int MemStreamMaxLength = Int32.MaxValue;
        private MemoryChunk headChunk = null;     // first chunk of the chain
        private MemoryChunk currentChunk = null;  // chunk containing the current position
        private long chunkSize = 0;               // size of the next allocation (doubled after each one)
        private int currentOffset = 0;            // position within currentChunk
        private int endOffset = 0;                // valid byte count in the last chunk
        private long currentChunkStartPos = 0;    // absolute position of currentChunk's first byte

        /// <summary>Creates an empty stream whose first chunk is <paramref name="chunkSize"/> bytes.</summary>
        internal ScatterGatherStream(int chunkSize) {
            this.chunkSize = chunkSize;
            currentChunk = headChunk = AllocateMemoryChunk(this.chunkSize);
            currentOffset = endOffset = 0;
            currentChunkStartPos = 0;
        }

        /// <summary>Creates an empty stream with a default initial chunk size of 1024 bytes.</summary>
        internal ScatterGatherStream() : this(1024) { }

        public override bool CanRead { get { return true; } }
        public override bool CanSeek { get { return true; } }
        public override bool CanWrite { get { return true; } }

        /// <summary>Drops the chunk chain and resets all position state.</summary>
        public override void Close() {
            headChunk = null;
            currentChunk = null;
            endOffset = currentOffset = 0;
            currentChunkStartPos = 0;
        }

        /// <summary>No-op: all data already lives in memory.</summary>
        public override void Flush() { }

        /// <summary>Gets the total number of valid bytes in the stream.</summary>
        public override long Length {
            get {
                MemoryChunk endChunk;
                return GetLengthInternal(out endChunk);
            }
        }

        // Computes the total length by starting from currentChunkStartPos (which
        // accounts for all chunks before currentChunk), adding the full chunks
        // from currentChunk to the second-to-last, then the last chunk's endOffset.
        // Also returns the last chunk of the chain.
        private long GetLengthInternal(out MemoryChunk endChunk){
            long length = currentChunkStartPos;
            MemoryChunk chunk = currentChunk;
            while (chunk.Next != null) {
                length += chunk.Buffer.Length;
                chunk = chunk.Next;
            }
            length += endOffset;
            endChunk = chunk;
            return length;
        }

        /// <summary>Gets or sets the current absolute position (implemented via Seek).</summary>
        public override long Position {
            get {
                return Seek(0, SeekOrigin.Current);
            }
            set {
                Seek(value, SeekOrigin.Begin);
            }
        }

        /// <summary>
        /// Moves the current position. Seeking past the end of the chain allocates
        /// a new chunk large enough to reach the target position.
        /// </summary>
        public override long Seek(long offset, SeekOrigin loc) {
            MemoryChunk chunk = null;;
            long relativeOffset = 0;
            long absoluteOffset = 0;
            // Pick a starting chunk — currentChunk when the target lies at or
            // after it (avoids rescanning from the head), otherwise headChunk —
            // and compute the offset relative to that chunk's first byte.
            if(loc == SeekOrigin.Begin){
                absoluteOffset = offset;
                if(offset >= currentChunkStartPos){
                    chunk = currentChunk;
                    relativeOffset = offset - currentChunkStartPos;
                }
                else{
                    chunk = headChunk;
                    relativeOffset = absoluteOffset;
                }
            }
            else if( loc == SeekOrigin.Current){
                absoluteOffset = offset + currentOffset + currentChunkStartPos;
                if( (offset + currentOffset) > 0){
                    chunk = currentChunk;
                    relativeOffset = offset + currentOffset;
                }
                else {
                    chunk = headChunk;
                    relativeOffset = absoluteOffset;
                }
            }
            else if (loc == SeekOrigin.End){
                MemoryChunk endChunk;
                long length = GetLengthInternal(out endChunk);
                absoluteOffset = offset + length;
                if ( (offset + endOffset) > 0 ) {
                    relativeOffset = offset + endOffset;
                    chunk = endChunk;
                }
                else if(absoluteOffset >= currentChunkStartPos){
                    chunk = currentChunk;
                    relativeOffset = absoluteOffset - currentChunkStartPos;
                }
                else {
                    chunk = headChunk;
                    relativeOffset = absoluteOffset;
                }
            }
            else
                throw new ArgumentOutOfRangeException("loc");
            if (relativeOffset < 0 || relativeOffset > MemStreamMaxLength)
                throw new ArgumentOutOfRangeException("offset");
            // Walk forward from the starting chunk until the chunk containing
            // the target offset is found.
            long remaining = relativeOffset;
            while (chunk.Next != null) {
                if (remaining < chunk.Buffer.Length){
                    currentChunk = chunk;
                    currentOffset = (int)remaining;
                    currentChunkStartPos = absoluteOffset - currentOffset;
                    remaining = -1; // flag: position resolved inside an existing chunk
                    break;
                }
                remaining -= chunk.Buffer.Length;
                chunk = chunk.Next;
            }
            // Target lies in (or beyond) the last chunk: grow the chain if needed.
            if (remaining >= 0){
                if (remaining <= chunk.Buffer.Length)
                    currentChunk = chunk;
                else {
                    currentChunk = chunk.Next = AllocateMemoryChunk(2*remaining);
                    endOffset = 0;
                }
                currentOffset = (int)remaining;
                currentChunkStartPos = absoluteOffset - currentOffset;
                SyncEndOffset();
            }
            return absoluteOffset;
        }

        /// <summary>
        /// Truncates or extends the stream to <paramref name="absNewLen"/> bytes.
        /// When truncating past the current position, the position is clamped to
        /// the new end.
        /// </summary>
        public override void SetLength(long absNewLen) {
            if (absNewLen < 0 || absNewLen > MemStreamMaxLength)
                throw new ArgumentOutOfRangeException("offset");
            MemoryChunk chunk;
            bool currentPastEnd;
            long relNewLen;
            // Start scanning from currentChunk when the new length is at or after
            // it; otherwise from the head (the current position will then be past
            // the new end and must be pulled back).
            if(absNewLen >= currentChunkStartPos){
                currentPastEnd = false;
                chunk = currentChunk;
                relNewLen = absNewLen - currentChunkStartPos;
            }
            else {
                currentPastEnd = true;
                chunk = headChunk;
                relNewLen = absNewLen;
            }
            long startPos = 0;
            MemoryChunk endChunk = null;
            while (chunk != null) {
                long endPos = startPos + chunk.Buffer.Length;
                if(endPos > relNewLen){
                    // This chunk contains the new end: cut the chain here.
                    chunk.Next = null;
                    endOffset = (int)(relNewLen - startPos);
                    if(chunk == currentChunk)
                        currentOffset = min(currentOffset, endOffset);
                    else if(currentPastEnd){
                        // Clamp the position to the new end of the stream.
                        currentChunk = chunk;
                        currentOffset = endOffset;
                        currentChunkStartPos = absNewLen - currentOffset;
                    }
                    return;
                }
                startPos = endPos;
                endChunk = chunk;
                chunk = chunk.Next;
            }
            // New length is beyond the existing chain: append one chunk to cover it.
            // assert(endChunk != null)
            endChunk.Next = AllocateMemoryChunk((int)(absNewLen - startPos));
            endOffset = (int)(absNewLen - startPos);
        }

        /// <summary>
        /// Reads up to <paramref name="count"/> bytes from the current position,
        /// advancing across chunk boundaries as needed.
        /// </summary>
        /// <returns>The number of bytes actually copied (may be less than count at end of stream).</returns>
        public override int Read(byte[] buffer, int offset, int count) {
            byte[] chunkBuffer = currentChunk.Buffer;
            int chunkSize = chunkBuffer.Length;
            // Only endOffset bytes of the last chunk are valid.
            if (currentChunk.Next == null)
                chunkSize = endOffset;
            int bytesRead = 0;
            while (count > 0) {
                if (currentOffset == chunkSize) {
                    // exit if no more chunks are currently available
                    if (currentChunk.Next == null)
                        break;
                    currentChunkStartPos += currentChunk.Buffer.Length;
                    currentChunk = currentChunk.Next;
                    currentOffset = 0;
                    chunkBuffer = currentChunk.Buffer;
                    chunkSize = chunkBuffer.Length;
                    if (currentChunk.Next == null)
                        chunkSize = endOffset;
                }
                int readCount = min(count, chunkSize - currentOffset);
                Buffer.BlockCopy(chunkBuffer, currentOffset, buffer, offset, readCount);
                offset += readCount;
                count -= readCount;
                currentOffset += readCount;
                bytesRead += readCount;
            }
            return bytesRead;
        }

        // Scratch buffer shared by ReadByte/WriteByte to avoid per-call allocation.
        // NOTE(review): makes single-byte I/O non-reentrant — confirm single-threaded use.
        byte[] oneByteBuffer = new byte[1];

        /// <summary>Reads one byte, or returns -1 at end of stream.</summary>
        public override int ReadByte(){
            if(Read(oneByteBuffer, 0, 1) == 1)
                return oneByteBuffer[0];
            return -1;
        }

        /// <summary>
        /// Writes <paramref name="count"/> bytes at the current position, growing
        /// the chunk chain as needed.
        /// </summary>
        public override void Write(byte[] buffer, int offset, int count) {
            byte[] chunkBuffer = currentChunk.Buffer;
            int chunkSize = chunkBuffer.Length;
            while (count > 0) {
                if (currentOffset == chunkSize) {
                    // allocate a new chunk if the current one is full
                    if(currentChunk.Next == null){
                        currentChunk.Next = AllocateMemoryChunk(count);
                        endOffset = 0;
                    }
                    currentChunkStartPos += currentChunk.Buffer.Length;
                    currentChunk = currentChunk.Next;
                    currentOffset = 0;
                    chunkBuffer = currentChunk.Buffer;
                    chunkSize = chunkBuffer.Length;
                }
                // NOTE(review): copies at endOffset rather than currentOffset —
                // this appears to assume writes only ever happen at the end of
                // the stream (append-only); confirm before reviving this code.
                int copyCount = min(count, chunkSize - endOffset);
                Buffer.BlockCopy(buffer, offset, chunkBuffer, endOffset, copyCount);
                offset += copyCount;
                count -= copyCount;
                currentOffset += copyCount;
                SyncEndOffset();
            }
        }

        /// <summary>Writes a single byte via the shared one-byte scratch buffer.</summary>
        public override void WriteByte(byte value) {
            oneByteBuffer[0] = value;
            Write(oneByteBuffer, 0, 1);
        }

        /// <summary>
        /// Returns the next unread chunk buffer without copying ("gather" access),
        /// advancing the position past it. Returns false when no data remains.
        /// </summary>
        internal bool GetNextBuffer(out byte[] buffer, out int byteOffset, out int byteCount) {
            buffer = null;
            byteOffset = 0;
            byteCount = 0;
            if (currentChunk == null || headChunk == null || (currentChunk.Next == null && currentOffset == endOffset))
                return false;
            buffer = currentChunk.Buffer;
            if (currentChunk.Next == null) {
                // Last chunk: only endOffset bytes are valid; consume them all.
                byteCount = endOffset;
                currentOffset = endOffset;
            }
            else {
                currentChunkStartPos += currentChunk.Buffer.Length;
                currentChunk = currentChunk.Next;
                byteCount = buffer.Length;
                currentOffset = 0;
            }
            return true;
        }

        // copy entire buffer into an array
        internal virtual byte[] ToArray() {
            int length = (int)Length; // this will throw if stream is closed
            byte[] copy = new byte[length];
            // Temporarily rewind to the head, read everything, then restore the position.
            MemoryChunk backupReadChunk = currentChunk;
            int backupReadOffset = currentOffset;
            currentChunk = headChunk;
            currentOffset = 0;
            Read(copy, 0, length);
            currentChunk = backupReadChunk;
            currentOffset = backupReadOffset;
            return copy;
        }

        // write remainder of this stream to another stream
        internal virtual void WriteTo(Stream stream) {
            if (stream == null)
                throw new ArgumentNullException("stream");
            byte[] chunkBuffer = currentChunk.Buffer;
            int chunkSize = chunkBuffer.Length;
            if (currentChunk.Next == null)
                chunkSize = endOffset;
            // following code mirrors Read() logic (currentChunk/currentOffset should
            // point just past last byte of last chunk when done)
            for (;;){ // loop until end of chunks is found
                if (currentOffset == chunkSize) {
                    // exit if no more chunks are currently available
                    if (currentChunk.Next == null)
                        break;
                    currentChunkStartPos += currentChunk.Buffer.Length;
                    currentChunk = currentChunk.Next;
                    currentOffset = 0;
                    chunkBuffer = currentChunk.Buffer;
                    chunkSize = chunkBuffer.Length;
                    if (currentChunk.Next == null)
                        chunkSize = endOffset;
                }
                int writeCount = chunkSize - currentOffset;
                stream.Write(chunkBuffer, currentOffset, writeCount);
                currentOffset = chunkSize;
            }
        }

        private static int min(int a, int b) { return a < b ? a : b;}

        // Allocates a chunk of at least newSize bytes; the running chunkSize is
        // doubled after each allocation so growth cost is amortized.
        private MemoryChunk AllocateMemoryChunk(long newSize) {
            if(newSize > chunkSize) chunkSize = newSize;
            MemoryChunk chunk = new MemoryChunk();
            chunk.Buffer = new byte[chunkSize];
            chunkSize*=2;// double so the next allocation is larger (amortized growth)
            chunk.Next = null;
            return chunk;
        }

        // Keeps endOffset (valid bytes in the last chunk) in step with a position
        // that has moved past it within the last chunk.
        private void SyncEndOffset() {
            if (currentChunk.Next == null && currentOffset > endOffset)
                endOffset = currentOffset;
        }

        // One node of the chunk chain: a byte buffer plus the link to the next chunk.
        private class MemoryChunk {
            internal byte[] Buffer = null;
            internal MemoryChunk Next = null;
        }
    }
}
#endif
| |
// Copyright (c) 2010-2014 SharpDX - Alexandre Mutel
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
// -----------------------------------------------------------------------------
// The following code is a port of XNA StockEffects http://xbox.create.msdn.com/en-US/education/catalog/sample/stock_effects
// -----------------------------------------------------------------------------
// Microsoft Public License (Ms-PL)
//
// This license governs use of the accompanying software. If you use the
// software, you accept this license. If you do not accept the license, do not
// use the software.
//
// 1. Definitions
// The terms "reproduce," "reproduction," "derivative works," and
// "distribution" have the same meaning here as under U.S. copyright law.
// A "contribution" is the original software, or any additions or changes to
// the software.
// A "contributor" is any person that distributes its contribution under this
// license.
// "Licensed patents" are a contributor's patent claims that read directly on
// its contribution.
//
// 2. Grant of Rights
// (A) Copyright Grant- Subject to the terms of this license, including the
// license conditions and limitations in section 3, each contributor grants
// you a non-exclusive, worldwide, royalty-free copyright license to reproduce
// its contribution, prepare derivative works of its contribution, and
// distribute its contribution or any derivative works that you create.
// (B) Patent Grant- Subject to the terms of this license, including the license
// conditions and limitations in section 3, each contributor grants you a
// non-exclusive, worldwide, royalty-free license under its licensed patents to
// make, have made, use, sell, offer for sale, import, and/or otherwise dispose
// of its contribution in the software or derivative works of the contribution
// in the software.
//
// 3. Conditions and Limitations
// (A) No Trademark License- This license does not grant you rights to use any
// contributors' name, logo, or trademarks.
// (B) If you bring a patent claim against any contributor over patents that
// you claim are infringed by the software, your patent license from such
// contributor to the software ends automatically.
// (C) If you distribute any portion of the software, you must retain all
// copyright, patent, trademark, and attribution notices that are present in the
// software.
// (D) If you distribute any portion of the software in source code form, you
// may do so only under this license by including a complete copy of this
// license with your distribution. If you distribute any portion of the software
// in compiled or object code form, you may only do so under a license that
// complies with this license.
// (E) The software is licensed "as-is." You bear the risk of using it. The
// contributors give no express warranties, guarantees or conditions. You may
// have additional consumer rights under your local laws which this license
// cannot change. To the extent permitted under your local laws, the
// contributors exclude the implied warranties of merchantability, fitness for a
// particular purpose and non-infringement.
//-----------------------------------------------------------------------------
// AlphaTestEffect.cs
//
// Microsoft XNA Community Game Platform
// Copyright (C) Microsoft Corporation. All rights reserved.
//-----------------------------------------------------------------------------
namespace SharpDX.Toolkit.Graphics
{
    /// <summary>
    /// Built-in effect that supports alpha testing.
    /// This is one half of a partial class; the other half (including the
    /// <c>effectBytecode</c> used by the constructor) is generated elsewhere.
    /// </summary>
    public partial class AlphaTestEffect : Effect, IEffectMatrices, IEffectFog
    {
        #region Effect Parameters

        // Cached handles to the effect's named shader parameters, resolved once in Initialize().
        EffectParameter textureParam;
        EffectParameter diffuseColorParam;
        EffectParameter alphaTestParam;
        EffectParameter fogColorParam;
        EffectParameter fogVectorParam;
        EffectParameter worldViewProjParam;

        // The sub-pass variant selected by OnApply() from the computed shader index.
        EffectPass shaderPass;

        #endregion

        #region Fields

        // Cached state mirrored into shader parameters lazily via dirtyFlags in OnApply().
        bool fogEnabled;
        bool vertexColorEnabled;

        Matrix world = Matrix.Identity;
        Matrix view = Matrix.Identity;
        Matrix projection = Matrix.Identity;

        Matrix worldView;

        Vector3 diffuseColor = Vector3.One;

        float alpha = 1;

        float fogStart = 0;
        float fogEnd = 1;

        SharpDX.Direct3D11.Comparison alphaFunction = Direct3D11.Comparison.Greater;
        int referenceAlpha;

        // True when the current compare mode is Equal/NotEqual, which needs a
        // different shader variant than the less/greater family.
        bool isEqNe;

        EffectDirtyFlags dirtyFlags = EffectDirtyFlags.All;

        #endregion

        #region Public Properties

        /// <summary>
        /// Gets or sets the world matrix.
        /// </summary>
        public Matrix World
        {
            get { return world; }

            set
            {
                world = value;
                dirtyFlags |= EffectDirtyFlags.WorldViewProj | EffectDirtyFlags.Fog;
            }
        }

        /// <summary>
        /// Gets or sets the view matrix.
        /// </summary>
        public Matrix View
        {
            get { return view; }

            set
            {
                view = value;
                dirtyFlags |= EffectDirtyFlags.WorldViewProj | EffectDirtyFlags.Fog;
            }
        }

        /// <summary>
        /// Gets or sets the projection matrix.
        /// </summary>
        public Matrix Projection
        {
            get { return projection; }

            set
            {
                projection = value;
                dirtyFlags |= EffectDirtyFlags.WorldViewProj;
            }
        }

        /// <summary>
        /// Gets or sets the material diffuse color (range 0 to 1).
        /// </summary>
        public Vector3 DiffuseColor
        {
            get { return diffuseColor; }

            set
            {
                diffuseColor = value;
                dirtyFlags |= EffectDirtyFlags.MaterialColor;
            }
        }

        /// <summary>
        /// Gets or sets the material alpha.
        /// </summary>
        public float Alpha
        {
            get { return alpha; }

            set
            {
                alpha = value;
                dirtyFlags |= EffectDirtyFlags.MaterialColor;
            }
        }

        /// <summary>
        /// Gets or sets the fog enable flag. Changing it also forces a shader
        /// variant re-selection.
        /// </summary>
        public bool FogEnabled
        {
            get { return fogEnabled; }

            set
            {
                if (fogEnabled != value)
                {
                    fogEnabled = value;
                    dirtyFlags |= EffectDirtyFlags.ShaderIndex | EffectDirtyFlags.FogEnable;
                }
            }
        }

        /// <summary>
        /// Gets or sets the fog start distance.
        /// </summary>
        public float FogStart
        {
            get { return fogStart; }

            set
            {
                fogStart = value;
                dirtyFlags |= EffectDirtyFlags.Fog;
            }
        }

        /// <summary>
        /// Gets or sets the fog end distance.
        /// </summary>
        public float FogEnd
        {
            get { return fogEnd; }

            set
            {
                fogEnd = value;
                dirtyFlags |= EffectDirtyFlags.Fog;
            }
        }

        /// <summary>
        /// Gets or sets the fog color. Written straight through to the shader parameter.
        /// </summary>
        public Vector3 FogColor
        {
            get { return fogColorParam.GetValue<Vector3>(); }
            set { fogColorParam.SetValue(value); }
        }

        /// <summary>
        /// Gets or sets the current texture. Written straight through to the shader resource.
        /// </summary>
        public Texture2D Texture
        {
            get { return textureParam.GetResource<Texture2D>(); }
            set { textureParam.SetResource(value); }
        }

        /// <summary>
        /// Gets or sets whether vertex color is enabled. Changing it forces a
        /// shader variant re-selection.
        /// </summary>
        public bool VertexColorEnabled
        {
            get { return vertexColorEnabled; }

            set
            {
                if (vertexColorEnabled != value)
                {
                    vertexColorEnabled = value;
                    dirtyFlags |= EffectDirtyFlags.ShaderIndex;
                }
            }
        }

        /// <summary>
        /// Gets or sets the alpha compare function (default Greater).
        /// </summary>
        public Direct3D11.Comparison AlphaFunction
        {
            get { return alphaFunction; }

            set
            {
                alphaFunction = value;
                dirtyFlags |= EffectDirtyFlags.AlphaTest;
            }
        }

        /// <summary>
        /// Gets or sets the reference alpha value (default 0).
        /// </summary>
        public int ReferenceAlpha
        {
            get { return referenceAlpha; }

            set
            {
                referenceAlpha = value;
                dirtyFlags |= EffectDirtyFlags.AlphaTest;
            }
        }

        #endregion

        #region Methods

        /// <summary>
        /// Creates a new AlphaTestEffect with default parameter settings.
        /// </summary>
        public AlphaTestEffect(GraphicsDevice device) : this(device, device.DefaultEffectPool)
        {
        }

        /// <summary>
        /// Creates a new AlphaTestEffect with default parameter settings from a specified <see cref="EffectPool"/>.
        /// </summary>
        public AlphaTestEffect(GraphicsDevice device, EffectPool pool)
            : base(device, effectBytecode, pool)
        {
        }

        /// <summary>
        /// Resolves the named shader parameters once so OnApply can set them cheaply.
        /// </summary>
        protected override void Initialize()
        {
            textureParam = Parameters["Texture"];
            diffuseColorParam = Parameters["DiffuseColor"];
            alphaTestParam = Parameters["AlphaTest"];
            fogColorParam = Parameters["FogColor"];
            fogVectorParam = Parameters["FogVector"];
            worldViewProjParam = Parameters["WorldViewProj"];
        }

        ///// <summary>
        ///// Creates a new AlphaTestEffect by cloning parameter settings from an existing instance.
        ///// </summary>
        //protected AlphaTestEffect(AlphaTestEffect cloneSource)
        //    : base(cloneSource)
        //{
        //    fogEnabled = cloneSource.fogEnabled;
        //    vertexColorEnabled = cloneSource.vertexColorEnabled;
        //    world = cloneSource.world;
        //    view = cloneSource.view;
        //    projection = cloneSource.projection;
        //    diffuseColor = cloneSource.diffuseColor;
        //    alpha = cloneSource.alpha;
        //    fogStart = cloneSource.fogStart;
        //    fogEnd = cloneSource.fogEnd;
        //    alphaFunction = cloneSource.alphaFunction;
        //    referenceAlpha = cloneSource.referenceAlpha;
        //}

        ///// <summary>
        ///// Creates a clone of the current AlphaTestEffect instance.
        ///// </summary>
        //public override Effect Clone()
        //{
        //    return new AlphaTestEffect(this);
        //}

        /// <summary>
        /// Lazily computes derived parameter values immediately before applying the effect.
        /// Only the state marked dirty since the last apply is recomputed.
        /// </summary>
        protected internal override EffectPass OnApply(EffectPass pass)
        {
            // Recompute the world+view+projection matrix or fog vector?
            dirtyFlags = EffectHelpers.SetWorldViewProjAndFog(dirtyFlags, ref world, ref view, ref projection, ref worldView, fogEnabled, fogStart, fogEnd, worldViewProjParam, fogVectorParam);

            // Recompute the diffuse/alpha material color parameter?
            if ((dirtyFlags & EffectDirtyFlags.MaterialColor) != 0)
            {
                // Premultiplied alpha: RGB is scaled by alpha before upload.
                diffuseColorParam.SetValue(new Vector4(diffuseColor * alpha, alpha));

                dirtyFlags &= ~EffectDirtyFlags.MaterialColor;
            }

            // Recompute the alpha test settings?
            // The shader receives a Vector4 (x = reference, y = tolerance,
            // z/w = pass/fail signs fed to clip()).
            if ((dirtyFlags & EffectDirtyFlags.AlphaTest) != 0)
            {
                var alphaTest = new Vector4();
                bool eqNe = false;

                // Convert reference alpha from 8 bit integer to 0-1 float format.
                float reference = (float)referenceAlpha / 255f;

                // Comparison tolerance of half the 8 bit integer precision.
                const float threshold = 0.5f / 255f;

                switch (alphaFunction)
                {
                    case Direct3D11.Comparison.Less:
                        // Shader will evaluate: clip((a < x) ? z : w)
                        alphaTest.X = reference - threshold;
                        alphaTest.Z = 1;
                        alphaTest.W = -1;
                        break;

                    case Direct3D11.Comparison.LessEqual:
                        // Shader will evaluate: clip((a < x) ? z : w)
                        alphaTest.X = reference + threshold;
                        alphaTest.Z = 1;
                        alphaTest.W = -1;
                        break;

                    case Direct3D11.Comparison.GreaterEqual:
                        // Shader will evaluate: clip((a < x) ? z : w)
                        alphaTest.X = reference - threshold;
                        alphaTest.Z = -1;
                        alphaTest.W = 1;
                        break;

                    case Direct3D11.Comparison.Greater:
                        // Shader will evaluate: clip((a < x) ? z : w)
                        alphaTest.X = reference + threshold;
                        alphaTest.Z = -1;
                        alphaTest.W = 1;
                        break;

                    case Direct3D11.Comparison.Equal:
                        // Shader will evaluate: clip((abs(a - x) < Y) ? z : w)
                        alphaTest.X = reference;
                        alphaTest.Y = threshold;
                        alphaTest.Z = 1;
                        alphaTest.W = -1;
                        eqNe = true;
                        break;

                    case Direct3D11.Comparison.NotEqual:
                        // Shader will evaluate: clip((abs(a - x) < Y) ? z : w)
                        alphaTest.X = reference;
                        alphaTest.Y = threshold;
                        alphaTest.Z = -1;
                        alphaTest.W = 1;
                        eqNe = true;
                        break;

                    case Direct3D11.Comparison.Never:
                        // Shader will evaluate: clip((a < x) ? z : w)
                        alphaTest.Z = -1;
                        alphaTest.W = -1;
                        break;

                    case Direct3D11.Comparison.Always:
                    default:
                        // Shader will evaluate: clip((a < x) ? z : w)
                        alphaTest.Z = 1;
                        alphaTest.W = 1;
                        break;
                }

                alphaTestParam.SetValue(alphaTest);

                dirtyFlags &= ~EffectDirtyFlags.AlphaTest;

                // If we changed between less/greater vs. equal/notequal
                // compare modes, we must also update the shader index.
                if (isEqNe != eqNe)
                {
                    isEqNe = eqNe;
                    dirtyFlags |= EffectDirtyFlags.ShaderIndex;
                }
            }

            // Recompute the shader index?
            // The index is a bitfield: +1 = no fog, +2 = vertex color, +4 = eq/ne compare.
            if ((dirtyFlags & EffectDirtyFlags.ShaderIndex) != 0)
            {
                int shaderIndex = 0;

                if (!fogEnabled)
                    shaderIndex += 1;

                if (vertexColorEnabled)
                    shaderIndex += 2;

                if (isEqNe)
                    shaderIndex += 4;

                shaderPass = pass.SubPasses[shaderIndex];

                dirtyFlags &= ~EffectDirtyFlags.ShaderIndex;
            }

            return base.OnApply(shaderPass);
        }

        #endregion
    }
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
namespace Apache.Geode.Client.Tests
{
using Apache.Geode.Client;
/// <summary>
/// Test domain object representing a security position, serializable through
/// the Geode <c>DataInput</c>/<c>DataOutput</c> protocol (class id 0x07).
/// </summary>
public class Position
  : IGeodeSerializable
{
    #region Private members

    private long m_avg20DaysVol;
    private string m_bondRating;
    private double m_convRatio;
    private string m_country;
    private double m_delta;
    private long m_industry;
    private long m_issuer;
    private double m_mktValue;
    private double m_qty;
    private string m_secId;
    private string m_secLinks;
    private string m_secType;
    private int m_sharesOutstanding;
    private string m_underlyer;
    private long m_volatility;
    private int m_pid;
    // Process-wide counter: next position id, and parity source for the
    // quantity multiplier in Position(string, int).
    private static int m_count = 0;

    #endregion

    #region Private methods

    /// <summary>Resets every field to its default (zero/null) value.</summary>
    private void Init()
    {
      m_avg20DaysVol = 0;
      m_bondRating = null;
      m_convRatio = 0.0;
      m_country = null;
      m_delta = 0.0;
      m_industry = 0;
      m_issuer = 0;
      m_mktValue = 0.0;
      m_qty = 0.0;
      m_secId = null;
      m_secLinks = null;
      m_secType = null;
      m_sharesOutstanding = 0;
      m_underlyer = null;
      m_volatility = 0;
      m_pid = 0;
    }

    /// <summary>Null-safe size of a nested serializable member.</summary>
    private UInt32 GetObjectSize(IGeodeSerializable obj)
    {
      return (obj == null ? 0 : obj.ObjectSize);
    }

    /// <summary>
    /// Null-safe UTF-16 byte size of a string field. Most string fields stay
    /// null after Init(), so ObjectSize must not dereference them directly.
    /// </summary>
    private static UInt32 GetStringSize(string str)
    {
      return (UInt32)(str == null ? 0 : str.Length * sizeof(char));
    }

    #endregion

    #region Public accessors

    /// <summary>Security identifier.</summary>
    public string SecId
    {
      get
      {
        return m_secId;
      }
    }

    /// <summary>Position id assigned at construction.</summary>
    public int Id
    {
      get
      {
        return m_pid;
      }
    }

    /// <summary>Number of shares outstanding.</summary>
    public int SharesOutstanding
    {
      get
      {
        return m_sharesOutstanding;
      }
    }

    /// <summary>Shared instance counter (also the next position id).</summary>
    public static int Count
    {
      get
      {
        return m_count;
      }
      set
      {
        m_count = value;
      }
    }

    public override string ToString()
    {
      return "Position [secId=" + m_secId + " sharesOutstanding=" + m_sharesOutstanding + " type=" + m_secType + " id=" + m_pid + "]";
    }

    #endregion

    #region Constructors

    /// <summary>Creates an empty position (all fields zero/null).</summary>
    public Position()
    {
      Init();
    }

    // This ctor is for a data validation test: secId is a run of
    // (iForExactVal + 1) 'a' characters.
    public Position(Int32 iForExactVal)
    {
      Init();
      char[] id = new char[iForExactVal + 1];
      for (int i = 0; i <= iForExactVal; i++)
      {
        id[i] = 'a';
      }
      // Bug fix: char[].ToString() returns "System.Char[]", not the buffer
      // contents; build the id string from the characters instead.
      m_secId = new string(id);
      m_qty = iForExactVal % 2 == 0 ? 1000 : 100;
      m_mktValue = m_qty * 2;
      m_sharesOutstanding = iForExactVal;
      m_secType = "a";
      m_pid = iForExactVal;
    }

    /// <summary>Creates a position with the given security id and share count.</summary>
    public Position(string id, int shares)
    {
      Init();
      m_secId = id;
      m_qty = shares * (m_count % 2 == 0 ? 10.0 : 100.0);
      m_mktValue = m_qty * 1.2345998;
      m_sharesOutstanding = shares;
      m_secType = "a";
      m_pid = m_count++;
    }

    #endregion

    #region IGeodeSerializable Members

    /// <summary>
    /// Deserializes all fields; read order must mirror ToData exactly.
    /// </summary>
    public void FromData(DataInput input)
    {
      m_avg20DaysVol = input.ReadInt64();
      m_bondRating = input.ReadUTF();
      m_convRatio = input.ReadDouble();
      m_country = input.ReadUTF();
      m_delta = input.ReadDouble();
      m_industry = input.ReadInt64();
      m_issuer = input.ReadInt64();
      m_mktValue = input.ReadDouble();
      m_qty = input.ReadDouble();
      m_secId = input.ReadUTF();
      m_secLinks = input.ReadUTF();
      m_secType = input.ReadUTF();
      m_sharesOutstanding = input.ReadInt32();
      m_underlyer = input.ReadUTF();
      m_volatility = input.ReadInt64();
      m_pid = input.ReadInt32();
      // Bug fix: removed invalid "return this;" — FromData is void and a
      // value return does not compile.
    }

    /// <summary>
    /// Serializes all fields; write order must mirror FromData exactly.
    /// </summary>
    public void ToData(DataOutput output)
    {
      output.WriteInt64(m_avg20DaysVol);
      output.WriteUTF(m_bondRating);
      output.WriteDouble(m_convRatio);
      output.WriteUTF(m_country);
      output.WriteDouble(m_delta);
      output.WriteInt64(m_industry);
      output.WriteInt64(m_issuer);
      output.WriteDouble(m_mktValue);
      output.WriteDouble(m_qty);
      output.WriteUTF(m_secId);
      output.WriteUTF(m_secLinks);
      output.WriteUTF(m_secType);
      output.WriteInt32(m_sharesOutstanding);
      output.WriteUTF(m_underlyer);
      output.WriteInt64(m_volatility);
      output.WriteInt32(m_pid);
    }

    /// <summary>
    /// Approximate in-memory size of this object. All string fields are
    /// guarded against null (previously only m_secType was, so a
    /// default-constructed Position threw NullReferenceException here).
    /// </summary>
    public UInt32 ObjectSize
    {
      get
      {
        UInt32 objectSize = 0;
        objectSize += (UInt32)sizeof(long);          // m_avg20DaysVol
        objectSize += GetStringSize(m_bondRating);
        objectSize += (UInt32)sizeof(double);        // m_convRatio
        objectSize += GetStringSize(m_country);
        objectSize += (UInt32)sizeof(double);        // m_delta
        objectSize += (UInt32)sizeof(Int64);         // m_industry
        objectSize += (UInt32)sizeof(Int64);         // m_issuer
        objectSize += (UInt32)sizeof(double);        // m_mktValue
        objectSize += (UInt32)sizeof(double);        // m_qty
        objectSize += GetStringSize(m_secId);
        objectSize += GetStringSize(m_secLinks);
        objectSize += GetStringSize(m_secType);
        objectSize += (UInt32)sizeof(Int32);         // m_sharesOutstanding
        objectSize += GetStringSize(m_underlyer);
        objectSize += (UInt32)sizeof(Int64);         // m_volatility
        objectSize += (UInt32)sizeof(Int32);         // m_pid
        return objectSize;
      }
    }

    /// <summary>Registered serialization class id for Position.</summary>
    public UInt32 ClassId
    {
      get
      {
        return 0x07;
      }
    }

    #endregion

    /// <summary>Factory used when registering this type for deserialization.</summary>
    public static IGeodeSerializable CreateDeserializable()
    {
      return new Position();
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using FluentAssertions;
using k8s.Models;
using k8s.Util.Informer.Cache;
using Xunit;
namespace k8s.Tests.Util.Informer.Cache
{
/// <summary>
/// Unit tests for the informer <c>Cache</c>: store mutation, key listing,
/// index management, and key-function configuration.
/// </summary>
public class CacheTest
{
    // Bug fix: xUnit only discovers PUBLIC test methods. The first five
    // tests below were declared private and were silently never run.
    [Fact(DisplayName = "Create default cache success")]
    public void CreateCacheSuccess()
    {
        var cache = new Cache<V1Node>();
        cache.Should().NotBeNull();
        // A freshly constructed cache always carries the namespace index.
        cache.GetIndexers().ContainsKey(Caches.NamespaceIndex).Should().BeTrue();
    }

    [Fact(DisplayName = "Add cache item success")]
    public void AddCacheItemSuccess()
    {
        var aPod = Helpers.CreatePods(1).First();
        var cache = new Cache<V1Pod>();
        cache.Add(aPod);
        cache.Get(aPod).Equals(aPod).Should().BeTrue();
    }

    [Fact(DisplayName = "Update cache item success")]
    public void UpdateCacheItemSuccess()
    {
        var aPod = Helpers.CreatePods(1).First();
        var cache = new Cache<V1Pod>();
        cache.Add(aPod);
        aPod.Kind = "another-kind";
        cache.Update(aPod);
        cache.Get(aPod).Kind.Equals(aPod.Kind).Should().BeTrue();
    }

    [Fact(DisplayName = "Delete cache item success")]
    public void DeleteCacheItemSuccess()
    {
        var aPod = Helpers.CreatePods(1).First();
        var cache = new Cache<V1Pod>();
        cache.Add(aPod);
        cache.Delete(aPod);
        // Todo: check indices for removed item
        cache.Get(aPod).Should().BeNull();
    }

    [Fact(DisplayName = "Replace cache items success")]
    public void ReplaceCacheItemsSuccess()
    {
        var pods = Helpers.CreatePods(3);
        var aPod = pods.First();
        var anotherPod = pods.Skip(1).First();
        var yetAnotherPod = pods.Skip(2).First();
        var cache = new Cache<V1Pod>();
        cache.Add(aPod);
        // Replace drops the existing content and installs the new items.
        cache.Replace(new[] { anotherPod, yetAnotherPod });
        // Todo: check indices for replaced items
        cache.Get(anotherPod).Should().NotBeNull();
        cache.Get(yetAnotherPod).Should().NotBeNull();
    }

    [Fact(DisplayName = "List item keys success")]
    public void ListItemKeysSuccess()
    {
        var pods = Helpers.CreatePods(3);
        var aPod = pods.First();
        var anotherPod = pods.Skip(1).First();
        var cache = new Cache<V1Pod>();
        cache.Add(aPod);
        cache.Add(anotherPod);
        // Keys follow the default "namespace/name" convention.
        var keys = cache.ListKeys();
        keys.Should().Contain($"{aPod.Metadata.NamespaceProperty}/{aPod.Metadata.Name}");
        keys.Should().Contain($"{anotherPod.Metadata.NamespaceProperty}/{anotherPod.Metadata.Name}");
    }

    [Fact(DisplayName = "Get item doesn't exist")]
    public void GetItemNotExist()
    {
        var aPod = Helpers.CreatePods(1).First();
        var cache = new Cache<V1Pod>();
        var item = cache.Get(aPod);
        item.Should().BeNull();
    }

    [Fact(DisplayName = "Get item success")]
    public void GetItemSuccess()
    {
        var aPod = Helpers.CreatePods(1).First();
        var cache = new Cache<V1Pod>();
        cache.Add(aPod);
        var item = cache.Get(aPod);
        item.Equals(aPod).Should().BeTrue();
    }

    [Fact(DisplayName = "List items success")]
    public void ListItemSuccess()
    {
        var pods = Helpers.CreatePods(3);
        var aPod = pods.First();
        var anotherPod = pods.Skip(1).First();
        var yetAnotherPod = pods.Skip(2).First();
        var cache = new Cache<V1Pod>();
        cache.Add(aPod);
        cache.Add(anotherPod);
        cache.Add(yetAnotherPod);
        var items = cache.List();
        items.Should().HaveCount(3);
        items.Should().Contain(aPod);
        items.Should().Contain(anotherPod);
        items.Should().Contain(yetAnotherPod);
    }

    [Fact(DisplayName = "Get item by key success")]
    public void GetItemByKeySuccess()
    {
        var pod = Helpers.CreatePods(1).First();
        var cache = new Cache<V1Pod>();
        cache.Add(pod);
        var item = cache.GetByKey($"{pod.Metadata.NamespaceProperty}/{pod.Metadata.Name}");
        item.Should().NotBeNull();
    }

    [Fact(DisplayName = "Index items no index")]
    public void IndexItemsNoIndex()
    {
        var pod = Helpers.CreatePods(1).First();
        var cache = new Cache<V1Pod>();
        cache.Add(pod);
        // Unknown index names are rejected.
        Assert.Throws<ArgumentException>(() => { cache.Index("asdf", pod); });
    }

    [Fact(DisplayName = "Index items success")]
    public void IndexItemsSuccess()
    {
        var pod = Helpers.CreatePods(1).First();
        var cache = new Cache<V1Pod>();
        cache.Add(pod);
        var items = cache.Index("namespace", pod);
        items.Should().Contain(pod);
    }

    [Fact(DisplayName = "Get index keys no index")]
    public void GetIndexKeysNoIndex()
    {
        var cache = new Cache<V1Pod>();
        Assert.Throws<ArgumentException>(() => { cache.IndexKeys("a", "b"); });
    }

    [Fact(DisplayName = "Get index keys no indice item")]
    public void GetIndexKeysNoIndiceItem()
    {
        var cache = new Cache<V1Pod>();
        Assert.Throws<KeyNotFoundException>(() => { cache.IndexKeys("namespace", "b"); });
    }

    [Fact(DisplayName = "Get index keys success")]
    public void GetIndexKeysSuccess()
    {
        var pod = Helpers.CreatePods(1).First();
        var cache = new Cache<V1Pod>();
        cache.Add(pod);
        var keys = cache.IndexKeys("namespace", pod.Metadata.NamespaceProperty);
        keys.Should().NotBeNull();
        keys.Should().Contain(Caches.MetaNamespaceKeyFunc(pod));
    }

    [Fact(DisplayName = "List by index no index")]
    public void ListByIndexNoIndex()
    {
        var cache = new Cache<V1Pod>();
        Assert.Throws<ArgumentException>(() => { cache.ByIndex("a", "b"); });
    }

    [Fact(DisplayName = "List by index no indice item")]
    public void ListByIndexNoIndiceItem()
    {
        var cache = new Cache<V1Pod>();
        Assert.Throws<KeyNotFoundException>(() => { cache.ByIndex("namespace", "b"); });
    }

    [Fact(DisplayName = "List by index success")]
    public void ListByIndexSuccess()
    {
        var pod = Helpers.CreatePods(1).First();
        var cache = new Cache<V1Pod>();
        cache.Add(pod);
        var items = cache.ByIndex("namespace", pod.Metadata.NamespaceProperty);
        items.Should().Contain(pod);
    }

    /* Add Indexers */
    [Fact(DisplayName = "Add null indexers")]
    public void AddNullIndexers()
    {
        var cache = new Cache<V1Pod>();
        Assert.Throws<ArgumentNullException>(() => { cache.AddIndexers(null); });
    }

    [Fact(DisplayName = "Add indexers with conflict")]
    public void AddIndexersConflict()
    {
        var cache = new Cache<V1Pod>();
        Dictionary<string, Func<V1Pod, List<string>>> initialIndexers = new Dictionary<string, Func<V1Pod, List<string>>>()
        {
            { "1", pod => new List<string>() },
            { "2", pod => new List<string>() },
        };
        Dictionary<string, Func<V1Pod, List<string>>> conflictIndexers = new Dictionary<string, Func<V1Pod, List<string>>>()
        {
            // Re-registers name "1" and must be rejected wholesale.
            { "1", pod => new List<string>() },
        };
        cache.AddIndexers(initialIndexers);
        Assert.Throws<ArgumentException>(() => { cache.AddIndexers(conflictIndexers); });
    }

    [Fact(DisplayName = "Add indexers success")]
    public void AddIndexersSuccess()
    {
        var cache = new Cache<V1Pod>();
        Dictionary<string, Func<V1Pod, List<string>>> indexers = new Dictionary<string, Func<V1Pod, List<string>>>()
        {
            { "2", pod => new List<string>() { pod.Name() } },
            { "3", pod => new List<string>() { pod.Name() } },
        };
        cache.AddIndexers(indexers);
        var savedIndexers = cache.GetIndexers();
        savedIndexers.Should().HaveCount(indexers.Count + 1); // blank cache constructor will add a default index
        savedIndexers.Should().Contain(indexers);
        // Todo: check indicies collection for new indexname keys
    }

    /* Add Index Function */
    [Fact(DisplayName = "Add index function success")]
    public void AddIndexFuncSuccess()
    {
        var cache = new Cache<V1Pod>();
        cache.AddIndexFunc("1", pod => new List<string>() { pod.Name() });
        var savedIndexers = cache.GetIndexers();
        savedIndexers.Should().HaveCount(2);
        // Todo: check indicies collection for new indexname keys
    }

    /* Get Key Function */
    [Fact(DisplayName = "Get default key function success")]
    public void GetDefaultKeyFuncSuccess()
    {
        var pod = new V1Pod()
        {
            Metadata = new V1ObjectMeta()
            {
                Name = "a-name",
                NamespaceProperty = "the-namespace",
            },
        };
        var cache = new Cache<V1Pod>();
        // The default key function is DeletionHandlingMetaNamespaceKeyFunc.
        var defaultReturnValue = Caches.DeletionHandlingMetaNamespaceKeyFunc(pod);
        var funcReturnValue = cache.KeyFunc(pod);
        Assert.True(defaultReturnValue.Equals(funcReturnValue));
    }

    /* Set Key Function */
    [Fact(DisplayName = "Set key function success")]
    public void SetKeyFuncSuccess()
    {
        var aPod = new V1Pod()
        {
            Kind = "some-kind",
            Metadata = new V1ObjectMeta()
            {
                Name = "a-name",
                NamespaceProperty = "the-namespace",
            },
        };
        var cache = new Cache<V1Pod>();
        // Replace the key function and confirm the cache uses it.
        var newFunc = new Func<IKubernetesObject<V1ObjectMeta>, string>((pod) => pod.Kind);
        var defaultReturnValue = newFunc(aPod);
        cache.SetKeyFunc(newFunc);
        var funcReturnValue = cache.KeyFunc(aPod);
        Assert.True(defaultReturnValue.Equals(funcReturnValue));
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Linq;
using Tests.Collections;
using Xunit;
namespace System.Collections.ObjectModel.Tests
{
public abstract partial class KeyedCollectionTests<TKey, TValue>
where TValue : IComparable<TValue> where TKey : IEquatable<TKey>
{
// True when TKey can hold null (reference type or Nullable<T>); gates the
// null-key rows emitted by ClassData / ContainsKeyData.
private static readonly bool s_keyNullable = default(TKey)
== null;
// Call counter for GetSometimesNullKey: even calls yield a default (null)
// key, odd calls a real one.
private static int s_sometimesNullIndex;
/// <summary>Generator that always pairs an item with its real key.</summary>
public static Named<KeyedCollectionGetKeyedValue<TKey, TValue>> GetNeverNullKeyMethod
{
    get
    {
        // Bundle the generator with a readable name for test output.
        var named = new Named<KeyedCollectionGetKeyedValue<TKey, TValue>>(
            "GetNeverNullKey", GetNeverNullKey);
        return named;
    }
}
/// <summary>Generator that alternates between null and real keys.</summary>
public static Named<KeyedCollectionGetKeyedValue<TKey, TValue>> GetSometimesNullKeyMethod
{
    get
    {
        // Bundle the generator with a readable name for test output.
        var named = new Named<KeyedCollectionGetKeyedValue<TKey, TValue>>(
            "GetSometimesNullKey", GetSometimesNullKey);
        return named;
    }
}
/// <summary>Generator that always produces a null (default) key.</summary>
public static Named<KeyedCollectionGetKeyedValue<TKey, TValue>> GetAlwaysNullKeyMethod
{
    get
    {
        // Bundle the generator with a readable name for test output.
        var named = new Named<KeyedCollectionGetKeyedValue<TKey, TValue>>(
            "GetAlwaysNullKey", GetAlwaysNullKey);
        return named;
    }
}
/// <summary>Collection sizes under test: empty, and large enough to use the lookup dictionary.</summary>
public static IEnumerable<object[]> CollectionSizes
{
    get
    {
        foreach (int size in new[] { 0, 33 })
        {
            yield return new object[] { size };
        }
    }
}
/// <summary>Size/generator pairs using only the never-null key generator.</summary>
public static IEnumerable<object[]> ClassData2
{
    get
    {
        foreach (int size in new[] { 0, 33 })
        {
            yield return new object[] { size, GetNeverNullKeyMethod };
        }
    }
}
/// <summary>
/// Size/generator pairs; the null-key generators are only included when
/// TKey can actually represent null.
/// </summary>
public static IEnumerable<object[]> ClassData
{
    get
    {
        int[] sizes = { 0, 33 };
        foreach (int size in sizes)
        {
            yield return new object[] { size, GetNeverNullKeyMethod };
        }
        if (s_keyNullable)
        {
            foreach (var method in new[] { GetSometimesNullKeyMethod, GetAlwaysNullKeyMethod })
            {
                foreach (int size in sizes)
                {
                    yield return new object[] { size, method };
                }
            }
        }
    }
}
/// <summary>
/// Dictionary-creation-threshold (32 or -1 = never) crossed with each of the
/// four item-insertion strategies, in the original (threshold-inner) order.
/// </summary>
public static IEnumerable<object[]> ThresholdData
{
    get
    {
        var names = new[] { "Add<T>", "Insert<T>", "Add", "Add" };
        var strategies = new AddItemsFunc<TKey, IKeyedItem<TKey, TValue>>[]
        {
            Helper.AddItems,
            Helper.InsertItems,
            Helper.AddItemsObject,
            Helper.InsertItemsObject,
        };
        for (int i = 0; i < strategies.Length; i++)
        {
            foreach (int threshold in new[] { 32, -1 })
            {
                yield return new object[]
                {
                    threshold,
                    new Named<AddItemsFunc<TKey, IKeyedItem<TKey, TValue>>>(
                        names[i], strategies[i]),
                };
            }
        }
    }
}
/// <summary>
/// Size/generator combinations for ContainsKey tests; null-key generators
/// are included only for nullable TKey.
/// </summary>
public static IEnumerable<object[]> ContainsKeyData
{
    get
    {
        var sizes = new[]
        {
            new object[] { 0 },
            new object[] { 1 },
            new object[] { 16 },
            new object[] { 33 },
        };
        var generatorMethods = new List<object[]>
        {
            new object[] { GetNeverNullKeyMethod },
        };
        if (s_keyNullable)
        {
            generatorMethods.Add(new object[] { GetSometimesNullKeyMethod });
            generatorMethods.Add(new object[] { GetAlwaysNullKeyMethod });
        }
        foreach (var size in sizes)
        {
            foreach (var method in generatorMethods)
            {
                // Push appends the generator cell onto the size cell.
                yield return size.Push(method);
            }
        }
    }
}
/// <summary>
/// Rows of four add/remove counts, each emitted once with dictionary
/// threshold 0 and once with 32 (same row order as before).
/// </summary>
public static IEnumerable<object[]> DictionaryData
{
    get
    {
        int[][] shapes =
        {
            new[] { 10, 0, 0, 0 },
            new[] { 0, 10, 0, 0 },
            new[] { 10, 0, 5, 0 },
            new[] { 0, 10, 5, 0 },
            new[] { 10, 10, 10, 0 },
            new[] { 10, 0, 0, 5 },
            new[] { 0, 10, 0, 5 },
            new[] { 10, 10, 0, 10 },
            new[] { 10, 0, 3, 3 },
            new[] { 0, 10, 3, 3 },
            new[] { 10, 10, 5, 5 },
        };
        foreach (int threshold in new[] { 0, 32 })
        {
            foreach (int[] s in shapes)
            {
                yield return new object[] { s[0], s[1], s[2], s[3], threshold };
            }
        }
    }
}
/// <summary>Maps a value to its key; supplied by the concrete test fixture.</summary>
public abstract TKey GetKeyForItem(TValue item);
/// <summary>Produces a fresh test value; supplied by the concrete test fixture.</summary>
public abstract TValue GenerateValue();
/// <summary>Boxes a freshly generated value for the non-generic IList tests.</summary>
public object GenerateValueObject()
{
    object boxed = GenerateValue();
    return boxed;
}
/// <summary>Builds a keyed item whose key is always the value's real key.</summary>
private static IKeyedItem<TKey, TValue> GetNeverNullKey(
    Func<TValue> getValue,
    Func<TValue, TKey> getKeyForItem)
{
    TValue value = getValue();
    TKey key = getKeyForItem(value);
    return new KeyedItem<TKey, TValue>(key, value);
}
/// <summary>
/// Builds a keyed item whose key alternates per call: even calls use a
/// default (null) key, odd calls the value's real key.
/// </summary>
private static IKeyedItem<TKey, TValue> GetSometimesNullKey(
    Func<TValue> getValue,
    Func<TValue, TKey> getKeyForItem)
{
    TValue value = getValue();
    bool useDefaultKey = (s_sometimesNullIndex++ & 1) == 0;
    TKey key = useDefaultKey ? default(TKey) : getKeyForItem(value);
    return new KeyedItem<TKey, TValue>(key, value);
}
/// <summary>Builds a keyed item whose key is always default (null).</summary>
private static IKeyedItem<TKey, TValue> GetAlwaysNullKey(
    Func<TValue> getValue,
    Func<TValue, TKey> getKeyForItem)
{
    TValue value = getValue();
    return new KeyedItem<TKey, TValue>(default(TKey), value);
}
[Theory]
[MemberData(nameof(ClassData))]
public void AddNullKey(
    int collectionSize,
    Named<KeyedCollectionGetKeyedValue<TKey, TValue>> generateKeyedItem)
{
    // An item with a null key is stored in the list but never enters the
    // key dictionary.
    TValue firstValue = GenerateValue();
    TValue nullKeyedValue = GenerateValue();
    TKey firstKey = GetKeyForItem(firstValue);
    var firstItem = new KeyedItem<TKey, TValue>(firstKey, firstValue);
    var collection = new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
    TKey[] expectedKeys;
    IKeyedItem<TKey, TValue>[] expectedItems;
    IKeyedItem<TKey, TValue>[] expectedItemsWithKeys;
    collection.AddItems(
        generateKeyedItem.Value.Bind(GenerateValue, GetKeyForItem),
        ki => ki.Key,
        collectionSize,
        out expectedKeys,
        out expectedItems,
        out expectedItemsWithKeys);
    var nullKeyItem = new KeyedItem<TKey, TValue>(default(TKey), nullKeyedValue);
    expectedKeys = expectedKeys.Push(firstKey);
    expectedItems = expectedItems.Push(firstItem, nullKeyItem);
    expectedItemsWithKeys = expectedItemsWithKeys.Push(firstItem);
    collection.Add(firstItem);
    collection.Add(nullKeyItem);
    collection.Verify(expectedKeys, expectedItems, expectedItemsWithKeys);
}
[Theory]
[MemberData(nameof(ClassData))]
public void AddExistingKey(
    int collectionSize,
    Named<KeyedCollectionGetKeyedValue<TKey, TValue>> generateKeyedItem)
{
    // Adding a second item under an already-present key must throw and
    // leave the collection unchanged.
    TValue firstValue = GenerateValue();
    TValue duplicateValue = GenerateValue();
    TKey firstKey = GetKeyForItem(firstValue);
    var firstItem = new KeyedItem<TKey, TValue>(firstKey, firstValue);
    var collection = new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
    TKey[] expectedKeys;
    IKeyedItem<TKey, TValue>[] expectedItems;
    IKeyedItem<TKey, TValue>[] expectedItemsWithKeys;
    collection.AddItems(
        generateKeyedItem.Value.Bind(GenerateValue, GetKeyForItem),
        ki => ki.Key,
        collectionSize,
        out expectedKeys,
        out expectedItems,
        out expectedItemsWithKeys);
    var duplicateItem = new KeyedItem<TKey, TValue>(firstKey, duplicateValue);
    expectedKeys = expectedKeys.Push(firstKey);
    expectedItems = expectedItems.Push(firstItem);
    expectedItemsWithKeys = expectedItemsWithKeys.Push(firstItem);
    collection.Add(firstItem);
    AssertExtensions.Throws<ArgumentException>(null, () => collection.Add(duplicateItem));
    collection.Verify(expectedKeys, expectedItems, expectedItemsWithKeys);
}
[Theory]
[MemberData(nameof(ClassData))]
public void AddUniqueKey(
    int collectionSize,
    Named<KeyedCollectionGetKeyedValue<TKey, TValue>> generateKeyedItem)
{
    // Two items with distinct keys both land in the list and dictionary.
    TValue firstValue = GenerateValue();
    TValue secondValue = GenerateValue();
    TKey firstKey = GetKeyForItem(firstValue);
    TKey secondKey = GetKeyForItem(secondValue);
    var firstItem = new KeyedItem<TKey, TValue>(firstKey, firstValue);
    var collection = new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
    TKey[] expectedKeys;
    IKeyedItem<TKey, TValue>[] expectedItems;
    IKeyedItem<TKey, TValue>[] expectedItemsWithKeys;
    collection.AddItems(
        generateKeyedItem.Value.Bind(GenerateValue, GetKeyForItem),
        ki => ki.Key,
        collectionSize,
        out expectedKeys,
        out expectedItems,
        out expectedItemsWithKeys);
    var secondItem = new KeyedItem<TKey, TValue>(secondKey, secondValue);
    expectedKeys = expectedKeys.Push(firstKey, secondKey);
    expectedItems = expectedItems.Push(firstItem, secondItem);
    expectedItemsWithKeys = expectedItemsWithKeys.Push(firstItem, secondItem);
    collection.Add(firstItem);
    collection.Add(secondItem);
    collection.Verify(expectedKeys, expectedItems, expectedItemsWithKeys);
}
[Theory]
[MemberData(nameof(ClassData))]
public void NonGenericAddNullKey(
    int collectionSize,
    Named<KeyedCollectionGetKeyedValue<TKey, TValue>> generateKeyedItem)
{
    // Same as AddNullKey, but the null-keyed item goes in through IList.Add.
    TValue firstValue = GenerateValue();
    TValue nullKeyedValue = GenerateValue();
    TKey firstKey = GetKeyForItem(firstValue);
    var firstItem = new KeyedItem<TKey, TValue>(firstKey, firstValue);
    var collection = new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
    IList nonGenericCollection = collection;
    TKey[] expectedKeys;
    IKeyedItem<TKey, TValue>[] expectedItems;
    IKeyedItem<TKey, TValue>[] expectedItemsWithKeys;
    collection.AddItems(
        generateKeyedItem.Value.Bind(GenerateValue, GetKeyForItem),
        ki => ki.Key,
        collectionSize,
        out expectedKeys,
        out expectedItems,
        out expectedItemsWithKeys);
    var nullKeyItem = new KeyedItem<TKey, TValue>(default(TKey), nullKeyedValue);
    expectedKeys = expectedKeys.Push(firstKey);
    expectedItems = expectedItems.Push(firstItem, nullKeyItem);
    expectedItemsWithKeys = expectedItemsWithKeys.Push(firstItem);
    collection.Add(firstItem);
    nonGenericCollection.Add(nullKeyItem);
    collection.Verify(expectedKeys, expectedItems, expectedItemsWithKeys);
}
[Theory]
[MemberData(nameof(ClassData))]
public void NonGenericAddExistingKey(
    int collectionSize,
    Named<KeyedCollectionGetKeyedValue<TKey, TValue>> generateKeyedItem)
{
    // IList.Add with a duplicate key must throw and leave state unchanged.
    TValue firstValue = GenerateValue();
    TValue duplicateValue = GenerateValue();
    TKey firstKey = GetKeyForItem(firstValue);
    var firstItem = new KeyedItem<TKey, TValue>(firstKey, firstValue);
    var collection = new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
    IList nonGenericCollection = collection;
    TKey[] expectedKeys;
    IKeyedItem<TKey, TValue>[] expectedItems;
    IKeyedItem<TKey, TValue>[] expectedItemsWithKeys;
    collection.AddItems(
        generateKeyedItem.Value.Bind(GenerateValue, GetKeyForItem),
        ki => ki.Key,
        collectionSize,
        out expectedKeys,
        out expectedItems,
        out expectedItemsWithKeys);
    var duplicateItem = new KeyedItem<TKey, TValue>(firstKey, duplicateValue);
    expectedKeys = expectedKeys.Push(firstKey);
    expectedItems = expectedItems.Push(firstItem);
    expectedItemsWithKeys = expectedItemsWithKeys.Push(firstItem);
    collection.Add(firstItem);
    AssertExtensions.Throws<ArgumentException>(null, () => nonGenericCollection.Add(duplicateItem));
    collection.Verify(expectedKeys, expectedItems, expectedItemsWithKeys);
}
[Theory]
[MemberData(nameof(ClassData))]
public void NonGenericAddUniqueKey(
    int collectionSize,
    Named<KeyedCollectionGetKeyedValue<TKey, TValue>> generateKeyedItem)
{
    // IList.Add with a fresh key behaves like the generic Add.
    TValue firstValue = GenerateValue();
    TValue secondValue = GenerateValue();
    TKey firstKey = GetKeyForItem(firstValue);
    TKey secondKey = GetKeyForItem(secondValue);
    var firstItem = new KeyedItem<TKey, TValue>(firstKey, firstValue);
    var collection = new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
    IList nonGenericCollection = collection;
    TKey[] expectedKeys;
    IKeyedItem<TKey, TValue>[] expectedItems;
    IKeyedItem<TKey, TValue>[] expectedItemsWithKeys;
    collection.AddItems(
        generateKeyedItem.Value.Bind(GenerateValue, GetKeyForItem),
        ki => ki.Key,
        collectionSize,
        out expectedKeys,
        out expectedItems,
        out expectedItemsWithKeys);
    var secondItem = new KeyedItem<TKey, TValue>(secondKey, secondValue);
    expectedKeys = expectedKeys.Push(firstKey, secondKey);
    expectedItems = expectedItems.Push(firstItem, secondItem);
    expectedItemsWithKeys = expectedItemsWithKeys.Push(firstItem, secondItem);
    collection.Add(firstItem);
    nonGenericCollection.Add(secondItem);
    collection.Verify(expectedKeys, expectedItems, expectedItemsWithKeys);
}
[Theory]
[MemberData(nameof(ClassData))]
public void ChangeItemKey(
    int collectionSize,
    Named<KeyedCollectionGetKeyedValue<TKey, TValue>> generateKeyedItem)
{
    // Re-keying an existing item swaps its dictionary entry in place.
    TValue firstValue = GenerateValue();
    TValue secondValue = GenerateValue();
    TValue thirdValue = GenerateValue();
    TKey firstKey = GetKeyForItem(firstValue);
    TKey secondKey = GetKeyForItem(secondValue);
    TKey thirdKey = GetKeyForItem(thirdValue);
    var firstItem = new KeyedItem<TKey, TValue>(firstKey, firstValue);
    var secondItem = new KeyedItem<TKey, TValue>(secondKey, secondValue);
    var collection = new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
    TKey[] expectedKeys;
    IKeyedItem<TKey, TValue>[] expectedItems;
    IKeyedItem<TKey, TValue>[] expectedItemsWithKeys;
    collection.AddItems(
        generateKeyedItem.Value.Bind(GenerateValue, GetKeyForItem),
        ki => ki.Key,
        collectionSize,
        out expectedKeys,
        out expectedItems,
        out expectedItemsWithKeys);
    collection.Add(firstItem);
    collection.Add(secondItem);
    expectedKeys = expectedKeys.Push(firstKey, secondKey);
    expectedItems = expectedItems.Push(firstItem, secondItem);
    expectedItemsWithKeys = expectedItemsWithKeys.Push(
        new[] { firstItem, secondItem }.Where(ki => ki.Key != null)
            .ToArray<IKeyedItem<TKey, TValue>>());
    collection.MyChangeItemKey(secondItem, thirdKey);
    // Mirror the change in the item and in the expected-keys array.
    secondItem.Key = thirdKey;
    expectedKeys[expectedKeys.Length - 1] = thirdKey;
    collection.Verify(expectedKeys, expectedItems, expectedItemsWithKeys);
}
[Theory]
[MemberData(nameof(ClassData))]
public void ChangeItemKeyThrowsPreexistingKey(
    int collectionSize,
    Named<KeyedCollectionGetKeyedValue<TKey, TValue>> generateKeyedItem)
{
    // Re-keying onto a key already owned by another item must throw.
    TValue firstValue = GenerateValue();
    TValue secondValue = GenerateValue();
    TKey firstKey = GetKeyForItem(firstValue);
    TKey secondKey = GetKeyForItem(secondValue);
    var firstItem = new KeyedItem<TKey, TValue>(firstKey, firstValue);
    var secondItem = new KeyedItem<TKey, TValue>(secondKey, secondValue);
    var collection = new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
    TKey[] expectedKeys;
    IKeyedItem<TKey, TValue>[] expectedItems;
    IKeyedItem<TKey, TValue>[] expectedItemsWithKeys;
    collection.AddItems(
        generateKeyedItem.Value.Bind(GenerateValue, GetKeyForItem),
        ki => ki.Key,
        collectionSize,
        out expectedKeys,
        out expectedItems,
        out expectedItemsWithKeys);
    collection.Add(firstItem);
    collection.Add(secondItem);
    expectedKeys = expectedKeys.Push(firstKey, secondKey);
    expectedItems = expectedItems.Push(firstItem, secondItem);
    expectedItemsWithKeys = expectedItemsWithKeys.Push(
        new[] { firstItem, secondItem }.Where(ki => ki.Key != null)
            .ToArray<IKeyedItem<TKey, TValue>>());
    AssertExtensions.Throws<ArgumentException>(null, () => collection.MyChangeItemKey(secondItem, firstKey));
    collection.Verify(expectedKeys, expectedItems, expectedItemsWithKeys);
}
[Theory]
[MemberData(nameof(ClassData))]
public void ChangeItemKeySameKey(
    int collectionSize,
    Named<KeyedCollectionGetKeyedValue<TKey, TValue>> generateKeyedItem)
{
    // Re-keying an item onto its current key is a no-op.
    TValue firstValue = GenerateValue();
    TValue secondValue = GenerateValue();
    TKey firstKey = GetKeyForItem(firstValue);
    TKey secondKey = GetKeyForItem(secondValue);
    var firstItem = new KeyedItem<TKey, TValue>(firstKey, firstValue);
    var secondItem = new KeyedItem<TKey, TValue>(secondKey, secondValue);
    var collection = new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
    TKey[] expectedKeys;
    IKeyedItem<TKey, TValue>[] expectedItems;
    IKeyedItem<TKey, TValue>[] expectedItemsWithKeys;
    collection.AddItems(
        generateKeyedItem.Value.Bind(GenerateValue, GetKeyForItem),
        ki => ki.Key,
        collectionSize,
        out expectedKeys,
        out expectedItems,
        out expectedItemsWithKeys);
    collection.Add(firstItem);
    collection.Add(secondItem);
    expectedKeys = expectedKeys.Push(firstKey, secondKey);
    expectedItems = expectedItems.Push(firstItem, secondItem);
    expectedItemsWithKeys = expectedItemsWithKeys.Push(
        new[] { firstItem, secondItem }.Where(ki => ki.Key != null)
            .ToArray<IKeyedItem<TKey, TValue>>());
    collection.MyChangeItemKey(secondItem, secondKey);
    collection.Verify(expectedKeys, expectedItems, expectedItemsWithKeys);
}
[Theory]
[MemberData(nameof(ClassData))]
public void ChangeItemDoesNotExistThrows(
    int collectionSize,
    Named<KeyedCollectionGetKeyedValue<TKey, TValue>> generateKeyedItem)
{
    // ChangeItemKey on an item the collection does not contain must throw,
    // whatever the target key, and leave the contents untouched.
    TValue firstValue = GenerateValue();
    TValue secondValue = GenerateValue();
    TValue thirdValue = GenerateValue();
    TKey firstKey = GetKeyForItem(firstValue);
    TKey secondKey = GetKeyForItem(secondValue);
    TKey thirdKey = GetKeyForItem(thirdValue);
    var firstItem = new KeyedItem<TKey, TValue>(firstKey, firstValue);
    var secondItem = new KeyedItem<TKey, TValue>(secondKey, secondValue);
    var missingItem = new KeyedItem<TKey, TValue>(thirdKey, thirdValue);
    var collection = new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
    TKey[] expectedKeys;
    IKeyedItem<TKey, TValue>[] expectedItems;
    IKeyedItem<TKey, TValue>[] expectedItemsWithKeys;
    collection.AddItems(
        generateKeyedItem.Value.Bind(GenerateValue, GetKeyForItem),
        ki => ki.Key,
        collectionSize,
        out expectedKeys,
        out expectedItems,
        out expectedItemsWithKeys);
    collection.Add(firstItem);
    collection.Add(secondItem);
    expectedKeys = expectedKeys.Push(firstKey, secondKey);
    expectedItems = expectedItems.Push(firstItem, secondItem);
    expectedItemsWithKeys = expectedItemsWithKeys.Push(
        new[] { firstItem, secondItem }.Where(ki => ki.Key != null)
            .ToArray<IKeyedItem<TKey, TValue>>());
    AssertExtensions.Throws<ArgumentException>(null, () => collection.MyChangeItemKey(missingItem, thirdKey));
    AssertExtensions.Throws<ArgumentException>(null, () => collection.MyChangeItemKey(missingItem, secondKey));
    // A distinct instance that merely reuses a contained key is still "not
    // in the collection".
    var impostorItem = new KeyedItem<TKey, TValue>(firstKey, secondValue);
    AssertExtensions.Throws<ArgumentException>(null, () => collection.MyChangeItemKey(impostorItem, secondKey));
    collection.Verify(expectedKeys, expectedItems, expectedItemsWithKeys);
}
[Theory]
[MemberData(nameof(ClassData))]
public void ChangeItemKeyNullToNull(
    int collectionSize,
    Named<KeyedCollectionGetKeyedValue<TKey, TValue>> generateKeyedItem)
{
    // Only meaningful when TKey can represent null.
    if (default(TKey) == null)
    {
        TValue firstValue = GenerateValue();
        TValue secondValue = GenerateValue();
        TKey firstKey = GetKeyForItem(firstValue);
        var firstItem = new KeyedItem<TKey, TValue>(firstKey, firstValue);
        var collection = new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
        TKey[] expectedKeys;
        IKeyedItem<TKey, TValue>[] expectedItems;
        IKeyedItem<TKey, TValue>[] expectedItemsWithKeys;
        collection.AddItems(
            generateKeyedItem.Value.Bind(GenerateValue, GetKeyForItem),
            ki => ki.Key,
            collectionSize,
            out expectedKeys,
            out expectedItems,
            out expectedItemsWithKeys);
        collection.Add(firstItem);
        var nullKeyItem = new KeyedItem<TKey, TValue>(default(TKey), secondValue);
        collection.Add(nullKeyItem);
        expectedKeys = expectedKeys.Push(firstKey);
        expectedItems = expectedItems.Push(firstItem, nullKeyItem);
        expectedItemsWithKeys = expectedItemsWithKeys.Push(
            new[] { firstItem }.Where(ki => ki.Key != null)
                .ToArray<IKeyedItem<TKey, TValue>>());
        // null -> null re-key is a no-op.
        collection.MyChangeItemKey(nullKeyItem, default(TKey));
        collection.Verify(expectedKeys, expectedItems, expectedItemsWithKeys);
    }
}
[Theory]
[MemberData(nameof(ClassData))]
public void ChangeItemKeyNullToNonNull(
    int collectionSize,
    Named<KeyedCollectionGetKeyedValue<TKey, TValue>> generateKeyedItem)
{
    // Only meaningful when TKey can represent null.
    if (default(TKey) == null)
    {
        TValue firstValue = GenerateValue();
        TValue secondValue = GenerateValue();
        TKey firstKey = GetKeyForItem(firstValue);
        TKey secondKey = GetKeyForItem(secondValue);
        var firstItem = new KeyedItem<TKey, TValue>(firstKey, firstValue);
        var collection = new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
        TKey[] expectedKeys;
        IKeyedItem<TKey, TValue>[] expectedItems;
        IKeyedItem<TKey, TValue>[] expectedItemsWithKeys;
        collection.AddItems(
            generateKeyedItem.Value.Bind(GenerateValue, GetKeyForItem),
            ki => ki.Key,
            collectionSize,
            out expectedKeys,
            out expectedItems,
            out expectedItemsWithKeys);
        collection.Add(firstItem);
        var nullKeyItem = new KeyedItem<TKey, TValue>(default(TKey), secondValue);
        collection.Add(nullKeyItem);
        expectedKeys = expectedKeys.Push(firstKey);
        expectedItems = expectedItems.Push(firstItem, nullKeyItem);
        // Snapshot taken while nullKeyItem.Key is still null (excluded here).
        expectedItemsWithKeys = expectedItemsWithKeys.Push(
            new[] { firstItem, nullKeyItem }.Where(ki => ki.Key != null)
                .ToArray<IKeyedItem<TKey, TValue>>());
        collection.MyChangeItemKey(nullKeyItem, secondKey);
        nullKeyItem.Key = secondKey;
        collection.Verify(expectedKeys, expectedItems, expectedItemsWithKeys);
    }
}
        // Changes the key of an item that was added with a non-null key to a
        // null key; the item stays in the sequence but its key association
        // is dropped (only keyedItem1's key is expected afterwards).
        [Theory]
        [MemberData(nameof(ClassData))]
        public void ChangeItemKeyNonNullToNull(
            int collectionSize,
            Named<KeyedCollectionGetKeyedValue<TKey, TValue>>
                generateKeyedItem)
        {
            // Only meaningful when TKey is a reference (nullable) type.
            if (default(TKey) == null)
            {
                TKey[] keys;
                IKeyedItem<TKey, TValue>[] items;
                IKeyedItem<TKey, TValue>[] itemsWithKeys;
                TValue item1 = GenerateValue();
                TValue item2 = GenerateValue();
                TKey key1 = GetKeyForItem(item1);
                TKey key2 = GetKeyForItem(item2);
                var keyedItem1 = new KeyedItem<TKey, TValue>(
                    key1,
                    item1);
                var keyedItem2 = new KeyedItem<TKey, TValue>(
                    key2,
                    item2);
                var collection =
                    new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
                collection.AddItems(
                    generateKeyedItem.Value.Bind(
                        GenerateValue,
                        GetKeyForItem),
                    ki => ki.Key,
                    collectionSize,
                    out keys,
                    out items,
                    out itemsWithKeys);
                collection.Add(keyedItem1);
                collection.Add(keyedItem2);
                // key2 is intentionally not pushed: after the change below,
                // keyedItem2 no longer has a key in the collection.
                keys = keys.Push(key1);
                items = items.Push(keyedItem1, keyedItem2);
                itemsWithKeys =
                    itemsWithKeys.Push(
                        new[] {keyedItem1}.Where(ki => ki.Key != null)
                            .ToArray
                            <IKeyedItem<TKey, TValue>>());
                collection.MyChangeItemKey(keyedItem2, default(TKey));
                collection.Verify(keys, items, itemsWithKeys);
            }
        }
[Theory]
[MemberData(nameof(CollectionSizes))]
public void ChangeItemKeyNullItemNotPresent(int collectionSize)
{
if (default(TKey) == null)
{
TKey[] keys;
TValue[] items;
TValue[] itemsWithKeys;
TValue item1 = GenerateValue();
TValue item2 = GenerateValue();
TKey key1 = GetKeyForItem(item1);
TKey key2 = GetKeyForItem(item2);
var collection =
new TestKeyedCollection<TKey, TValue>(GetKeyForItem);
collection.AddItems(
GenerateValue,
GetKeyForItem,
collectionSize,
out keys,
out items,
out itemsWithKeys);
collection.Add(item1);
AssertExtensions.Throws<ArgumentException>(null, () => collection.MyChangeItemKey(default(TValue), key2));
collection.Verify(
keys.Push(key1),
items.Push(item1),
itemsWithKeys.Push(item1));
}
}
        // When a null item is present in the collection, changing its key
        // succeeds; the null item contributes no key to the lookup.
        [Theory]
        [MemberData(nameof(CollectionSizes))]
        public void ChangeItemKeyNullItemPresent(int collectionSize)
        {
            // Only meaningful when TKey is a reference (nullable) type.
            if (default(TKey) == null)
            {
                TKey[] keys;
                TValue[] items;
                TValue[] itemsWithKeys;
                TValue item1 = GenerateValue();
                TValue item2 = GenerateValue();
                TKey key1 = GetKeyForItem(item1);
                TKey key2 = GetKeyForItem(item2);
                var collection =
                    new TestKeyedCollection<TKey, TValue>(GetKeyForItem);
                collection.AddItems(
                    GenerateValue,
                    GetKeyForItem,
                    collectionSize,
                    out keys,
                    out items,
                    out itemsWithKeys);
                collection.Add(item1);
                collection.Add(default(TValue));
                collection.MyChangeItemKey(default(TValue), key2);
                // key2 is not expected: the null item has no key entry.
                collection.Verify(
                    keys.Push(key1),
                    items.Push(item1, default(TValue)),
                    itemsWithKeys.Push(item1));
            }
        }
[Theory]
[MemberData(nameof(CollectionSizes))]
public void ChangeItemKeyNullKeyNotPresent(int collectionSize)
{
if (default(TKey) == null)
{
TKey[] keys;
TValue[] items;
TValue[] itemsWithKeys;
TValue item1 = GenerateValue();
var collection =
new TestKeyedCollection<TKey, TValue>(GetKeyForItem);
collection.AddItems(
GenerateValue,
GetKeyForItem,
collectionSize,
out keys,
out items,
out itemsWithKeys);
collection.Add(item1);
collection.MyChangeItemKey(item1, default(TKey));
collection.Verify(
keys,
items.Push(item1),
itemsWithKeys);
}
}
        // Same as ChangeItemKeyNullKeyNotPresent, but a null item (and thus
        // a keyless entry) is already present when the key is changed.
        [Theory]
        [MemberData(nameof(CollectionSizes))]
        public void ChangeItemKeyNullKeyPresent(int collectionSize)
        {
            // Only meaningful when TKey is a reference (nullable) type.
            if (default(TKey) == null)
            {
                TKey[] keys;
                TValue[] items;
                TValue[] itemsWithKeys;
                TValue item1 = GenerateValue();
                var collection =
                    new TestKeyedCollection<TKey, TValue>(GetKeyForItem);
                collection.AddItems(
                    GenerateValue,
                    GetKeyForItem,
                    collectionSize,
                    out keys,
                    out items,
                    out itemsWithKeys);
                collection.Add(item1);
                collection.Add(default(TValue));
                collection.MyChangeItemKey(item1, default(TKey));
                collection.Verify(
                    keys,
                    items.Push(item1, default(TValue)),
                    itemsWithKeys);
            }
        }
        // Changing a present null item's key to null (both item and key are
        // null) must succeed and leave the collection state unchanged.
        [Theory]
        [MemberData(nameof(CollectionSizes))]
        public void ChangeItemKeyNullItemNullKeyPresent(
            int collectionSize)
        {
            // Only meaningful when TKey is a reference (nullable) type.
            if (default(TKey) == null)
            {
                TKey[] keys;
                TValue[] items;
                TValue[] itemsWithKeys;
                TValue item1 = GenerateValue();
                var collection =
                    new TestKeyedCollection<TKey, TValue>(GetKeyForItem);
                collection.AddItems(
                    GenerateValue,
                    GetKeyForItem,
                    collectionSize,
                    out keys,
                    out items,
                    out itemsWithKeys);
                collection.Add(item1);
                collection.Add(default(TValue));
                collection.MyChangeItemKey(
                    default(TValue),
                    default(TKey));
                collection.Verify(
                    keys,
                    items.Push(item1, default(TValue)),
                    itemsWithKeys);
            }
        }
        // Mutates the item's Key property directly before calling
        // ChangeItemKey with that same new key. The expected outcome depends
        // on the collection size: at >= 32 elements an ArgumentException is
        // expected, below that the change succeeds.
        // NOTE(review): 32 presumably corresponds to the size at which the
        // collection has created its internal lookup dictionary (compare the
        // 25/33 sizes used by Clear) -- confirm against
        // TestKeyedCollectionOfIKeyedItem's threshold.
        [Theory]
        [MemberData(nameof(ClassData2))]
        public void ChangeItemKeyKeyAlreadyChanged(
            int collectionSize,
            Named<KeyedCollectionGetKeyedValue<TKey, TValue>>
                generateKeyedItem)
        {
            TKey[] keys;
            IKeyedItem<TKey, TValue>[] items;
            IKeyedItem<TKey, TValue>[] itemsWithKeys;
            TValue item1 = GenerateValue();
            TValue item2 = GenerateValue();
            TValue item3 = GenerateValue();
            TKey key1 = GetKeyForItem(item1);
            TKey key2 = GetKeyForItem(item2);
            TKey key3 = GetKeyForItem(item3);
            var keyedItem1 = new KeyedItem<TKey, TValue>(key1, item1);
            var keyedItem2 = new KeyedItem<TKey, TValue>(key2, item2);
            var collection =
                new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
            collection.AddItems(
                generateKeyedItem.Value.Bind(
                    GenerateValue,
                    GetKeyForItem),
                ki => ki.Key,
                collectionSize,
                out keys,
                out items,
                out itemsWithKeys);
            collection.Add(keyedItem1);
            collection.Add(keyedItem2);
            // Expected key differs by size: the old key survives the failed
            // change at >= 32, the new key takes effect below.
            keys = keys.Push(key1, collectionSize >= 32 ? key2 : key3);
            items = items.Push(keyedItem1, keyedItem2);
            itemsWithKeys = itemsWithKeys.Push(keyedItem1, keyedItem2);
            keyedItem2.Key = key3;
            if (collectionSize >= 32)
            {
                AssertExtensions.Throws<ArgumentException>(null, () => collection.MyChangeItemKey(keyedItem2, key3));
            }
            else
            {
                collection.MyChangeItemKey(keyedItem2, key3);
            }
            collection.Verify(keys, items, itemsWithKeys);
        }
        // Like ChangeItemKeyKeyAlreadyChanged, but after mutating the item's
        // Key property the change is requested back to the original key
        // (key2). At >= 32 elements an ArgumentException is expected.
        [Theory]
        [MemberData(nameof(ClassData2))]
        public void ChangeItemKeyKeyAlreadyChangedNewKeyIsOldKey(
            int collectionSize,
            Named<KeyedCollectionGetKeyedValue<TKey, TValue>>
                generateKeyedItem)
        {
            TKey[] keys;
            IKeyedItem<TKey, TValue>[] items;
            IKeyedItem<TKey, TValue>[] itemsWithKeys;
            TValue item1 = GenerateValue();
            TValue item2 = GenerateValue();
            TValue item3 = GenerateValue();
            TKey key1 = GetKeyForItem(item1);
            TKey key2 = GetKeyForItem(item2);
            TKey key3 = GetKeyForItem(item3);
            var keyedItem1 = new KeyedItem<TKey, TValue>(key1, item1);
            var keyedItem2 = new KeyedItem<TKey, TValue>(key2, item2);
            var collection =
                new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
            collection.AddItems(
                generateKeyedItem.Value.Bind(
                    GenerateValue,
                    GetKeyForItem),
                ki => ki.Key,
                collectionSize,
                out keys,
                out items,
                out itemsWithKeys);
            collection.Add(keyedItem1);
            collection.Add(keyedItem2);
            // Expected key differs by size (see ChangeItemKeyKeyAlreadyChanged).
            keys = keys.Push(key1, collectionSize >= 32 ? key2 : key3);
            items = items.Push(keyedItem1, keyedItem2);
            itemsWithKeys = itemsWithKeys.Push(keyedItem1, keyedItem2);
            keyedItem2.Key = key3;
            if (collectionSize >= 32)
            {
                AssertExtensions.Throws<ArgumentException>(null, () => collection.MyChangeItemKey(keyedItem2, key2));
            }
            else
            {
                collection.MyChangeItemKey(keyedItem2, key2);
            }
            collection.Verify(keys, items, itemsWithKeys);
        }
        // Like ChangeItemKeyKeyAlreadyChanged, but the requested new key
        // (key4) differs from both the registered key (key2) and the
        // mutated Key property (key3). At >= 32 elements an
        // ArgumentException is expected.
        [Theory]
        [MemberData(nameof(ClassData2))]
        public void ChangeItemKeyKeyAlreadyChangedNewKeyIsDifferent(
            int collectionSize,
            Named<KeyedCollectionGetKeyedValue<TKey, TValue>>
                generateKeyedItem)
        {
            TKey[] keys;
            IKeyedItem<TKey, TValue>[] items;
            IKeyedItem<TKey, TValue>[] itemsWithKeys;
            TValue item1 = GenerateValue();
            TValue item2 = GenerateValue();
            TValue item3 = GenerateValue();
            TValue item4 = GenerateValue();
            TKey key1 = GetKeyForItem(item1);
            TKey key2 = GetKeyForItem(item2);
            TKey key3 = GetKeyForItem(item3);
            TKey key4 = GetKeyForItem(item4);
            var keyedItem1 = new KeyedItem<TKey, TValue>(key1, item1);
            var keyedItem2 = new KeyedItem<TKey, TValue>(key2, item2);
            var collection =
                new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
            collection.AddItems(
                generateKeyedItem.Value.Bind(
                    GenerateValue,
                    GetKeyForItem),
                ki => ki.Key,
                collectionSize,
                out keys,
                out items,
                out itemsWithKeys);
            collection.Add(keyedItem1);
            collection.Add(keyedItem2);
            // Expected key differs by size (see ChangeItemKeyKeyAlreadyChanged).
            keys = keys.Push(key1, collectionSize >= 32 ? key2 : key3);
            items = items.Push(keyedItem1, keyedItem2);
            itemsWithKeys = itemsWithKeys.Push(keyedItem1, keyedItem2);
            keyedItem2.Key = key3;
            if (collectionSize >= 32)
            {
                AssertExtensions.Throws<ArgumentException>(null, () => collection.MyChangeItemKey(keyedItem2, key4));
            }
            else
            {
                collection.MyChangeItemKey(keyedItem2, key4);
            }
            collection.Verify(keys, items, itemsWithKeys);
        }
        // Adds an item under a null key, mutates its Key property to key3,
        // then requests a change to key3. Succeeds below 32 elements;
        // throws ArgumentException at >= 32 (see note on
        // ChangeItemKeyKeyAlreadyChanged about the 32 boundary).
        [Theory]
        [MemberData(nameof(ClassData2))]
        public void ChangeItemKeyNullToNewKey(
            int collectionSize,
            Named<KeyedCollectionGetKeyedValue<TKey, TValue>>
                generateKeyedItem)
        {
            // Only meaningful when TKey is a reference (nullable) type.
            if (default(TKey) == null)
            {
                TKey[] keys;
                IKeyedItem<TKey, TValue>[] items;
                IKeyedItem<TKey, TValue>[] itemsWithKeys;
                TValue item1 = GenerateValue();
                TValue item2 = GenerateValue();
                TValue item3 = GenerateValue();
                TKey key1 = GetKeyForItem(item1);
                TKey key3 = GetKeyForItem(item3);
                var keyedItem1 = new KeyedItem<TKey, TValue>(
                    key1,
                    item1);
                var collection =
                    new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
                collection.AddItems(
                    generateKeyedItem.Value.Bind(
                        GenerateValue,
                        GetKeyForItem),
                    ki => ki.Key,
                    collectionSize,
                    out keys,
                    out items,
                    out itemsWithKeys);
                collection.Add(keyedItem1);
                var tempKeyedItem =
                    new KeyedItem<TKey, TValue>(default(TKey), item2);
                collection.Add(tempKeyedItem);
                keys = keys.Push(key1);
                // key3 and tempKeyedItem only become part of the expected
                // lookup state when the change below is expected to succeed.
                if (collectionSize < 32)
                {
                    keys = keys.Push(key3);
                }
                items = items.Push(keyedItem1, tempKeyedItem);
                itemsWithKeys = itemsWithKeys.Push(keyedItem1);
                if (collectionSize < 32)
                {
                    itemsWithKeys = itemsWithKeys.Push(tempKeyedItem);
                }
                tempKeyedItem.Key = key3;
                if (collectionSize >= 32)
                {
                    AssertExtensions.Throws<ArgumentException>(null, () => collection.MyChangeItemKey(tempKeyedItem, key3));
                }
                else
                {
                    collection.MyChangeItemKey(tempKeyedItem, key3);
                }
                collection.Verify(keys, items, itemsWithKeys);
            }
        }
        // Adds an item under a null key, mutates its Key property to key3,
        // then requests a change back to null. Succeeds below 32 elements;
        // throws ArgumentException at >= 32.
        [Theory]
        [MemberData(nameof(ClassData2))]
        public void ChangeItemKeyNullToOldKey(
            int collectionSize,
            Named<KeyedCollectionGetKeyedValue<TKey, TValue>>
                generateKeyedItem)
        {
            // Only meaningful when TKey is a reference (nullable) type.
            if (default(TKey) == null)
            {
                TKey[] keys;
                IKeyedItem<TKey, TValue>[] items;
                IKeyedItem<TKey, TValue>[] itemsWithKeys;
                TValue item1 = GenerateValue();
                TValue item2 = GenerateValue();
                TValue item3 = GenerateValue();
                TKey key1 = GetKeyForItem(item1);
                TKey key3 = GetKeyForItem(item3);
                var keyedItem1 = new KeyedItem<TKey, TValue>(
                    key1,
                    item1);
                var collection =
                    new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
                collection.AddItems(
                    generateKeyedItem.Value.Bind(
                        GenerateValue,
                        GetKeyForItem),
                    ki => ki.Key,
                    collectionSize,
                    out keys,
                    out items,
                    out itemsWithKeys);
                collection.Add(keyedItem1);
                var tempKeyedItem =
                    new KeyedItem<TKey, TValue>(default(TKey), item2);
                collection.Add(tempKeyedItem);
                keys = keys.Push(key1);
                // Expected state depends on whether the change succeeds
                // (below 32) or throws (at >= 32).
                if (collectionSize < 32)
                {
                    keys = keys.Push(key3);
                }
                items = items.Push(keyedItem1, tempKeyedItem);
                itemsWithKeys = itemsWithKeys.Push(keyedItem1);
                if (collectionSize < 32)
                {
                    itemsWithKeys = itemsWithKeys.Push(tempKeyedItem);
                }
                tempKeyedItem.Key = key3;
                if (collectionSize >= 32)
                {
                    AssertExtensions.Throws<ArgumentException>(null, () => collection.MyChangeItemKey(tempKeyedItem, default(TKey)));
                }
                else
                {
                    collection.MyChangeItemKey(
                        tempKeyedItem,
                        default(TKey));
                }
                collection.Verify(keys, items, itemsWithKeys);
            }
        }
        // Adds an item under a null key, mutates its Key property to key3,
        // then requests a change to an unrelated key (key4). Succeeds below
        // 32 elements; throws ArgumentException at >= 32.
        [Theory]
        [MemberData(nameof(ClassData2))]
        public void ChangeItemKeyNullToOtherKey(
            int collectionSize,
            Named<KeyedCollectionGetKeyedValue<TKey, TValue>>
                generateKeyedItem)
        {
            // Only meaningful when TKey is a reference (nullable) type.
            if (default(TKey) == null)
            {
                TKey[] keys;
                IKeyedItem<TKey, TValue>[] items;
                IKeyedItem<TKey, TValue>[] itemsWithKeys;
                TValue item1 = GenerateValue();
                TValue item2 = GenerateValue();
                TValue item3 = GenerateValue();
                TValue item4 = GenerateValue();
                TKey key1 = GetKeyForItem(item1);
                TKey key3 = GetKeyForItem(item3);
                TKey key4 = GetKeyForItem(item4);
                var keyedItem1 = new KeyedItem<TKey, TValue>(
                    key1,
                    item1);
                var collection =
                    new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
                collection.AddItems(
                    generateKeyedItem.Value.Bind(
                        GenerateValue,
                        GetKeyForItem),
                    ki => ki.Key,
                    collectionSize,
                    out keys,
                    out items,
                    out itemsWithKeys);
                collection.Add(keyedItem1);
                var tempKeyedItem =
                    new KeyedItem<TKey, TValue>(default(TKey), item2);
                collection.Add(tempKeyedItem);
                keys = keys.Push(key1);
                // Expected state depends on whether the change succeeds
                // (below 32) or throws (at >= 32).
                if (collectionSize < 32)
                {
                    keys = keys.Push(key3);
                }
                items = items.Push(keyedItem1, tempKeyedItem);
                itemsWithKeys = itemsWithKeys.Push(keyedItem1);
                if (collectionSize < 32)
                {
                    itemsWithKeys = itemsWithKeys.Push(tempKeyedItem);
                }
                tempKeyedItem.Key = key3;
                if (collectionSize >= 32)
                {
                    AssertExtensions.Throws<ArgumentException>(null, () => collection.MyChangeItemKey(tempKeyedItem, key4));
                }
                else
                {
                    collection.MyChangeItemKey(tempKeyedItem, key4);
                }
                collection.Verify(keys, items, itemsWithKeys);
            }
        }
        // Mutates a registered item's Key property from key2 to null and
        // then requests a change to null; this succeeds and the item is
        // dropped from the key lookup (key2/keyedItem2 are not expected).
        [Theory]
        [MemberData(nameof(ClassData2))]
        public void ChangeItemKeySetKeyNonNullToNull(
            int collectionSize,
            Named<KeyedCollectionGetKeyedValue<TKey, TValue>>
                generateKeyedItem)
        {
            // Only meaningful when TKey is a reference (nullable) type.
            if (default(TKey) == null)
            {
                TKey[] keys;
                IKeyedItem<TKey, TValue>[] items;
                IKeyedItem<TKey, TValue>[] itemsWithKeys;
                TValue item1 = GenerateValue();
                TValue item2 = GenerateValue();
                TKey key1 = GetKeyForItem(item1);
                TKey key2 = GetKeyForItem(item2);
                var keyedItem1 = new KeyedItem<TKey, TValue>(
                    key1,
                    item1);
                var keyedItem2 = new KeyedItem<TKey, TValue>(
                    key2,
                    item2);
                var collection =
                    new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
                collection.AddItems(
                    generateKeyedItem.Value.Bind(
                        GenerateValue,
                        GetKeyForItem),
                    ki => ki.Key,
                    collectionSize,
                    out keys,
                    out items,
                    out itemsWithKeys);
                collection.Add(keyedItem1);
                collection.Add(keyedItem2);
                keys = keys.Push(key1);
                items = items.Push(keyedItem1, keyedItem2);
                itemsWithKeys = itemsWithKeys.Push(keyedItem1);
                keyedItem2.Key = default(TKey);
                collection.MyChangeItemKey(keyedItem2, default(TKey));
                collection.Verify(keys, items, itemsWithKeys);
            }
        }
        // Mutates a registered item's Key property from key2 to null, then
        // requests a change back to key2. Succeeds below 32 elements;
        // throws ArgumentException at >= 32.
        [Theory]
        [MemberData(nameof(ClassData2))]
        public void ChangeItemKeySetKeyNonNullToNullChangeKeyNonNull(
            int collectionSize,
            Named<KeyedCollectionGetKeyedValue<TKey, TValue>>
                generateKeyedItem)
        {
            // Only meaningful when TKey is a reference (nullable) type.
            if (default(TKey) == null)
            {
                TKey[] keys;
                IKeyedItem<TKey, TValue>[] items;
                IKeyedItem<TKey, TValue>[] itemsWithKeys;
                TValue item1 = GenerateValue();
                TValue item2 = GenerateValue();
                TKey key1 = GetKeyForItem(item1);
                TKey key2 = GetKeyForItem(item2);
                var keyedItem1 = new KeyedItem<TKey, TValue>(
                    key1,
                    item1);
                var keyedItem2 = new KeyedItem<TKey, TValue>(
                    key2,
                    item2);
                var collection =
                    new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
                collection.AddItems(
                    generateKeyedItem.Value.Bind(
                        GenerateValue,
                        GetKeyForItem),
                    ki => ki.Key,
                    collectionSize,
                    out keys,
                    out items,
                    out itemsWithKeys);
                collection.Add(keyedItem1);
                collection.Add(keyedItem2);
                keys = keys.Push(key1);
                items = items.Push(keyedItem1, keyedItem2);
                itemsWithKeys = itemsWithKeys.Push(keyedItem1);
                keyedItem2.Key = default(TKey);
                if (collectionSize >= 32)
                {
                    AssertExtensions.Throws<ArgumentException>(null, () => collection.MyChangeItemKey(keyedItem2, key2));
                }
                else
                {
                    collection.MyChangeItemKey(keyedItem2, key2);
                }
                collection.Verify(keys, items, itemsWithKeys);
            }
        }
        // Mutates a registered item's Key property from key2 to null, then
        // requests a change to an unrelated key (key4).
        [Theory]
        [MemberData(nameof(ClassData2))]
        public void
            ChangeItemKeySetKeyNonNullToNullChangeKeySomethingElse(
            int collectionSize,
            Named<KeyedCollectionGetKeyedValue<TKey, TValue>>
                generateKeyedItem)
        {
            // Only meaningful when TKey is a reference (nullable) type.
            if (default(TKey) == null)
            {
                TKey[] keys;
                IKeyedItem<TKey, TValue>[] items;
                IKeyedItem<TKey, TValue>[] itemsWithKeys;
                TValue item1 = GenerateValue();
                TValue item2 = GenerateValue();
                TValue item4 = GenerateValue();
                TKey key1 = GetKeyForItem(item1);
                TKey key2 = GetKeyForItem(item2);
                TKey key4 = GetKeyForItem(item4);
                var keyedItem1 = new KeyedItem<TKey, TValue>(
                    key1,
                    item1);
                var keyedItem2 = new KeyedItem<TKey, TValue>(
                    key2,
                    item2);
                var collection =
                    new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
                collection.AddItems(
                    generateKeyedItem.Value.Bind(
                        GenerateValue,
                        GetKeyForItem),
                    ki => ki.Key,
                    collectionSize,
                    out keys,
                    out items,
                    out itemsWithKeys);
                collection.Add(keyedItem1);
                collection.Add(keyedItem2);
                keys = keys.Push(key1);
                items = items.Push(keyedItem1, keyedItem2);
                itemsWithKeys = itemsWithKeys.Push(keyedItem1);
                keyedItem2.Key = default(TKey);
                // NOTE(review): keyedItem2.Key was just set to default(TKey),
                // which is null inside this branch, so the second conjunct is
                // always false and the throwing path is unreachable. The
                // sibling tests guard on collectionSize >= 32 alone -- confirm
                // whether the extra "keyedItem2.Key != null" is intentional.
                if (collectionSize >= 32 && keyedItem2.Key != null)
                {
                    AssertExtensions.Throws<ArgumentException>(null, () => collection.MyChangeItemKey(keyedItem2, key4));
                }
                else
                {
                    collection.MyChangeItemKey(keyedItem2, key4);
                }
                collection.Verify(keys, items, itemsWithKeys);
            }
        }
        // Clear must empty the collection; whether the internal lookup
        // dictionary exists must not change across the Clear call.
        [Theory]
        [InlineData(0)]
        [InlineData(4)]
        [InlineData(25)]
        [InlineData(33)]
        public void Clear(int collectionSize)
        {
            TKey[] keys;
            IKeyedItem<TKey, TValue>[] items;
            IKeyedItem<TKey, TValue>[] itemsWithKeys;
            var collection =
                new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
            collection.AddItems(
                GetNeverNullKeyMethod.Value.Bind(
                    GenerateValue,
                    GetKeyForItem),
                ki => ki.Key,
                collectionSize,
                out keys,
                out items,
                out itemsWithKeys);
            bool haveDict = collection.GetDictionary() != null;
            collection.Clear();
            collection.Verify(
                new TKey[0],
                new IKeyedItem<TKey, TValue>[0],
                new IKeyedItem<TKey, TValue>[0]);
            // Clearing must not create or destroy the internal dictionary.
            Assert.Equal(haveDict, collection.GetDictionary() != null);
        }
        // Contains(default(TKey)): throws ArgumentNullException when the key
        // type is nullable (default is null), otherwise returns false.
        [Theory]
        [MemberData(nameof(ClassData))]
        public void Contains(
            int collectionSize,
            Named<KeyedCollectionGetKeyedValue<TKey, TValue>>
                generateKeyedItem)
        {
            var collection =
                new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
            TKey[] keys;
            IKeyedItem<TKey, TValue>[] items;
            IKeyedItem<TKey, TValue>[] itemsWithKeys;
            collection.AddItems(
                generateKeyedItem.Value.Bind(
                    GenerateValue,
                    GetKeyForItem),
                ki => ki.Key,
                collectionSize,
                out keys,
                out items,
                out itemsWithKeys);
            // s_keyNullable presumably indicates TKey is a reference
            // (nullable) type -- declared outside this view; TODO confirm.
            if (s_keyNullable)
            {
                Assert.Throws<ArgumentNullException>(
                    () => collection.Contains(default(TKey)));
            }
            else
            {
                Assert.False(collection.Contains(default(TKey)));
            }
        }
private void VerifyDictionary(
KeyedCollection<TKey, IKeyedItem<TKey, TValue>> dictionary,
TKey[] expectedKeys,
IKeyedItem<TKey, TValue>[] expectedItems)
{
if (dictionary == null)
{
throw new ArgumentNullException(nameof(dictionary));
}
if (expectedKeys.Length != expectedItems.Length)
{
throw new ArgumentException(
"Expected keys length and expected items length must be the same.");
}
Assert.Equal(expectedItems.Length, dictionary.Count);
for (var i = 0; i < expectedKeys.Length; ++i)
{
Assert.Equal(
expectedItems[i],
dictionary[expectedKeys[i]]);
}
}
        // Verifies the dictionary-creation threshold: with a non-negative
        // threshold the internal dictionary appears only once the element
        // count exceeds it; a negative threshold appears to disable
        // dictionary creation entirely (null even at 1024/2048 items).
        [Theory]
        [MemberData(nameof(ThresholdData))]
        public void Threshold(
            int collectionDictionaryThreshold,
            Named<AddItemsFunc<TKey, IKeyedItem<TKey, TValue>>> addItems)
        {
            TKey[] keys;
            IKeyedItem<TKey, TValue>[] items;
            IKeyedItem<TKey, TValue>[] itemsWithKeys;
            if (collectionDictionaryThreshold >= 0)
            {
                var collection =
                    new TestKeyedCollectionOfIKeyedItem<TKey, TValue>(
                        collectionDictionaryThreshold);
                // dictionary is created when the threshold is exceeded
                addItems.Value(
                    collection,
                    GetNeverNullKeyMethod.Value.Bind(
                        GenerateValue,
                        GetKeyForItem),
                    ki => ki.Key,
                    collectionDictionaryThreshold,
                    out keys,
                    out items,
                    out itemsWithKeys);
                // Exactly at the threshold: still no dictionary.
                Assert.Null(collection.GetDictionary());
                collection =
                    new TestKeyedCollectionOfIKeyedItem<TKey, TValue>(
                        collectionDictionaryThreshold);
                addItems.Value(
                    collection,
                    GetNeverNullKeyMethod.Value.Bind(
                        GenerateValue,
                        GetKeyForItem),
                    ki => ki.Key,
                    collectionDictionaryThreshold + 1,
                    out keys,
                    out items,
                    out itemsWithKeys);
                // One past the threshold: dictionary must exist and agree
                // with the expected contents.
                Assert.NotNull(collection.GetDictionary());
                VerifyDictionary(collection, keys, itemsWithKeys);
            }
            else
            {
                var collection =
                    new TestKeyedCollectionOfIKeyedItem<TKey, TValue>(
                        collectionDictionaryThreshold);
                // dictionary is created when the threshold is exceeded
                addItems.Value(
                    collection,
                    GetNeverNullKeyMethod.Value.Bind(
                        GenerateValue,
                        GetKeyForItem),
                    ki => ki.Key,
                    1024,
                    out keys,
                    out items,
                    out itemsWithKeys);
                Assert.Null(collection.GetDictionary());
                collection =
                    new TestKeyedCollectionOfIKeyedItem<TKey, TValue>(
                        collectionDictionaryThreshold);
                addItems.Value(
                    collection,
                    GetNeverNullKeyMethod.Value.Bind(
                        GenerateValue,
                        GetKeyForItem),
                    ki => ki.Key,
                    2048,
                    out keys,
                    out items,
                    out itemsWithKeys);
                Assert.Null(collection.GetDictionary());
            }
        }
        // Contains(key): false for an absent non-null key, true for every
        // present key, ArgumentNullException for a null key.
        [Theory]
        [MemberData(nameof(ContainsKeyData))]
        public void ContainsKey(
            int collectionSize,
            Named<KeyedCollectionGetKeyedValue<TKey, TValue>>
                generateKeyedItem)
        {
            TKey[] keys;
            IKeyedItem<TKey, TValue>[] items;
            IKeyedItem<TKey, TValue>[] itemsWithKeys;
            var collection =
                new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
            collection.AddItems(
                generateKeyedItem.Value.Bind(
                    GenerateValue,
                    GetKeyForItem),
                ki => ki.Key,
                collectionSize,
                out keys,
                out items,
                out itemsWithKeys);
            IKeyedItem<TKey, TValue> itemNotIn =
                generateKeyedItem.Value(GenerateValue, GetKeyForItem);
            // this is to make overload resolution pick the correct Contains function. replacing keyNotIn with null causes the Contains<TValue> overload to be used. We want the Contains<TKey> version.
            TKey keyNotIn = itemNotIn.Key;
            if (keyNotIn == null)
            {
                Assert.Throws<ArgumentNullException>(
                    () => collection.Contains(keyNotIn));
            }
            else
            {
                Assert.False(collection.Contains(keyNotIn));
            }
            foreach (TKey k in keys)
            {
                // Copy the loop variable so the lambda below captures a
                // stable value.
                TKey key = k;
                if (key == null)
                {
                    Assert.Throws<ArgumentNullException>(
                        () => collection.Contains(key));
                    continue;
                }
                Assert.True(collection.Contains(key));
            }
        }
        // Remove(key): false for an absent key, ArgumentNullException for a
        // null key, true for every present key. Removes each keyed item in
        // order, re-verifying the remaining expected state after each step.
        [Theory]
        [MemberData(nameof(ContainsKeyData))]
        public void RemoveKey(
            int collectionSize,
            Named<KeyedCollectionGetKeyedValue<TKey, TValue>>
                generateKeyedItem)
        {
            TKey[] keys;
            IKeyedItem<TKey, TValue>[] items;
            IKeyedItem<TKey, TValue>[] itemsWithKeys;
            var collection =
                new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
            collection.AddItems(
                generateKeyedItem.Value.Bind(
                    GenerateValue,
                    GetKeyForItem),
                ki => ki.Key,
                collectionSize,
                out keys,
                out items,
                out itemsWithKeys);
            collection.Verify(keys, items, itemsWithKeys);
            IKeyedItem<TKey, TValue> itemNotIn =
                generateKeyedItem.Value(GenerateValue, GetKeyForItem);
            // this is to make overload resolution pick the correct Contains function. replacing keyNotIn with null causes the Contains<TValue> overload to be used. We want the Contains<TKey> version.
            TKey keyNotIn = itemNotIn.Key;
            if (keyNotIn == null)
            {
                Assert.Throws<ArgumentNullException>(
                    () => collection.Remove(keyNotIn));
            }
            else
            {
                Assert.False(collection.Remove(keyNotIn));
            }
            // Removing an absent key must not have changed anything.
            collection.Verify(keys, items, itemsWithKeys);
            var tempKeys = (TKey[]) keys.Clone();
            var tempItems = (IKeyedItem<TKey, TValue>[]) items.Clone();
            var tempItemsWithKeys =
                (IKeyedItem<TKey, TValue>[]) itemsWithKeys.Clone();
            for (var i = 0; i < itemsWithKeys.Length; i++)
            {
                TKey key = keys[i];
                if (key == null)
                {
                    Assert.Throws<ArgumentNullException>(
                        () => collection.Remove(key));
                }
                else
                {
                    Assert.True(collection.Remove(key));
                    // Expected state: everything after position i remains
                    // (keys/itemsWithKeys removed in order), and the removed
                    // item is dropped from the item sequence.
                    tempItems =
                        tempItems.RemoveAt(
                            Array.IndexOf(tempItems, itemsWithKeys[i]));
                    tempItemsWithKeys = itemsWithKeys.Slice(
                        i + 1,
                        itemsWithKeys.Length - i - 1);
                    tempKeys = keys.Slice(i + 1, keys.Length - i - 1);
                }
                collection.Verify(
                    tempKeys,
                    tempItems,
                    tempItemsWithKeys);
            }
        }
        // Indexer by key: KeyNotFoundException for an absent non-null key,
        // ArgumentNullException for a null key, success for present keys.
        [Theory]
        [MemberData(nameof(ContainsKeyData))]
        public void KeyIndexer(
            int collectionSize,
            Named<KeyedCollectionGetKeyedValue<TKey, TValue>>
                generateKeyedItem)
        {
            TKey[] keys;
            IKeyedItem<TKey, TValue>[] items;
            IKeyedItem<TKey, TValue>[] itemsWithKeys;
            var collection =
                new TestKeyedCollectionOfIKeyedItem<TKey, TValue>();
            collection.AddItems(
                generateKeyedItem.Value.Bind(
                    GenerateValue,
                    GetKeyForItem),
                ki => ki.Key,
                collectionSize,
                out keys,
                out items,
                out itemsWithKeys);
            IKeyedItem<TKey, TValue> itemNotIn =
                generateKeyedItem.Value(GenerateValue, GetKeyForItem);
            // this is to make overload resolution pick the correct Contains function. replacing keyNotIn with null causes the Contains<TValue> overload to be used. We want the Contains<TKey> version.
            TKey keyNotIn = itemNotIn.Key;
            if (keyNotIn == null)
            {
                Assert.Throws<ArgumentNullException>(
                    () => collection[keyNotIn]);
            }
            else
            {
                Assert.Throws<KeyNotFoundException>(
                    () => collection[keyNotIn]);
            }
            foreach (TKey k in keys)
            {
                // Copy the loop variable so the lambda below captures a
                // stable value.
                TKey key = k;
                if (key == null)
                {
                    Assert.Throws<ArgumentNullException>(
                        () => collection[key]);
                    continue;
                }
                // Lookup of a present key must not throw.
                IKeyedItem<TKey, TValue> tmp = collection[key];
            }
        }
[Theory]
[MemberData(nameof(CollectionSizes))]
public void KeyIndexerSet(int collectionSize)
{
TKey[] keys;
TValue[] items;
TValue[] itemsWithKeys;
var collection =
new TestKeyedCollection<TKey, TValue>(GetKeyForItem);
collection.AddItems(
GenerateValue,
GetKeyForItem,
collectionSize,
out keys,
out items,
out itemsWithKeys);
foreach (TValue item in itemsWithKeys)
{
collection[collection.IndexOf(item)] = item;
}
}
[Theory]
[MemberData(nameof(DictionaryData))]
public void Dictionary(
int addCount,
int insertCount,
int removeCount,
int removeKeyCount,
int collectionDictionaryThreshold)
{
var collection =
new TestKeyedCollectionOfIKeyedItem<TKey, TValue>(
collectionDictionaryThreshold);
TKey[] tempKeys;
IKeyedItem<TKey, TValue>[] tempItems;
IKeyedItem<TKey, TValue>[] tempItemsWithKeys;
var keys = new TKey[0];
var itemsWithKeys = new IKeyedItem<TKey, TValue>[0];
if (addCount > 0)
{
collection.AddItems(
GetNeverNullKeyMethod.Value.Bind(
GenerateValue,
GetKeyForItem),
ki => ki.Key,
addCount,
out tempKeys,
out tempItems,
out tempItemsWithKeys);
keys = keys.Push(tempKeys);
itemsWithKeys = itemsWithKeys.Push(tempItemsWithKeys);
VerifyDictionary(collection, keys, itemsWithKeys);
}
if (insertCount > 0)
{
collection.InsertItems(
GetNeverNullKeyMethod.Value.Bind(
GenerateValue,
GetKeyForItem),
ki => ki.Key,
insertCount,
out tempKeys,
out tempItems,
out tempItemsWithKeys);
keys = keys.Push(tempKeys);
itemsWithKeys = itemsWithKeys.Push(tempItemsWithKeys);
VerifyDictionary(collection, keys, itemsWithKeys);
}
if (removeCount > 0)
{
for (var i = 0; i < removeCount; i++)
{
int index = (((i*43691 << 2)/7 >> 1)*5039)
%collection.Count;
collection.RemoveAt(index);
keys = keys.RemoveAt(index);
itemsWithKeys = itemsWithKeys.RemoveAt(index);
VerifyDictionary(collection, keys, itemsWithKeys);
}
}
if (removeKeyCount > 0)
{
for (var i = 0; i < removeCount; i++)
{
int index = (((i*127 << 2)/7 >> 1)*5039)
%collection.Count;
IKeyedItem<TKey, TValue> item = collection[index];
collection.Remove(item.Key);
keys = keys.RemoveAt(index);
itemsWithKeys = itemsWithKeys.RemoveAt(index);
VerifyDictionary(collection, keys, itemsWithKeys);
}
}
}
        // Exercises both Insert overloads (generic and IList) against three
        // scenarios: inserting an item with a null key, inserting a
        // duplicate key (must throw ArgumentException), and inserting a
        // unique key.
        [Theory]
        [MemberData(nameof(ClassData))]
        public void Insert(
            int collectionSize,
            Named<KeyedCollectionGetKeyedValue<TKey, TValue>>
                generateKeyedItem)
        {
            TValue item1 = GenerateValue();
            TValue item3 = GenerateValue();
            TKey key1 = GetKeyForItem(item1);
            TKey key3 = GetKeyForItem(item3);
            var keyedItem1 = new KeyedItem<TKey, TValue>(key1, item1);
            // Both Insert overloads must behave identically.
            var inserts =
                new Action
                    <KeyedCollection<TKey, IKeyedItem<TKey, TValue>>,
                        int, IKeyedItem<TKey, TValue>>[]
                {
                    (c, i, item) => c.Insert(i, item),
                    (c, i, item) => ((IList) c).Insert(i, item)
                };
            foreach (
                Action
                    <KeyedCollection<TKey, IKeyedItem<TKey, TValue>>,
                        int, IKeyedItem<TKey, TValue>> i in inserts)
            {
                // Copy the loop variable so each scenario uses a stable
                // delegate reference.
                Action
                    <KeyedCollection<TKey, IKeyedItem<TKey, TValue>>,
                        int, IKeyedItem<TKey, TValue>> insert = i;
                {
                    // Insert key is null
                    TKey[] keys;
                    IKeyedItem<TKey, TValue>[] items;
                    IKeyedItem<TKey, TValue>[] itemsWithKeys;
                    var collection =
                        new TestKeyedCollectionOfIKeyedItem
                            <TKey, TValue>();
                    collection.AddItems(
                        generateKeyedItem.Value.Bind(
                            GenerateValue,
                            GetKeyForItem),
                        ki => ki.Key,
                        collectionSize,
                        out keys,
                        out items,
                        out itemsWithKeys);
                    var tempKeyedItem =
                        new KeyedItem<TKey, TValue>(
                            default(TKey),
                            item3);
                    // The null-keyed item appears in the sequence but not in
                    // the key lookup.
                    keys = keys.Push(key1);
                    items = items.Push(keyedItem1, tempKeyedItem);
                    itemsWithKeys = itemsWithKeys.Push(keyedItem1);
                    insert(collection, collection.Count, keyedItem1);
                    insert(collection, collection.Count, tempKeyedItem);
                    collection.Verify(keys, items, itemsWithKeys);
                }
                {
                    // Insert key already exists
                    TKey[] keys;
                    IKeyedItem<TKey, TValue>[] items;
                    IKeyedItem<TKey, TValue>[] itemsWithKeys;
                    var collection =
                        new TestKeyedCollectionOfIKeyedItem
                            <TKey, TValue>();
                    collection.AddItems(
                        generateKeyedItem.Value.Bind(
                            GenerateValue,
                            GetKeyForItem),
                        ki => ki.Key,
                        collectionSize,
                        out keys,
                        out items,
                        out itemsWithKeys);
                    // Same key (key1) as keyedItem1, different value.
                    var tempKeyedItem = new KeyedItem<TKey, TValue>(
                        key1,
                        item3);
                    keys = keys.Push(key1);
                    items = items.Push(keyedItem1);
                    itemsWithKeys = itemsWithKeys.Push(keyedItem1);
                    insert(collection, collection.Count, keyedItem1);
                    AssertExtensions.Throws<ArgumentException>(null, () => insert(collection, collection.Count, tempKeyedItem));
                    collection.Verify(keys, items, itemsWithKeys);
                }
                {
                    // Insert key is unique
                    TKey[] keys;
                    IKeyedItem<TKey, TValue>[] items;
                    IKeyedItem<TKey, TValue>[] itemsWithKeys;
                    var collection =
                        new TestKeyedCollectionOfIKeyedItem
                            <TKey, TValue>();
                    collection.AddItems(
                        generateKeyedItem.Value.Bind(
                            GenerateValue,
                            GetKeyForItem),
                        ki => ki.Key,
                        collectionSize,
                        out keys,
                        out items,
                        out itemsWithKeys);
                    var tempKeyedItem = new KeyedItem<TKey, TValue>(
                        key3,
                        item3);
                    keys = keys.Push(key1, key3);
                    items = items.Push(keyedItem1, tempKeyedItem);
                    itemsWithKeys = itemsWithKeys.Push(
                        keyedItem1,
                        tempKeyedItem);
                    insert(collection, collection.Count, keyedItem1);
                    insert(collection, collection.Count, tempKeyedItem);
                    collection.Verify(keys, items, itemsWithKeys);
                }
            }
        }
}
public abstract class IListTestKeyedCollection<TKey, TValue> :
IListTest<KeyedCollection<TKey, TValue>, TValue>
{
protected IListTestKeyedCollection()
: base(false, false, false, false, true, true)
{
}
protected abstract TKey GetKeyForItem(TValue item);
/// <summary>
/// When overridden in a derived class, Gets an instance of the list under test containing the given items.
/// </summary>
/// <param name="items">The items to initialize the list with.</param>
/// <returns>An instance of the list under test containing the given items.</returns>
protected override KeyedCollection<TKey, TValue> CreateList(
IEnumerable<TValue> items)
{
var ret =
new TestKeyedCollection<TKey, TValue>(GetKeyForItem);
if (items == null)
{
return ret;
}
foreach (TValue item in items)
{
ret.Add(item);
}
return ret;
}
/// <summary>
/// When overridden in a derived class, invalidates any enumerators for the given list.
/// </summary>
/// <param name="list">The list to invalidate enumerators for.</param>
/// <returns>The new contents of the list.</returns>
protected override IEnumerable<TValue> InvalidateEnumerator(
KeyedCollection<TKey, TValue> list)
{
TValue item = CreateItem();
list.Add(item);
return list;
}
}
    // Variant of IListTestKeyedCollection that wraps every key in BadKey
    // and compares keys with BadKeyComparer.
    public abstract class IListTestKeyedCollectionBadKey<TKey, TValue> :
        IListTest<KeyedCollection<BadKey<TKey>, TValue>, TValue>
        where TKey : IEquatable<TKey>
    {
        protected IListTestKeyedCollectionBadKey()
            : base(false, false, false, false, true, true)
        {
        }
        /// <summary>
        /// When overridden in a derived class, Gets an instance of the list under test containing the given items.
        /// </summary>
        /// <param name="items">The items to initialize the list with.</param>
        /// <returns>An instance of the list under test containing the given items.</returns>
        protected override KeyedCollection<BadKey<TKey>, TValue>
            CreateList(IEnumerable<TValue> items)
        {
            var ret =
                new TestKeyedCollection<BadKey<TKey>, TValue>(
                    item => new BadKey<TKey>(GetKeyForItem(item)),
                    new BadKeyComparer<TKey>());
            if (items == null)
            {
                return ret;
            }
            foreach (TValue item in items)
            {
                ret.Add(item);
            }
            return ret;
        }
        /// <summary>
        /// When overridden in a derived class, invalidates any enumerators for the given list.
        /// </summary>
        /// <param name="list">The list to invalidate enumerators for.</param>
        /// <returns>The new contents of the list.</returns>
        protected override IEnumerable<TValue> InvalidateEnumerator(
            KeyedCollection<BadKey<TKey>, TValue> list)
        {
            TValue item = CreateItem();
            list.Add(item);
            return list;
        }
        /// <summary>
        /// When overridden in a derived class, gets the key for the given item.
        /// </summary>
        protected abstract TKey GetKeyForItem(TValue item);
    }
}
| |
//! \file SjTransform.cs
//! \date 2018 Oct 01
//! \brief Encryption method used by Studio Jikkenshitsu.
//
// Copyright (C) 2018 by morkt
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//
using System;
using System.Security.Cryptography;
namespace GameRes.Formats.Jikkenshitsu
{
internal sealed class SjTransform : ICryptoTransform
{
        // The cipher operates on fixed 8-byte blocks.
        const int BlockSize = 8;
        public bool CanReuseTransform { get { return true; } }
        public bool CanTransformMultipleBlocks { get { return true; } }
        public int InputBlockSize { get { return BlockSize; } }
        public int OutputBlockSize { get { return BlockSize; } }
        // Expanded round-key material filled in by InitKey (16 rounds x 8 bytes each).
        byte[] m_key000 = new byte[0x80];
        byte[] m_key080 = new byte[0x80];
public SjTransform (byte[] key)
{
var key_bits = GetKeyBits (key);
InitKey (key_bits);
}
byte[] GetKeyBits (byte[] key)
{
var key_bits = new byte[0x40];
int length = Math.Min (key.Length, 16);
int dst = 0;
for (int src = 0; src < length && key[src] != 0; ++src)
{
byte k = key[src];
key_bits[dst++] = (byte)((k >> 3) & 1);
key_bits[dst++] = (byte)((k >> 2) & 1);
key_bits[dst++] = (byte)((k >> 1) & 1);
key_bits[dst++] = (byte)(k & 1);
}
return key_bits;
}
        // Derives the per-round key bytes (m_key000 / m_key080) from the
        // unpacked key bits.  The fixed index assignments below implement a
        // permuted-choice step over the key bits; the overall structure looks
        // like a DES-style key schedule (inferred from the tables, not
        // verified against a specification).
        void InitKey (byte[] key_bits)
        {
            var key_buf = new int[0xA8];
            key_buf[56] = key_bits[56];
            key_buf[57] = key_bits[48];
            key_buf[58] = key_bits[40];
            key_buf[59] = key_bits[32];
            key_buf[60] = key_bits[24];
            key_buf[61] = key_bits[16];
            key_buf[62] = key_bits[8];
            key_buf[63] = key_bits[0];
            key_buf[64] = key_bits[57];
            key_buf[65] = key_bits[49];
            key_buf[66] = key_bits[41];
            key_buf[67] = key_bits[33];
            key_buf[68] = key_bits[25];
            key_buf[69] = key_bits[17];
            key_buf[70] = key_bits[9];
            key_buf[71] = key_bits[1];
            key_buf[72] = key_bits[58];
            key_buf[73] = key_bits[50];
            key_buf[74] = key_bits[42];
            key_buf[75] = key_bits[34];
            key_buf[76] = key_bits[26];
            key_buf[77] = key_bits[18];
            key_buf[78] = key_bits[10];
            key_buf[79] = key_bits[2];
            key_buf[80] = key_bits[59];
            key_buf[81] = key_bits[51];
            key_buf[82] = key_bits[43];
            key_buf[83] = key_bits[35];
            key_buf[112] = key_bits[62];
            key_buf[113] = key_bits[54];
            key_buf[114] = key_bits[46];
            key_buf[115] = key_bits[38];
            key_buf[116] = key_bits[30];
            key_buf[117] = key_bits[22];
            key_buf[118] = key_bits[14];
            key_buf[119] = key_bits[6];
            key_buf[120] = key_bits[61];
            key_buf[121] = key_bits[53];
            key_buf[122] = key_bits[45];
            key_buf[123] = key_bits[37];
            key_buf[124] = key_bits[29];
            key_buf[125] = key_bits[21];
            key_buf[126] = key_bits[13];
            key_buf[127] = key_bits[5];
            key_buf[128] = key_bits[60];
            key_buf[129] = key_bits[52];
            key_buf[130] = key_bits[44];
            key_buf[131] = key_bits[36];
            key_buf[132] = key_bits[28];
            key_buf[133] = key_bits[20];
            key_buf[134] = key_bits[12];
            key_buf[135] = key_bits[4];
            key_buf[136] = key_bits[27];
            key_buf[137] = key_bits[19];
            key_buf[138] = key_bits[11];
            key_buf[139] = key_bits[3];
            // Duplicate both 28-entry halves past their ends so that the
            // sliding read offsets below (off_45AA20/off_45AA60) never wrap.
            Array.Copy (key_buf, 0x38, key_buf, 0x54, 0x1C);
            Array.Copy (key_buf, 0x70, key_buf, 0x8C, 0x1C);
            // Names ending in hex offsets appear to be data addresses
            // recovered from the original game executable.
            int src1 = 0; //off_45AA20;
            int src2 = 0; //off_45AA60;
            int k8 = 0; // g_key080;
            int k0 = 0; // g_key000;
            for (int i = 0; i < 16; ++i) // one iteration per cipher round
            {
                // Shift both halves by the round-specific offset, then pick
                // permuted bit groups and pack them into key bytes.
                Array.Copy (key_buf, off_45AA20[src1++], key_buf, off_45AAA8[0], 0x1C);
                Array.Copy (key_buf, off_45AA60[src2++], key_buf, off_45AB08[0], 0x1C);
                m_key080[k8++] = (byte)(key_buf[off_45AAA8[0]] + 2 * (key_buf[off_45AAA8[20]] + 2 * (key_buf[off_45AAA8[9]] + 2 * key_buf[off_45AAA8[15]])));
                m_key080[k8++] = (byte)(key_buf[off_45AAA8[18]] + 2 * (key_buf[off_45AAA8[5]] + 2 * (key_buf[off_45AAA8[13]] + 2 * key_buf[off_45AAA8[23]])));
                m_key080[k8++] = (byte)(key_buf[off_45AAA8[21]] + 2 * (key_buf[off_45AAA8[3]] + 2 * (key_buf[off_45AAA8[10]] + 2 * key_buf[off_45AAA8[16]])));
                m_key080[k8++] = (byte)(key_buf[off_45AAA8[11]] + 2 * (key_buf[off_45AAA8[17]] + 2 * (key_buf[off_45AAA8[22]] + 2 * key_buf[off_45AAA8[6]])));
                m_key080[k8++] = (byte)(key_buf[off_45AB08[15]] + 2 * (key_buf[off_45AB08[7]] + 2 * (key_buf[off_45AB08[2]] + 2 * key_buf[off_45AB08[20]])));
                m_key080[k8++] = (byte)(key_buf[off_45AB08[4]] + 2 * (key_buf[off_45AB08[13]] + 2 * (key_buf[off_45AB08[19]] + 2 * key_buf[off_45AB08[9]])));
                m_key080[k8++] = (byte)(key_buf[off_45AB08[5]] + 2 * (key_buf[off_45AB08[23]] + 2 * (key_buf[off_45AB08[8]] + 2 * key_buf[off_45AB08[17]])));
                m_key080[k8++] = (byte)(key_buf[off_45AB08[0]] + 2 * (key_buf[off_45AB08[6]] + 2 * (key_buf[off_45AB08[18]] + 2 * key_buf[off_45AB08[11]])));
                m_key000[k0++] = (byte)(key_buf[off_45AAA8[4]] + 2 * key_buf[off_45AAA8[12]]);
                m_key000[k0++] = (byte)(key_buf[off_45AAA8[8]] + 2 * key_buf[off_45AAA8[2]]);
                m_key000[k0++] = (byte)(key_buf[off_45AAA8[7]] + 2 * key_buf[off_45AAA8[19]]);
                m_key000[k0++] = (byte)(key_buf[off_45AAA8[1]] + 2 * key_buf[off_45AAA8[14]]);
                m_key000[k0++] = (byte)(key_buf[off_45AB08[22]] + 2 * key_buf[off_45AB08[10]]);
                m_key000[k0++] = (byte)(key_buf[off_45AB08[16]] + 2 * key_buf[off_45AB08[1]]);
                m_key000[k0++] = (byte)(key_buf[off_45AB08[21]] + 2 * key_buf[off_45AB08[12]]);
                m_key000[k0++] = (byte)(key_buf[off_45AB08[3]] + 2 * key_buf[off_45AB08[14]]);
            }
        }
public int TransformBlock (byte[] inputBuffer, int inputOffset, int inputCount,
byte[] outputBuffer, int outputOffset)
{
for (int i = 0; i < inputCount; i += 8)
{
DoTransform (inputBuffer, inputOffset, outputBuffer, outputOffset);
inputOffset += 8;
outputOffset += 8;
}
return inputCount;
}
public byte[] TransformFinalBlock (byte[] inputBuffer, int inputOffset, int inputCount)
{
byte[] outputBuffer = new byte[inputCount];
int src = inputOffset;
int dst = 0;
if (inputCount >= 8)
{
int count = inputCount & ~7;
TransformBlock (inputBuffer, inputOffset, count, outputBuffer, dst);
dst += count;
inputCount -= count;
}
if (inputCount > 0)
Buffer.BlockCopy (inputBuffer, inputOffset, outputBuffer, dst, inputCount);
return outputBuffer;
}
        // Transforms one 8-byte block from input[src..src+8) into
        // output[dst..dst+8).  The block is decomposed into 64 one-bit
        // variables (bNM = bit M of byte N), mixed by 8 outer iterations of
        // 16 table-substitution steps driven by the round keys, then
        // reassembled.  Statement order is significant throughout: each step
        // reads bits that earlier steps may already have modified.
        void DoTransform (byte[] input, int src, byte[] output, int dst)
        {
            // Split the 8 input bytes into individual bits.
            int b07 = (input[src] >> 7) & 1;
            int b06 = (input[src] >> 6) & 1;
            int b05 = (input[src] >> 5) & 1;
            int b04 = (input[src] >> 4) & 1;
            int b03 = (input[src] >> 3) & 1;
            int b02 = (input[src] >> 2) & 1;
            int b01 = (input[src] >> 1) & 1;
            int b00 = input[src] & 1;
            int b17 = (input[src+1] >> 7) & 1;
            int b16 = (input[src+1] >> 6) & 1;
            int b15 = (input[src+1] >> 5) & 1;
            int b14 = (input[src+1] >> 4) & 1;
            int b13 = (input[src+1] >> 3) & 1;
            int b12 = (input[src+1] >> 2) & 1;
            int b11 = (input[src+1] >> 1) & 1;
            int b10 = input[src+1] & 1;
            int b27 = (input[src+2] >> 7) & 1;
            int b26 = (input[src+2] >> 6) & 1;
            int b25 = (input[src+2] >> 5) & 1;
            int b24 = (input[src+2] >> 4) & 1;
            int b23 = (input[src+2] >> 3) & 1;
            int b22 = (input[src+2] >> 2) & 1;
            int b21 = (input[src+2] >> 1) & 1;
            int b20 = input[src+2] & 1;
            int b37 = (input[src+3] >> 7) & 1;
            int b36 = (input[src+3] >> 6) & 1;
            int b35 = (input[src+3] >> 5) & 1;
            int b34 = (input[src+3] >> 4) & 1;
            int b33 = (input[src+3] >> 3) & 1;
            int b32 = (input[src+3] >> 2) & 1;
            int b31 = (input[src+3] >> 1) & 1;
            int b30 = input[src+3] & 1;
            int b47 = (input[src+4] >> 7) & 1;
            int b46 = (input[src+4] >> 6) & 1;
            int b45 = (input[src+4] >> 5) & 1;
            int b44 = (input[src+4] >> 4) & 1;
            int b43 = (input[src+4] >> 3) & 1;
            int b42 = (input[src+4] >> 2) & 1;
            int b41 = (input[src+4] >> 1) & 1;
            int b40 = input[src+4] & 1;
            int b57 = (input[src+5] >> 7) & 1;
            int b56 = (input[src+5] >> 6) & 1;
            int b55 = (input[src+5] >> 5) & 1;
            int b54 = (input[src+5] >> 4) & 1;
            int b53 = (input[src+5] >> 3) & 1;
            int b52 = (input[src+5] >> 2) & 1;
            int b51 = (input[src+5] >> 1) & 1;
            int b50 = input[src+5] & 1;
            int b67 = (input[src+6] >> 7) & 1;
            int b66 = (input[src+6] >> 6) & 1;
            int b65 = (input[src+6] >> 5) & 1;
            int b64 = (input[src+6] >> 4) & 1;
            int b63 = (input[src+6] >> 3) & 1;
            int b62 = (input[src+6] >> 2) & 1;
            int b61 = (input[src+6] >> 1) & 1;
            int b60 = input[src+6] & 1;
            int b77 = (input[src+7] >> 7) & 1;
            int b76 = (input[src+7] >> 6) & 1;
            int b75 = (input[src+7] >> 5) & 1;
            int b74 = (input[src+7] >> 4) & 1;
            int b73 = (input[src+7] >> 3) & 1;
            int b72 = (input[src+7] >> 2) & 1;
            int b71 = (input[src+7] >> 1) & 1;
            int b70 = input[src+7] & 1;
            // Round-key cursors; each loop iteration consumes 16 bytes of each.
            int t0 = 0; // m_key000;
            int t1 = 0; // m_key080;
            for (int i = 0; i < 8; ++i)
            {
                byte[] bits;
                // Each step forms a 6-bit index: a key-whitened 2-bit value in
                // the high nibble position and a key-whitened 4-bit value in
                // the low one, then XORs the 4-bit table output into the state.
                bits = BitsOrder0[(m_key000[t0++] ^ (b37 | b01 << 1)) << 4 | (m_key080[t1++] ^ (b47 | b57 << 1 | b67 << 2 | b77 << 3))];
                b74 ^= bits[0];
                b72 ^= bits[1];
                b12 ^= bits[2];
                b10 ^= bits[3];
                bits = BitsOrder1[(m_key000[t0++] ^ (b75 | b47 << 1)) << 4 | (m_key080[t1++] ^ (b07 | b17 << 1 | b27 << 2 | b37 << 3))];
                b34 ^= bits[0];
                b40 ^= bits[1];
                b66 ^= bits[2];
                b62 ^= bits[3];
                bits = BitsOrder2[(m_key000[t0++] ^ (b35 | b07 << 1)) << 4 | (m_key080[t1++] ^ (b45 | b55 << 1 | b65 << 2 | b75 << 3))];
                b02 ^= bits[0];
                b04 ^= bits[1];
                b20 ^= bits[2];
                b26 ^= bits[3];
                bits = BitsOrder3[(m_key000[t0++] ^ (b73 | b45 << 1)) << 4 | (m_key080[t1++] ^ (b05 | b15 << 1 | b25 << 2 | b35 << 3))];
                b60 ^= bits[0];
                b42 ^= bits[1];
                b64 ^= bits[2];
                b76 ^= bits[3];
                bits = BitsOrder4[(m_key000[t0++] ^ (b33 | b05 << 1)) << 4 | (m_key080[t1++] ^ (b43 | b53 << 1 | b63 << 2 | b73 << 3))];
                b06 ^= bits[0];
                b24 ^= bits[1];
                b70 ^= bits[2];
                b56 ^= bits[3];
                bits = BitsOrder5[(m_key000[t0++] ^ (b71 | b43 << 1)) << 4 | (m_key080[t1++] ^ (b03 | b13 << 1 | b23 << 2 | b33 << 3))];
                b46 ^= bits[0];
                b30 ^= bits[1];
                b54 ^= bits[2];
                b52 ^= bits[3];
                bits = BitsOrder6[(m_key000[t0++] ^ (b31 | b03 << 1)) << 4 | (m_key080[t1++] ^ (b41 | b51 << 1 | b61 << 2 | b71 << 3))];
                b00 ^= bits[0];
                b44 ^= bits[1];
                b22 ^= bits[2];
                b16 ^= bits[3];
                bits = BitsOrder7[(m_key000[t0++] ^ (b77 | b41 << 1)) << 4 | (m_key080[t1++] ^ (b01 | b11 << 1 | b21 << 2 | b31 << 3))];
                b36 ^= bits[0];
                b50 ^= bits[1];
                b14 ^= bits[2];
                b32 ^= bits[3];
                // Second half of the round: same tables, shifted bit selection.
                bits = BitsOrder0[(m_key000[t0++] ^ (b36 | b00 << 1)) << 4 | (m_key080[t1++] ^ (b46 | b56 << 1 | b66 << 2 | b76 << 3))];
                b75 ^= bits[0];
                b73 ^= bits[1];
                b13 ^= bits[2];
                b11 ^= bits[3];
                bits = BitsOrder1[(m_key000[t0++] ^ (b74 | b46 << 1)) << 4 | (m_key080[t1++] ^ (b06 | b16 << 1 | b26 << 2 | b36 << 3))];
                b35 ^= bits[0];
                b41 ^= bits[1];
                b67 ^= bits[2];
                b63 ^= bits[3];
                bits = BitsOrder2[(m_key000[t0++] ^ (b34 | b06 << 1)) << 4 | (m_key080[t1++] ^ (b44 | b54 << 1 | b64 << 2 | b74 << 3))];
                b03 ^= bits[0];
                b05 ^= bits[1];
                b21 ^= bits[2];
                b27 ^= bits[3];
                bits = BitsOrder3[(m_key000[t0++] ^ (b72 | b44 << 1)) << 4 | (m_key080[t1++] ^ (b04 | b14 << 1 | b24 << 2 | b34 << 3))];
                b61 ^= bits[0];
                b43 ^= bits[1];
                b65 ^= bits[2];
                b77 ^= bits[3];
                bits = BitsOrder4[(m_key000[t0++] ^ (b32 | b04 << 1)) << 4 | (m_key080[t1++] ^ (b42 | b52 << 1 | b62 << 2 | b72 << 3))];
                b07 ^= bits[0];
                b25 ^= bits[1];
                b71 ^= bits[2];
                b57 ^= bits[3];
                bits = BitsOrder5[(m_key000[t0++] ^ (b70 | b42 << 1)) << 4 | (m_key080[t1++] ^ (b02 | b12 << 1 | b22 << 2 | b32 << 3))];
                b47 ^= bits[0];
                b31 ^= bits[1];
                b55 ^= bits[2];
                b53 ^= bits[3];
                bits = BitsOrder6[(m_key000[t0++] ^ (b30 | b02 << 1)) << 4 | (m_key080[t1++] ^ (b40 | b50 << 1 | b60 << 2 | b70 << 3))];
                b01 ^= bits[0];
                b45 ^= bits[1];
                b23 ^= bits[2];
                b17 ^= bits[3];
                bits = BitsOrder7[(m_key000[t0++] ^ (b76 | b40 << 1)) << 4 | (m_key080[t1++] ^ (b00 | b10 << 1 | b20 << 2 | b30 << 3))];
                b37 ^= bits[0];
                b51 ^= bits[1];
                b15 ^= bits[2];
                b33 ^= bits[3];
            }
            // Reassemble the permuted bits into output bytes.  Note the top
            // two bits are combined as (b7 - (b6 << 1)), matching the original
            // arithmetic exactly; the result is truncated to a byte.
            output[dst  ] = (byte)(b01 + 2 * (b00 + 2 * (b03 + 2 * (b02 + 2 * (b05 + 2 * (b04 + 2 * (b07 - (b06 << 1))))))));
            output[dst+1] = (byte)(b11 + 2 * (b10 + 2 * (b13 + 2 * (b12 + 2 * (b15 + 2 * (b14 + 2 * (b17 - (b16 << 1))))))));
            output[dst+2] = (byte)(b21 + 2 * (b20 + 2 * (b23 + 2 * (b22 + 2 * (b25 + 2 * (b24 + 2 * (b27 - (b26 << 1))))))));
            output[dst+3] = (byte)(b31 + 2 * (b30 + 2 * (b33 + 2 * (b32 + 2 * (b35 + 2 * (b34 + 2 * (b37 - (b36 << 1))))))));
            output[dst+4] = (byte)(b41 + 2 * (b40 + 2 * (b43 + 2 * (b42 + 2 * (b45 + 2 * (b44 + 2 * (b47 - (b46 << 1))))))));
            output[dst+5] = (byte)(b51 + 2 * (b50 + 2 * (b53 + 2 * (b52 + 2 * (b55 + 2 * (b54 + 2 * (b57 - (b56 << 1))))))));
            output[dst+6] = (byte)(b61 + 2 * (b60 + 2 * (b63 + 2 * (b62 + 2 * (b65 + 2 * (b64 + 2 * (b67 - (b66 << 1))))))));
            output[dst+7] = (byte)(b71 + 2 * (b70 + 2 * (b73 + 2 * (b72 + 2 * (b75 + 2 * (b74 + 2 * (b77 - (b76 << 1))))))));
        }
        public void Dispose ()
        {
            // The transform holds no unmanaged resources; nothing to release.
        }
        // Per-round read offsets into key_buf for the first key half.  The
        // hex-suffixed names appear to be data addresses recovered from the
        // original game executable.
        static readonly byte[] off_45AA20 = {
            0x54, 0x53, 0x51, 0x4F, 0x4D, 0x4B, 0x49, 0x47,
            0x46, 0x44, 0x42, 0x40, 0x3E, 0x3C, 0x3A, 0x39,
        };
        // Per-round read offsets into key_buf for the second key half.
        static readonly byte[] off_45AA60 = {
            0x8C, 0x8B, 0x89, 0x87, 0x85, 0x83, 0x81, 0x7F,
            0x7E, 0x7C, 0x7A, 0x78, 0x76, 0x74, 0x72, 0x71,
        };
        // Permuted-choice indices into key_buf (first half).
        static readonly byte[] off_45AAA8 = { // key_buf
            0x00, 0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07,
            0x09, 0x0A, 0x0B, 0x0C, 0x0D, 0x0E, 0x0F, 0x10,
            0x12, 0x13, 0x14, 0x16, 0x17, 0x19, 0x1A, 0x1B,
        };
        // Permuted-choice indices into key_buf (second half).
        static readonly byte[] off_45AB08 = { // key_buf
            0x1C, 0x1D, 0x1E, 0x1F, 0x20, 0x21, 0x23, 0x24,
            0x26, 0x27, 0x28, 0x29, 0x2B, 0x2C, 0x2D, 0x2E,
            0x2F, 0x30, 0x31, 0x32, 0x33, 0x34, 0x36, 0x37,
        };
        // All sixteen 4-bit patterns, shared by the substitution tables below
        // (each entry is the 4 output bits, most significant first).
        static readonly byte[] bits_0000 = { 0, 0, 0, 0 };
        static readonly byte[] bits_0001 = { 0, 0, 0, 1 };
        static readonly byte[] bits_0010 = { 0, 0, 1, 0 };
        static readonly byte[] bits_0011 = { 0, 0, 1, 1 };
        static readonly byte[] bits_0100 = { 0, 1, 0, 0 };
        static readonly byte[] bits_0101 = { 0, 1, 0, 1 };
        static readonly byte[] bits_0110 = { 0, 1, 1, 0 };
        static readonly byte[] bits_0111 = { 0, 1, 1, 1 };
        static readonly byte[] bits_1000 = { 1, 0, 0, 0 };
        static readonly byte[] bits_1001 = { 1, 0, 0, 1 };
        static readonly byte[] bits_1010 = { 1, 0, 1, 0 };
        static readonly byte[] bits_1011 = { 1, 0, 1, 1 };
        static readonly byte[] bits_1100 = { 1, 1, 0, 0 };
        static readonly byte[] bits_1101 = { 1, 1, 0, 1 };
        static readonly byte[] bits_1110 = { 1, 1, 1, 0 };
        static readonly byte[] bits_1111 = { 1, 1, 1, 1 };
        // Substitution boxes: 64 entries each, indexed by the 6-bit value
        // built in DoTransform (2 key-whitened bits << 4 | 4 key-whitened bits).
        static readonly byte[][] BitsOrder0 = {
            bits_1110, bits_0100, bits_1101, bits_0001, bits_0010, bits_1111, bits_1011, bits_1000,
            bits_0011, bits_1010, bits_0110, bits_1100, bits_0101, bits_1001, bits_0000, bits_0111,
            bits_0000, bits_1111, bits_0111, bits_0100, bits_1110, bits_0010, bits_1101, bits_0001,
            bits_1010, bits_0110, bits_1100, bits_1011, bits_1001, bits_0101, bits_0011, bits_1000,
            bits_0100, bits_0001, bits_1110, bits_1000, bits_1101, bits_0110, bits_0010, bits_1011,
            bits_1111, bits_1100, bits_1001, bits_0111, bits_0011, bits_1010, bits_0101, bits_0000,
            bits_1111, bits_1100, bits_1000, bits_0010, bits_0100, bits_1001, bits_0001, bits_0111,
            bits_0101, bits_1011, bits_0011, bits_1110, bits_1010, bits_0000, bits_0110, bits_1101,
        };
        static readonly byte[][] BitsOrder1 = {
            bits_1111, bits_0001, bits_1000, bits_1110, bits_0110, bits_1011, bits_0011, bits_0100,
            bits_1001, bits_0111, bits_0010, bits_1101, bits_1100, bits_0000, bits_0101, bits_1010,
            bits_0011, bits_1101, bits_0100, bits_0111, bits_1111, bits_0010, bits_1000, bits_1110,
            bits_1100, bits_0000, bits_0001, bits_1010, bits_0110, bits_1001, bits_1011, bits_0101,
            bits_0000, bits_1110, bits_0111, bits_1011, bits_1010, bits_0100, bits_1101, bits_0001,
            bits_0101, bits_1000, bits_1100, bits_0110, bits_1001, bits_0011, bits_0010, bits_1111,
            bits_1101, bits_1000, bits_1010, bits_0001, bits_0011, bits_1111, bits_0100, bits_0010,
            bits_1011, bits_0110, bits_0111, bits_1100, bits_0000, bits_0101, bits_1110, bits_1001,
        };
        static readonly byte[][] BitsOrder2 = {
            bits_1010, bits_0000, bits_1001, bits_1110, bits_0110, bits_0011, bits_1111, bits_0101,
            bits_0001, bits_1101, bits_1100, bits_0111, bits_1011, bits_0100, bits_0010, bits_1000,
            bits_1101, bits_0111, bits_0000, bits_1001, bits_0011, bits_0100, bits_0110, bits_1010,
            bits_0010, bits_1000, bits_0101, bits_1110, bits_1100, bits_1011, bits_1111, bits_0001,
            bits_1101, bits_0110, bits_0100, bits_1001, bits_1000, bits_1111, bits_0011, bits_0000,
            bits_1011, bits_0001, bits_0010, bits_1100, bits_0101, bits_1010, bits_1110, bits_0111,
            bits_0001, bits_1010, bits_1101, bits_0000, bits_0110, bits_1001, bits_1000, bits_0111,
            bits_0100, bits_1111, bits_1110, bits_0011, bits_1011, bits_0101, bits_0010, bits_1100,
        };
        static readonly byte[][] BitsOrder3 = {
            bits_0111, bits_1101, bits_1110, bits_0011, bits_0000, bits_0110, bits_1001, bits_1010,
            bits_0001, bits_0010, bits_1000, bits_0101, bits_1011, bits_1100, bits_0100, bits_1111,
            bits_1101, bits_1000, bits_1011, bits_0101, bits_0110, bits_1111, bits_0000, bits_0011,
            bits_0100, bits_0111, bits_0010, bits_1100, bits_0001, bits_1010, bits_1110, bits_1001,
            bits_1010, bits_0110, bits_1001, bits_0000, bits_1100, bits_1011, bits_0111, bits_1101,
            bits_1111, bits_0001, bits_0011, bits_1110, bits_0101, bits_0010, bits_1000, bits_0100,
            bits_0011, bits_1111, bits_0000, bits_0110, bits_1010, bits_0001, bits_1101, bits_1000,
            bits_1001, bits_0100, bits_0101, bits_1011, bits_1100, bits_0111, bits_0010, bits_1110,
        };
        static readonly byte[][] BitsOrder4 = {
            bits_0010, bits_1100, bits_0100, bits_0001, bits_0111, bits_1010, bits_1011, bits_0110,
            bits_1000, bits_0101, bits_0011, bits_1111, bits_1101, bits_0000, bits_1110, bits_1001,
            bits_1110, bits_1011, bits_0010, bits_1100, bits_0100, bits_0111, bits_1101, bits_0001,
            bits_0101, bits_0000, bits_1111, bits_1010, bits_0011, bits_1001, bits_1000, bits_0110,
            bits_0100, bits_0010, bits_0001, bits_1011, bits_1010, bits_1101, bits_0111, bits_1000,
            bits_1111, bits_1001, bits_1100, bits_0101, bits_0110, bits_0011, bits_0000, bits_1110,
            bits_1011, bits_1000, bits_1100, bits_0111, bits_0001, bits_1110, bits_0010, bits_1101,
            bits_0110, bits_1111, bits_0000, bits_1001, bits_1010, bits_0100, bits_0101, bits_0011,
        };
        static readonly byte[][] BitsOrder5 = {
            bits_1100, bits_0001, bits_1010, bits_1111, bits_1001, bits_0010, bits_0110, bits_1000,
            bits_0000, bits_1101, bits_0011, bits_0100, bits_1110, bits_0111, bits_0101, bits_1011,
            bits_1010, bits_1111, bits_0100, bits_0010, bits_0111, bits_1100, bits_1001, bits_0101,
            bits_0110, bits_0001, bits_1101, bits_1110, bits_0000, bits_1011, bits_0011, bits_1000,
            bits_1001, bits_1110, bits_1111, bits_0101, bits_0010, bits_1000, bits_1100, bits_0011,
            bits_0111, bits_0000, bits_0100, bits_1010, bits_0001, bits_1101, bits_1011, bits_0110,
            bits_0100, bits_0011, bits_0010, bits_1100, bits_1001, bits_0101, bits_1111, bits_1010,
            bits_1011, bits_1110, bits_0001, bits_0111, bits_0110, bits_0000, bits_1000, bits_1101,
        };
        static readonly byte[][] BitsOrder6 = {
            bits_0100, bits_1011, bits_0010, bits_1110, bits_1111, bits_0000, bits_1000, bits_1101,
            bits_0011, bits_1100, bits_1001, bits_0111, bits_0101, bits_1010, bits_0110, bits_0001,
            bits_1101, bits_0000, bits_1011, bits_0111, bits_0100, bits_1001, bits_0001, bits_1010,
            bits_1110, bits_0011, bits_0101, bits_1100, bits_0010, bits_1111, bits_1000, bits_0110,
            bits_0001, bits_0100, bits_1011, bits_1101, bits_1100, bits_0011, bits_0111, bits_1110,
            bits_1010, bits_1111, bits_0110, bits_1000, bits_0000, bits_0101, bits_1001, bits_0010,
            bits_0110, bits_1011, bits_1101, bits_1000, bits_0001, bits_0100, bits_1010, bits_0111,
            bits_1001, bits_0101, bits_0000, bits_1111, bits_1110, bits_0010, bits_0011, bits_1100,
        };
        static readonly byte[][] BitsOrder7 = {
            bits_1101, bits_0010, bits_1000, bits_0100, bits_0110, bits_1111, bits_1011, bits_0001,
            bits_1010, bits_1001, bits_0011, bits_1110, bits_0101, bits_0000, bits_1100, bits_0111,
            bits_0001, bits_1111, bits_1101, bits_1000, bits_1010, bits_0011, bits_0111, bits_0100,
            bits_1100, bits_0101, bits_0110, bits_1011, bits_0000, bits_1110, bits_1001, bits_0010,
            bits_0111, bits_1011, bits_0100, bits_0001, bits_1001, bits_1100, bits_1110, bits_0010,
            bits_0000, bits_0110, bits_1010, bits_1101, bits_1111, bits_0011, bits_0101, bits_1000,
            bits_0010, bits_0001, bits_1110, bits_0111, bits_0100, bits_1010, bits_1000, bits_1101,
            bits_1111, bits_1100, bits_1001, bits_0000, bits_0011, bits_0101, bits_0110, bits_1011,
        };
}
}
| |
using DevExpress.Mvvm.DataAnnotations;
using DevExpress.Mvvm.Native;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Globalization;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using System.Windows;
using System.Windows.Input;
#if !NETFX_CORE
using DevExpress.Mvvm.POCO;
using System.Windows.Threading;
#else
using Windows.UI.Xaml;
#endif
namespace DevExpress.Mvvm {
#if !SILVERLIGHT && !NETFX_CORE
public abstract class ViewModelBase : BindableBase, ISupportParentViewModel, ISupportServices, ISupportParameter, ICustomTypeDescriptor {
#else
public abstract class ViewModelBase : BindableBase, ISupportParentViewModel, ISupportServices, ISupportParameter
#if !NETFX_CORE
,ICustomTypeProvider
#endif
{
#endif
        // Sentinel distinguishing "parameter never assigned" from an assigned null.
        static readonly object NotSetParameter = new object();
        private object parameter = NotSetParameter;
        // Cached design-mode flag; resolved once on first access.
        static bool? isInDesignMode;
        public static bool IsInDesignMode {
            get {
                // An explicit override (e.g. from tests) always wins.
                if(ViewModelDesignHelper.IsInDesignModeOverride.HasValue)
                    return ViewModelDesignHelper.IsInDesignModeOverride.Value;
                if(!isInDesignMode.HasValue) {
#if SILVERLIGHT
                    isInDesignMode = DesignerProperties.IsInDesignTool;
#elif NETFX_CORE
                    isInDesignMode = Windows.ApplicationModel.DesignMode.DesignModeEnabled;
#else
                    // WPF: the designer replaces the default value of
                    // IsInDesignModeProperty at design time.
                    DependencyPropertyDescriptor property = DependencyPropertyDescriptor.FromProperty(DesignerProperties.IsInDesignModeProperty, typeof(FrameworkElement));
                    isInDesignMode = (bool)property.Metadata.DefaultValue;
#endif
                }
                return isInDesignMode.Value;
            }
        }
        object parentViewModel;
        // Explicit implementation: parent is set by the framework when this
        // view model is hosted inside another; change notification goes
        // through OnParentViewModelChanged.
        object ISupportParentViewModel.ParentViewModel {
            get { return parentViewModel; }
            set {
                if(parentViewModel == value)
                    return;
                parentViewModel = value;
                OnParentViewModelChanged(parentViewModel);
            }
        }
        IServiceContainer serviceContainer;
        IServiceContainer ISupportServices.ServiceContainer { get { return ServiceContainer; } }
        // Lazily created so a container only exists when services are used.
        protected IServiceContainer ServiceContainer { get { return serviceContainer ?? (serviceContainer = CreateServiceContainer()); } }
#if !NETFX_CORE
        bool IsPOCOViewModel { get { return this is IPOCOViewModel; } }
#else
        bool IsPOCOViewModel { get { return false; } }
#endif
        public ViewModelBase() {
#if !NETFX_CORE
            // Discover [Command]-annotated methods before any initialization runs.
            BuildCommandProperties();
#endif
            if(IsInDesignMode) {
                // Design-time init is deferred to the dispatcher on XAML
                // platforms so it runs after construction completes.
#if SILVERLIGHT
                Deployment.Current.Dispatcher.BeginInvoke(new Action(OnInitializeInDesignMode));
#elif NETFX_CORE
                OnInitializeInDesignMode();
#else
                Dispatcher.CurrentDispatcher.BeginInvoke(new Action(OnInitializeInDesignMode));
#endif
            } else {
                OnInitializeInRuntime();
            }
        }
        // Navigation/creation parameter; reads as null until a value has
        // actually been assigned (see NotSetParameter sentinel).
        protected object Parameter {
            get { return object.Equals(parameter, NotSetParameter) ? null : parameter; }
            set {
                if(parameter == value)
                    return;
                parameter = value;
                OnParameterChanged(value);
            }
        }
        object ISupportParameter.Parameter { get { return Parameter; } set { Parameter = value; } }
        // Override points for derived view models; default implementations do nothing.
        protected virtual void OnParameterChanged(object parameter) {
        }
        protected virtual IServiceContainer CreateServiceContainer() {
            return new ServiceContainer(this);
        }
        protected virtual void OnParentViewModelChanged(object parentViewModel) {
        }
        protected virtual void OnInitializeInDesignMode() {
            OnParameterChanged(null);
        }
        protected virtual void OnInitializeInRuntime() {
        }
        // Service lookup helpers; the parameterless overloads prefer services
        // registered on this view model over those of ancestors.
        protected virtual T GetService<T>() where T : class {
            return GetService<T>(ServiceSearchMode.PreferLocal);
        }
        protected virtual T GetService<T>(string key) where T : class {
            return GetService<T>(key, ServiceSearchMode.PreferLocal);
        }
        [EditorBrowsable(EditorBrowsableState.Never)]
        protected virtual T GetService<T>(ServiceSearchMode searchMode) where T : class {
            return ServiceContainer.GetService<T>(searchMode);
        }
        [EditorBrowsable(EditorBrowsableState.Never)]
        protected virtual T GetService<T>(string key, ServiceSearchMode searchMode) where T : class {
            return ServiceContainer.GetService<T>(key, searchMode);
        }
#if !NETFX_CORE
#region CommandAttributeSupport
        // Raises CanExecuteChanged for the command generated from the given
        // method expression, dispatching to the POCO infrastructure when the
        // instance is a POCO view model.
        protected internal void RaiseCanExecuteChanged(Expression<Action> commandMethodExpression) {
            if(IsPOCOViewModel) {
                POCOViewModelExtensions.RaiseCanExecuteChangedCore(this, commandMethodExpression);
            } else {
                ((IDelegateCommand)commandProperties[ExpressionHelper.GetMethod(commandMethodExpression)]
#if !SILVERLIGHT
                .GetValue(this)
#else
                .GetValue(this, null)
#endif
                ).RaiseCanExecuteChanged();
            }
        }
        internal const string CommandNameSuffix = "Command";
        const string CanExecuteSuffix = "Can";
        const string Error_PropertyWithSameNameAlreadyExists = "Property with the same name already exists: {0}.";
        internal const string Error_MethodShouldBePublic = "Method should be public: {0}.";
        const string Error_MethodCannotHaveMoreThanOneParameter = "Method cannot have more than one parameter: {0}.";
        const string Error_MethodCannotHaveOutORRefParameters = "Method cannot have out or reference parameter: {0}.";
        const string Error_CanExecuteMethodHasIncorrectParameters = "Can execute method has incorrect parameters: {0}.";
        const string Error_MethodNotFound = "Method not found: {0}.";
        Dictionary<MethodInfo, CommandProperty> commandProperties;
        // "Save" -> "SaveCommand".
        internal static string GetCommandName(MethodInfo commandMethod) {
            return commandMethod.Name + CommandNameSuffix;
        }
        // "Save" -> "CanSave".
        internal static string GetCanExecuteMethodName(MethodInfo commandMethod) {
            return CanExecuteSuffix + commandMethod.Name;
        }
        internal static T GetAttribute<T>(MethodInfo method) {
            return MetadataHelper.GetAllAttributes(method).OfType<T>().FirstOrDefault();
        }
        // Command metadata is immutable per type, so it is computed once per
        // type and cached process-wide.
        static readonly Dictionary<Type, Dictionary<MethodInfo, CommandProperty>> propertiesCache = new Dictionary<Type, Dictionary<MethodInfo, CommandProperty>>();
void BuildCommandProperties() {
commandProperties = IsPOCOViewModel ? new Dictionary<MethodInfo, CommandProperty>() : GetCommandProperties(GetType());
}
static Dictionary<MethodInfo, CommandProperty> GetCommandProperties(Type type) {
Dictionary<MethodInfo, CommandProperty> result = propertiesCache.GetOrAdd(type, () => CreateCommandProperties(type));
return result;
}
        // Scans the type for [Command]-annotated instance methods and builds a
        // CommandProperty descriptor for each, validating names, visibility
        // and parameter shapes.  Throws CommandAttributeException on conflict.
        static Dictionary<MethodInfo, CommandProperty> CreateCommandProperties(Type type) {
            Dictionary<MethodInfo, CommandProperty> commandProperties = type.GetMethods(BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance)
                .Where(x => GetAttribute<CommandAttribute>(x) != null).ToArray()
                .Select(x => {
                    CommandAttribute attribute = GetAttribute<CommandAttribute>(x);
                    // Explicit name wins; otherwise append "Command" unless the
                    // method name already ends with it.
                    string name = attribute.Name ?? (x.Name.EndsWith(CommandNameSuffix) ? x.Name : GetCommandName(x));
                    MethodInfo canExecuteMethod = GetCanExecuteMethod(type, x, attribute, s => new CommandAttributeException(s), m => m.IsPublic);
                    var attributes = MetadataHelper.GetAllAttributes(x);
                    return new CommandProperty(x, canExecuteMethod, name, attribute.GetUseCommandManager(), attributes, type);
                })
                .ToDictionary(x => x.Method);
            // Validate after the full set is known so duplicate names can be detected.
            foreach(var property in commandProperties.Values) {
                if(type.GetProperty(property.Name) != null || commandProperties.Values.Any(x => x.Name == property.Name && x != property))
                    throw new CommandAttributeException(string.Format(Error_PropertyWithSameNameAlreadyExists, property.Name));
                if(!property.Method.IsPublic)
                    throw new CommandAttributeException(string.Format(Error_MethodShouldBePublic, property.Method.Name));
                ValidateCommandMethodParameters(property.Method, x => new CommandAttributeException(x));
            }
            return commandProperties;
        }
internal static bool ValidateCommandMethodParameters(MethodInfo method, Func<string, Exception> createException) {
ParameterInfo[] parameters = method.GetParameters();
if(CheckCommandMethodConditionValue(parameters.Length <= 1, method, Error_MethodCannotHaveMoreThanOneParameter, createException))
return false;
bool isValidSingleParameter = parameters.Length == 1 && (parameters[0].IsOut || parameters[0].ParameterType.IsByRef);
if(CheckCommandMethodConditionValue(!isValidSingleParameter, method, Error_MethodCannotHaveOutORRefParameters, createException)) {
return false;
}
return true;
}
        // Returns true when the condition is VIOLATED (value == false).  If the
        // method is explicitly marked as a command (attribute.IsCommand), a
        // violation throws instead of returning normally.
        static bool CheckCommandMethodConditionValue(bool value, MethodInfo method, string errorString, Func<string, Exception> createException) {
            CommandAttribute attribute = GetAttribute<CommandAttribute>(method);
            if(!value && attribute != null && attribute.IsCommand)
                throw createException(string.Format(errorString, method.Name));
            return !value;
        }
        // Resolves the can-execute companion for a command method.  Resolution
        // order: an explicit MethodInfo on the attribute, then a method named
        // by the attribute, then the conventional "Can" + method name.  A
        // custom name that cannot be found is an error; the conventional name
        // is optional.  Returns null when no companion exists.
        internal static MethodInfo GetCanExecuteMethod(Type type, MethodInfo methodInfo, CommandAttribute commandAttribute, Func<string, Exception> createException, Func<MethodInfo, bool> canAccessMethod) {
            if(commandAttribute != null && commandAttribute.CanExecuteMethod != null) {
                CheckCanExecuteMethod(methodInfo, createException, commandAttribute.CanExecuteMethod, canAccessMethod);
                return commandAttribute.CanExecuteMethod;
            }
            bool hasCustomCanExecuteMethod = commandAttribute != null && !string.IsNullOrEmpty(commandAttribute.CanExecuteMethodName);
            string canExecuteMethodName = hasCustomCanExecuteMethod ? commandAttribute.CanExecuteMethodName : GetCanExecuteMethodName(methodInfo);
            MethodInfo canExecuteMethod = type.GetMethod(canExecuteMethodName, BindingFlags.Public | BindingFlags.NonPublic | BindingFlags.Instance);
            if(hasCustomCanExecuteMethod && canExecuteMethod == null)
                throw createException(string.Format(Error_MethodNotFound, commandAttribute.CanExecuteMethodName));
            if(canExecuteMethod != null) {
                CheckCanExecuteMethod(methodInfo, createException, canExecuteMethod, canAccessMethod);
            }
            return canExecuteMethod;
        }
        // Verifies that the can-execute method mirrors the command method's
        // parameter list (count, type and out-ness) and is accessible.
        static void CheckCanExecuteMethod(MethodInfo methodInfo, Func<string, Exception> createException, MethodInfo canExecuteMethod, Func<MethodInfo, bool> canAccessMethod) {
            ParameterInfo[] parameters = methodInfo.GetParameters();
            ParameterInfo[] canExecuteParameters = canExecuteMethod.GetParameters();
            if(parameters.Length != canExecuteParameters.Length)
                throw createException(string.Format(Error_CanExecuteMethodHasIncorrectParameters, canExecuteMethod.Name));
            if(parameters.Length == 1 && (parameters[0].ParameterType != canExecuteParameters[0].ParameterType || parameters[0].IsOut != canExecuteParameters[0].IsOut))
                throw createException(string.Format(Error_CanExecuteMethodHasIncorrectParameters, canExecuteMethod.Name));
            if(!canAccessMethod(canExecuteMethod))
                throw createException(string.Format(Error_MethodShouldBePublic, canExecuteMethod.Name));
        }
        // Generic factory used via reflection (see CreateCommand below) so
        // DelegateCommand<T> can be closed over the command parameter type.
        public static class CreateCommandHelper<T> {
            public static IDelegateCommand CreateCommand(object owner, MethodInfo method, MethodInfo canExecuteMethod, bool? useCommandManager, bool hasParameter) {
                return new DelegateCommand<T>(
                    x => method.Invoke(owner, GetInvokeParameters(x, hasParameter)),
                    // No can-execute companion means the command is always executable.
                    x => canExecuteMethod != null ? (bool)canExecuteMethod.Invoke(owner, GetInvokeParameters(x, hasParameter)) : true
#if !SILVERLIGHT
                    , useCommandManager
#endif
                );
            }
            static object[] GetInvokeParameters(object parameter, bool hasParameter) {
                return hasParameter ? new[] { parameter } : new object[0];
            }
        }
        // Per-instance cache so each command method maps to a single command object.
        readonly Dictionary<MethodInfo, IDelegateCommand> commands = new Dictionary<MethodInfo, IDelegateCommand>();
        IDelegateCommand GetCommand(MethodInfo method, MethodInfo canExecuteMethod, bool? useCommandManager, bool hasParameter) {
            return commands.GetOrAdd(method, () => CreateCommand(method, canExecuteMethod, useCommandManager, hasParameter));
        }
        // Closes CreateCommandHelper<T> over the parameter type (object for
        // parameterless commands) and invokes its static factory.
        IDelegateCommand CreateCommand(MethodInfo method, MethodInfo canExecuteMethod, bool? useCommandManager, bool hasParameter) {
            Type commandType = hasParameter ? method.GetParameters()[0].ParameterType : typeof(object);
            return (IDelegateCommand)typeof(CreateCommandHelper<>).MakeGenericType(commandType).GetMethod("CreateCommand", BindingFlags.Static | BindingFlags.Public).Invoke(null, new object[] { this, method, canExecuteMethod, useCommandManager, hasParameter });
        }
#region CommandProperty
// Synthetic property exposing a command generated from a [Command]-attributed
// method. On desktop it is a PropertyDescriptor; on Silverlight/NETFX_CORE it
// is a PropertyInfo surfaced through the custom-type machinery.
// FIX: the preprocessor guards below were '#if !SILVERLIGHT', which under
// NETFX_CORE chained a (name, attributes) base call and compiled the
// PropertyDescriptor override set against a PropertyInfo base class; they now
// match the base-class condition '#if !SILVERLIGHT && !NETFX_CORE'.
class CommandProperty :
#if !SILVERLIGHT && !NETFX_CORE
    PropertyDescriptor
#else
    PropertyInfo
#endif
{
    readonly MethodInfo method;
    readonly MethodInfo canExecuteMethod;
    readonly string name;
    readonly bool? useCommandManager;
    readonly bool hasParameter;
    readonly Attribute[] attributes;
    readonly Type reflectedType;
    public MethodInfo Method { get { return method; } }
    public MethodInfo CanExecuteMethod { get { return canExecuteMethod; } }
    public CommandProperty(MethodInfo method, MethodInfo canExecuteMethod, string name, bool? useCommandManager, Attribute[] attributes, Type reflectedType)
#if !SILVERLIGHT && !NETFX_CORE
        : base(name, attributes)
#endif
    {
        this.method = method;
        // A command method takes either zero parameters or exactly one.
        this.hasParameter = method.GetParameters().Length == 1;
        this.canExecuteMethod = canExecuteMethod;
        this.name = name;
        this.useCommandManager = useCommandManager;
        this.attributes = attributes;
        this.reflectedType = reflectedType;
    }
    // Resolves (and caches, per instance) the command on the owning view model.
    IDelegateCommand GetCommand(object component) {
        return ((ViewModelBase)component).GetCommand(method, canExecuteMethod, useCommandManager, hasParameter);
    }
#if !SILVERLIGHT && !NETFX_CORE
    public override bool CanResetValue(object component) { return false; }
    public override Type ComponentType { get { return method.DeclaringType; } }
    public override object GetValue(object component) { return GetCommand(component); }
    public override bool IsReadOnly { get { return true; } }
    public override Type PropertyType { get { return typeof(ICommand); } }
    public override void ResetValue(object component) { throw new NotSupportedException(); }
    public override void SetValue(object component, object value) { throw new NotSupportedException(); }
    public override bool ShouldSerializeValue(object component) { return false; }
#else
    public override PropertyAttributes Attributes { get { return PropertyAttributes.None; } }
    public override bool CanRead { get { return true; } }
    public override bool CanWrite { get { return false; } }
    public override MethodInfo[] GetAccessors(bool nonPublic) { throw new NotSupportedException(); }
    public override MethodInfo GetGetMethod(bool nonPublic) { return null; }
    public override MethodInfo GetSetMethod(bool nonPublic) { return null; }
    public override void SetValue(object obj, object value, BindingFlags invokeAttr, Binder binder, object[] index, CultureInfo culture) { throw new NotSupportedException(); }
    public override ParameterInfo[] GetIndexParameters() { return new ParameterInfo[0]; }
    public override object GetValue(object obj, BindingFlags invokeAttr, Binder binder, object[] index, CultureInfo culture) { return GetCommand(obj); }
    public override Type PropertyType { get { return typeof(ICommand); } }
    public override Type DeclaringType { get { return method.DeclaringType; } }
    public override object[] GetCustomAttributes(Type attributeType, bool inherit) { return new object[0]; }
    public override object[] GetCustomAttributes(bool inherit) { return attributes; }
    public override bool IsDefined(Type attributeType, bool inherit) { return false; }
    public override string Name { get { return name; } }
    // FIX: previously returned reflectedType.GetType() — i.e. the runtime type
    // of the Type object itself (System.RuntimeType) — instead of the type the
    // property was reflected from.
    public override Type ReflectedType { get { return reflectedType; } }
#endif
}
#endregion
#if SILVERLIGHT
    // Shared cache of CustomType instances — one per concrete view model type —
    // used to expose the generated command properties via ICustomTypeProvider.
    static readonly Dictionary<Type, CustomType> customTypes = new Dictionary<Type, CustomType>();
    CustomType customType;
    static CustomType GetCustomType(Type type, IEnumerable<CommandProperty> properties) {
        return customTypes.GetOrAdd(type, () => new CustomType(type, properties));
    }
    Type ICustomTypeProvider.GetCustomType() {
        // Lazily resolve and memoize the custom type for this instance.
        return customType ?? (customType = GetCustomType(GetType(), commandProperties.Values));
    }
#else
#if !NETFX_CORE
    #region ICustomTypeDescriptor
    // Standard ICustomTypeDescriptor boilerplate: everything delegates to
    // TypeDescriptor except GetProperties(), which appends the generated
    // command properties to the reflected ones.
    AttributeCollection ICustomTypeDescriptor.GetAttributes() {
        return TypeDescriptor.GetAttributes(this, true);
    }
    string ICustomTypeDescriptor.GetClassName() {
        return TypeDescriptor.GetClassName(this, true);
    }
    string ICustomTypeDescriptor.GetComponentName() {
        return TypeDescriptor.GetComponentName(this, true);
    }
    TypeConverter ICustomTypeDescriptor.GetConverter() {
        return TypeDescriptor.GetConverter(this, true);
    }
    EventDescriptor ICustomTypeDescriptor.GetDefaultEvent() {
        return TypeDescriptor.GetDefaultEvent(this, true);
    }
    PropertyDescriptor ICustomTypeDescriptor.GetDefaultProperty() {
        return TypeDescriptor.GetDefaultProperty(this, true);
    }
    object ICustomTypeDescriptor.GetEditor(Type editorBaseType) {
        return TypeDescriptor.GetEditor(this, editorBaseType, true);
    }
    EventDescriptorCollection ICustomTypeDescriptor.GetEvents(Attribute[] attributes) {
        return TypeDescriptor.GetEvents(this, attributes, true);
    }
    EventDescriptorCollection ICustomTypeDescriptor.GetEvents() {
        return TypeDescriptor.GetEvents(this, true);
    }
    PropertyDescriptorCollection ICustomTypeDescriptor.GetProperties(Attribute[] attributes) {
        return TypeDescriptor.GetProperties(this, attributes, true);
    }
    // Memoized: reflected properties plus the synthetic command properties.
    PropertyDescriptorCollection properties;
    PropertyDescriptorCollection ICustomTypeDescriptor.GetProperties() {
        return properties ??
            (properties = new PropertyDescriptorCollection(TypeDescriptor.GetProperties(this, true).Cast<PropertyDescriptor>().Concat(commandProperties.Values).ToArray()));
    }
    object ICustomTypeDescriptor.GetPropertyOwner(PropertyDescriptor pd) {
        return this;
    }
    #endregion
#endif
#endif
#endregion CommandAttributeSupport
#endif
}
/// <summary>
/// Thrown when command-attribute metadata on a view model is invalid — e.g. a
/// CanExecute method whose parameters do not match the command method, or a
/// non-public command method.
/// </summary>
#if !SILVERLIGHT && !NETFX_CORE
[Serializable]
#endif
public class CommandAttributeException : Exception {
    public CommandAttributeException() { }
    public CommandAttributeException(string message)
        : base(message) {
    }
    /// <summary>Initializes the exception with a message and the underlying cause.</summary>
    public CommandAttributeException(string message, Exception innerException)
        : base(message, innerException) {
    }
}
}
| |
//
// ViewsTest.cs
//
// Author:
// Zachary Gramana <[email protected]>
//
// Copyright (c) 2013, 2014 Xamarin Inc (http://www.xamarin.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
/*
* Original iOS version by Jens Alfke
* Ported to Android by Marty Schoch, Traun Leyden
*
* Copyright (c) 2012, 2013, 2014 Couchbase, Inc. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
* except in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the
* License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using Couchbase.Lite;
using Couchbase.Lite.Internal;
using Couchbase.Lite.Util;
using NUnit.Framework;
using Sharpen;
using Newtonsoft.Json.Linq;
using System.Threading;
using Couchbase.Lite.Views;
namespace Couchbase.Lite
{
public class ViewsTest : LiteTestCase
{
public const string Tag = "Views";
[Test]
public void TestViewValueIsEntireDoc()
{
    // A map function may emit the whole document as the row value; the row
    // value must then round-trip back into a dictionary keyed like the doc.
    var view = database.GetView("vu");
    view.SetMap((doc, emit) => emit(doc["_id"], doc), "0.1");
    CreateDocuments(database, 10);

    var queryRows = view.CreateQuery().Run();
    foreach (var queryRow in queryRows) {
        Assert.IsNotNull(queryRow.Value);
        var docAsDict = queryRow.Value.AsDictionary<string, object>();
        Assert.IsNotNull(docAsDict);
        Assert.AreEqual(queryRow.Key, docAsDict["_id"]);
    }
}
[Test]
public void TestLiveQueryUpdateWhenOptionsChanged()
{
    // Index five docs keyed by their "sequence" property.
    var view = database.GetView("vu");
    view.SetMap((doc, emit) =>
        emit(doc.Get("sequence"), null), "1");
    CreateDocuments(database, 5);
    var query = view.CreateQuery();
    var result = query.Run();
    Assert.AreEqual(5, result.Count);
    int expectedKey = 0;
    foreach (var row in result) {
        Assert.AreEqual(expectedKey++, row.Key);
    }
    // A live query fires one initial Changed event carrying all five rows.
    var liveQuery = view.CreateQuery().ToLiveQuery();
    var changeCount = 0;
    liveQuery.Changed += (sender, e) => changeCount++;
    liveQuery.Start();
    Thread.Sleep(1000); // give the background query time to complete
    Assert.AreEqual(1, changeCount);
    Assert.AreEqual(5, liveQuery.Rows.Count);
    expectedKey = 0;
    foreach (var row in liveQuery.Rows) {
        Assert.AreEqual(expectedKey++, row.Key);
    }
    // Changing an option and signalling QueryOptionsChanged() must re-run the
    // query and raise a second Changed event with the narrowed row set (keys 2..4).
    liveQuery.StartKey = 2;
    liveQuery.QueryOptionsChanged();
    Thread.Sleep(1000);
    Assert.AreEqual(2, changeCount);
    Assert.AreEqual(3, liveQuery.Rows.Count);
    expectedKey = 2;
    foreach (var row in liveQuery.Rows) {
        Assert.AreEqual(expectedKey++, row.Key);
    }
    liveQuery.Stop();
}
[Test]
public void TestQueryDefaultIndexUpdateMode()
{
    // A freshly created query defaults to refreshing the index before running.
    var query = database.GetView("aview").CreateQuery();
    Assert.AreEqual(IndexUpdateMode.Before, query.IndexUpdateMode);
}
[Test]
public void TestViewCreation()
{
    // GetExistingView returns null until GetView lazily creates the view.
    Assert.IsNull(database.GetExistingView("aview"));
    var view = database.GetView("aview");
    Assert.IsNotNull(view);
    Assert.AreEqual(database, view.Database);
    Assert.AreEqual("aview", view.Name);
    Assert.IsNull(view.Map);
    Assert.AreEqual(view, database.GetExistingView("aview"));
    // Setting a map function for the first time is a real change.
    var changed = view.SetMapReduce((IDictionary<string, object> document, EmitDelegate emitter)=> { }, null, "1");
    Assert.IsTrue(changed);
    Assert.AreEqual(1, database.GetAllViews().Count);
    Assert.AreEqual(view, database.GetAllViews()[0]);
    // Re-setting the same map with the same version is a no-op.
    changed = view.SetMapReduce((IDictionary<string, object> document, EmitDelegate emitter)=> { }, null, "1");
    Assert.IsFalse(changed);
    // Bumping the version ("1" -> "2") counts as a change again.
    changed = view.SetMapReduce((IDictionary<string, object> document, EmitDelegate emitter)=> { }, null, "2");
    Assert.IsTrue(changed);
}
/// <summary>Inserts <paramref name="props"/> as a new revision, asserting the write succeeded.</summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
private RevisionInternal PutDoc(Database db, IDictionary<string, object> props)
{
    var status = new Status();
    var revision = db.PutRevision(new RevisionInternal(props), null, false, status);
    Assert.IsTrue(status.IsSuccessful);
    return revision;
}
/// <summary>Stores <paramref name="props"/> by creating an untitled document and saving its properties.</summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
private void PutDocViaUntitledDoc(Database db, IDictionary<string, object> props)
{
    db.CreateDocument().PutProperties(props);
}
/// <summary>
/// Inserts five fixed documents — deliberately NOT in sorted-id order — and
/// returns their revisions in insertion order (two, four, one, three, five).
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
private IList<RevisionInternal> PutDocs(Database db)
{
    var result = new List<RevisionInternal>();
    result.AddItem(PutDoc(db, new Dictionary<string, object> { { "_id", "22222" }, { "key", "two" } }));
    result.AddItem(PutDoc(db, new Dictionary<string, object> { { "_id", "44444" }, { "key", "four" } }));
    result.AddItem(PutDoc(db, new Dictionary<string, object> { { "_id", "11111" }, { "key", "one" } }));
    result.AddItem(PutDoc(db, new Dictionary<string, object> { { "_id", "33333" }, { "key", "three" } }));
    result.AddItem(PutDoc(db, new Dictionary<string, object> { { "_id", "55555" }, { "key", "five" } }));
    return result;
}
// http://wiki.apache.org/couchdb/Introduction_to_CouchDB_views#Linked_documents
/// <summary>Inserts three documents forming an ancestor chain (33333 -> 22222 -> 11111).</summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
private IList<RevisionInternal> PutLinkedDocs(Database db)
{
    var result = new List<RevisionInternal>();
    result.AddItem(PutDoc(db, new Dictionary<string, object> {
        { "_id", "11111" }
    }));
    result.AddItem(PutDoc(db, new Dictionary<string, object> {
        { "_id", "22222" },
        { "value", "hello" },
        { "ancestors", new string[] { "11111" } }
    }));
    result.AddItem(PutDoc(db, new Dictionary<string, object> {
        { "_id", "33333" },
        { "value", "world" },
        { "ancestors", new string[] { "22222", "11111" } }
    }));
    return result;
}
/// <summary>
/// Adds <paramref name="n"/> documents with ids "0".."n-1" whose "key" is a
/// 257-element list: 256 "key" fillers followed by "key-i".
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
public virtual void PutNDocs(Database db, int n)
{
    for (var i = 0; i < n; i++)
    {
        var key = new List<string>();
        for (var j = 0; j < 256; j++)
        {
            key.AddItem("key");
        }
        key.AddItem(string.Format("key-{0}", i));
        var doc = new Dictionary<string, object>();
        doc.Put("_id", string.Format("{0}", i));
        doc["key"] = key;
        PutDocViaUntitledDoc(db, doc);
    }
}
/// <summary>
/// Returns the "aview" view with a map function (version "1") that emits
/// doc["key"] with a null value whenever the key is non-null.
/// </summary>
public static View CreateView(Database db)
{
    var view = db.GetView("aview");
    view.SetMapReduce((IDictionary<string, object> document, EmitDelegate emitter) =>
    {
        Assert.IsNotNull(document["_id"]);
        Assert.IsNotNull(document["_rev"]);
        var key = document["key"];
        if (key != null)
        {
            emitter(key, null);
        }
    }, null, "1");
    return view;
}
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
[Test]
public void TestViewIndex()
{
    // Verifies incremental indexing: only new/changed docs are re-mapped, and
    // updates/deletions are reflected in both the index dump and query rows.
    int numTimesMapFunctionInvoked = 0;
    var dict1 = new Dictionary<string, object>();
    dict1["key"] = "one";
    var dict2 = new Dictionary<string, object>();
    dict2["key"] = "two";
    var dict3 = new Dictionary<string, object>();
    dict3["key"] = "three";
    var dictX = new Dictionary<string, object>();
    dictX["clef"] = "quatre"; // has no "key" property, so it must not be indexed
    var rev1 = PutDoc(database, dict1);
    var rev2 = PutDoc(database, dict2);
    var rev3 = PutDoc(database, dict3);
    PutDoc(database, dictX);
    var view = database.GetView("aview");
    var numTimesInvoked = 0;
    MapDelegate mapBlock = (document, emitter) =>
    {
        numTimesInvoked += 1;
        Assert.IsNotNull(document["_id"]);
        Assert.IsNotNull(document["_rev"]);
        if (document.ContainsKey("key") && document["key"] != null)
        {
            emitter(document["key"], null);
        }
    };
    view.SetMap(mapBlock, "1");
    //Assert.AreEqual(1, view.Id);
    Assert.IsTrue(view.IsStale);
    view.UpdateIndex();
    IList<IDictionary<string, object>> dumpResult = view.Storage.Dump().ToList();
    Log.V(Tag, "View dump: " + dumpResult);
    // The dump is ordered by key; "seq" records each doc's sequence number.
    Assert.AreEqual(3, dumpResult.Count);
    Assert.AreEqual("\"one\"", dumpResult[0]["key"]);
    Assert.AreEqual(1, dumpResult[0]["seq"]);
    Assert.AreEqual("\"two\"", dumpResult[2]["key"]);
    Assert.AreEqual(2, dumpResult[2]["seq"]);
    Assert.AreEqual("\"three\"", dumpResult[1]["key"]);
    Assert.AreEqual(3, dumpResult[1]["seq"]);
    // Re-indexing a fresh index is a no-op.
    Assert.IsFalse(view.IsStale);
    view.UpdateIndex();
    // Now add a doc and update a doc:
    var threeUpdated = new RevisionInternal(rev3.GetDocId(), rev3.GetRevId(), false);
    numTimesMapFunctionInvoked = numTimesInvoked;
    var newdict3 = new Dictionary<string, object>();
    newdict3["key"] = "3hree";
    threeUpdated.SetProperties(newdict3);
    Status status = new Status();
    rev3 = database.PutRevision(threeUpdated, rev3.GetRevId(), false, status);
    Assert.IsTrue(status.IsSuccessful);
    // Reindex again:
    Assert.IsTrue(view.IsStale);
    view.UpdateIndex();
    // Make sure the map function was only invoked one more time (for the document that was added)
    Assert.AreEqual(numTimesMapFunctionInvoked + 1, numTimesInvoked);
    var dict4 = new Dictionary<string, object>();
    dict4["key"] = "four";
    var rev4 = PutDoc(database, dict4);
    var twoDeleted = new RevisionInternal(rev2.GetDocId(), rev2.GetRevId(), true);
    database.PutRevision(twoDeleted, rev2.GetRevId(), false, status);
    Assert.IsTrue(status.IsSuccessful);
    // Reindex again:
    Assert.IsTrue(view.IsStale);
    view.UpdateIndex();
    // "two" is gone; "3hree" (seq 5) and "four" (seq 6) replaced/joined it.
    dumpResult = view.Storage.Dump().ToList();
    Log.V(Tag, "View dump: " + dumpResult);
    Assert.AreEqual(3, dumpResult.Count);
    Assert.AreEqual("\"one\"", dumpResult[2]["key"]);
    Assert.AreEqual(1, dumpResult[2]["seq"]);
    Assert.AreEqual("\"3hree\"", dumpResult[0]["key"]);
    Assert.AreEqual(5, dumpResult[0]["seq"]);
    Assert.AreEqual("\"four\"", dumpResult[1]["key"]);
    Assert.AreEqual(6, dumpResult[1]["seq"]);
    // Now do a real query:
    IList<QueryRow> rows = view.QueryWithOptions(null).ToList();
    Assert.AreEqual(3, rows.Count);
    Assert.AreEqual("one", rows[2].Key);
    Assert.AreEqual(rev1.GetDocId(), rows[2].DocumentId);
    Assert.AreEqual("3hree", rows[0].Key);
    Assert.AreEqual(rev3.GetDocId(), rows[0].DocumentId);
    Assert.AreEqual("four", rows[1].Key);
    Assert.AreEqual(rev4.GetDocId(), rows[1].DocumentId);
    view.DeleteIndex();
}
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
[Test]
public void TestViewQuery()
{
    // Covers plain, start/end-key (inclusive and exclusive), descending, and
    // explicit-keys queries against the "aview" index over the five PutDocs docs.
    PutDocs(database);
    var view = CreateView(database);
    view.UpdateIndex();
    // Query all rows:
    QueryOptions options = new QueryOptions();
    IList<QueryRow> rows = view.QueryWithOptions(options).ToList();
    // NOTE(review): the expectedRows lists below are built but never compared
    // wholesale against the results; only the per-row Key asserts are effective.
    var expectedRows = new List<object>();
    var dict5 = new Dictionary<string, object>();
    dict5["id"] = "55555";
    dict5["key"] = "five";
    expectedRows.AddItem(dict5);
    var dict4 = new Dictionary<string, object>();
    dict4["id"] = "44444";
    dict4["key"] = "four";
    expectedRows.AddItem(dict4);
    var dict1 = new Dictionary<string, object>();
    dict1["id"] = "11111";
    dict1["key"] = "one";
    expectedRows.AddItem(dict1);
    var dict3 = new Dictionary<string, object>();
    dict3["id"] = "33333";
    dict3["key"] = "three";
    expectedRows.AddItem(dict3);
    var dict2 = new Dictionary<string, object>();
    dict2["id"] = "22222";
    dict2["key"] = "two";
    expectedRows.AddItem(dict2);
    // Rows come back sorted by key: five, four, one, three, two.
    Assert.AreEqual(5, rows.Count);
    Assert.AreEqual(dict5["key"], rows[0].Key);
    Assert.AreEqual(dict4["key"], rows[1].Key);
    Assert.AreEqual(dict1["key"], rows[2].Key);
    Assert.AreEqual(dict3["key"], rows[3].Key);
    Assert.AreEqual(dict2["key"], rows[4].Key);
    // Start/end key query:
    options = new QueryOptions();
    options.StartKey = "a";
    options.EndKey = "one";
    rows = view.QueryWithOptions(options).ToList();
    expectedRows = new List<object>();
    expectedRows.AddItem(dict5);
    expectedRows.AddItem(dict4);
    expectedRows.AddItem(dict1);
    Assert.AreEqual(3, rows.Count);
    Assert.AreEqual(dict5["key"], rows[0].Key);
    Assert.AreEqual(dict4["key"], rows[1].Key);
    Assert.AreEqual(dict1["key"], rows[2].Key);
    // Start/end query without inclusive end: "one" drops off.
    options.InclusiveEnd = false;
    rows = view.QueryWithOptions(options).ToList();
    expectedRows = new List<object>();
    expectedRows.AddItem(dict5);
    expectedRows.AddItem(dict4);
    Assert.AreEqual(2, rows.Count);
    Assert.AreEqual(dict5["key"], rows[0].Key);
    Assert.AreEqual(dict4["key"], rows[1].Key);
    // Reversed: descending order swaps the roles of StartKey and EndKey.
    options.Descending = true;
    options.StartKey = "o";
    options.EndKey = "five";
    options.InclusiveEnd = true;
    rows = view.QueryWithOptions(options).ToList();
    expectedRows = new List<object>();
    expectedRows.AddItem(dict4);
    expectedRows.AddItem(dict5);
    Assert.AreEqual(2, rows.Count);
    Assert.AreEqual(dict4["key"], rows[0].Key);
    Assert.AreEqual(dict5["key"], rows[1].Key);
    // Reversed, no inclusive end: "five" drops off.
    options.InclusiveEnd = false;
    rows = view.QueryWithOptions(options).ToList();
    expectedRows = new List<object>();
    expectedRows.AddItem(dict4);
    Assert.AreEqual(1, rows.Count);
    Assert.AreEqual(dict4["key"], rows[0].Key);
    // Specific keys: (note that rows should be in same order as input keys, not sorted)
    options = new QueryOptions();
    var keys = new List<object>();
    keys.AddItem("two");
    keys.AddItem("four");
    options.Keys = keys;
    rows = view.QueryWithOptions(options).ToList();
    expectedRows = new List<object>();
    expectedRows.AddItem(dict4);
    expectedRows.AddItem(dict2);
    Assert.AreEqual(2, rows.Count);
    Assert.AreEqual(dict2["key"], rows[0].Key);
    Assert.AreEqual(dict4["key"], rows[1].Key);
}
[Test]
public void TestLiveQueryStartEndKey()
{
    // StartKey/EndKey set on the underlying query must carry over to the live
    // query and restrict it to the single "one" row once docs exist.
    var view = CreateView(database);
    var query = view.CreateQuery();
    query.StartKey = "one";
    // U+FEFF sorts after any plain suffix of "one", making this an upper bound.
    query.EndKey = "one\uFEFF";
    var liveQuery = query.ToLiveQuery();
    Assert.IsNotNull(liveQuery.StartKey);
    Assert.IsNotNull(liveQuery.EndKey);
    liveQuery.Start();
    Thread.Sleep(2000); // allow the initial (empty) run to complete
    Assert.AreEqual(0, liveQuery.Rows.Count);
    PutDocs(database);
    Thread.Sleep(2000); // allow the live query to pick up the change
    Assert.AreEqual(1, liveQuery.Rows.Count);
}
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
[Test]
public void TestAllDocsQuery()
{
    // Exercises the _all_docs view: plain query, limit, skip, key ranges,
    // explicit keys, deleted docs, and ShowConflicts/OnlyConflicts modes.
    var docs = PutDocs(database);
    // Expected row shape for each doc: { id, key, value: { rev } }.
    var expectedRowBase = new List<IDictionary<string, object>>(docs.Count);
    foreach (RevisionInternal rev in docs)
    {
        expectedRowBase.Add(new Dictionary<string, object> {
            { "id", rev.GetDocId() },
            { "key", rev.GetDocId() },
            { "value", new Dictionary<string, object> {
                    { "rev", rev.GetRevId() }
                }
            }
        });
    }
    // Create a conflict, won by the old revision:
    var props = new Dictionary<string, object> {
        { "_id", "44444" },
        { "_rev", "1-00" }, // lower revID, will lose conflict
        { "key", "40ur" }
    };
    var leaf2 = new RevisionInternal(props);
    database.ForceInsert(leaf2, null, null);
    Assert.AreEqual(docs[1].GetRevId(), database.GetDocument("44444", null, true).GetRevId());
    // Query all rows (ordered by doc id: 11111, 22222, 33333, 44444, 55555):
    var options = new QueryOptions();
    var allDocs = database.GetAllDocs(options);
    var expectedRows = new List<IDictionary<string, object>> {
        expectedRowBase[2],
        expectedRowBase[0],
        expectedRowBase[3],
        expectedRowBase[1],
        expectedRowBase[4]
    };
    Assert.AreEqual(expectedRows, RowsToDicts(allDocs));
    // Limit:
    options.Limit = 1;
    allDocs = database.GetAllDocs(options);
    expectedRows = new List<IDictionary<string, object>>() { expectedRowBase[2] };
    Assert.AreEqual(expectedRows, RowsToDicts(allDocs));
    // Limit+Skip:
    options.Skip = 2;
    allDocs = database.GetAllDocs(options);
    expectedRows = new List<IDictionary<string, object>>() { expectedRowBase[3] };
    Assert.AreEqual(expectedRows, RowsToDicts(allDocs));
    // Start/end key query:
    options = new QueryOptions();
    options.StartKey = "2";
    options.EndKey = "44444";
    allDocs = database.GetAllDocs(options);
    expectedRows = new List<IDictionary<string, object>>() { expectedRowBase[0], expectedRowBase[3], expectedRowBase[1] };
    Assert.AreEqual(expectedRows, RowsToDicts(allDocs));
    // Start/end query without inclusive end:
    options.InclusiveEnd = false;
    allDocs = database.GetAllDocs(options);
    expectedRows = new List<IDictionary<string, object>>() { expectedRowBase[0], expectedRowBase[3] };
    Assert.AreEqual(expectedRows, RowsToDicts(allDocs));
    // Get zero specific documents:
    options = new QueryOptions();
    options.Keys = new List<object>();
    allDocs = database.GetAllDocs(options);
    Assert.IsNull(allDocs);
    // Get specific documents:
    options = new QueryOptions();
    options.Keys = new List<object> {
        expectedRowBase[2].GetCast<string>("id"),
        expectedRowBase[3].GetCast<string>("id")
    };
    allDocs = database.GetAllDocs(options);
    expectedRows = new List<IDictionary<string, object>>() { expectedRowBase[2], expectedRowBase[3] };
    Assert.AreEqual(expectedRows, RowsToDicts(allDocs));
    // Delete a document:
    var del = docs[0];
    del = new RevisionInternal(del.GetDocId(), del.GetRevId(), true);
    var status = new Status();
    del = database.PutRevision(del, del.GetRevId(), false, status);
    Assert.AreEqual(StatusCode.Ok, status.Code);
    // Get deleted doc, and one bogus one: unknown keys yield "error": "not_found",
    // deleted docs yield a value with "deleted": true.
    options = new QueryOptions();
    options.Keys = new List<object> { "BOGUS", expectedRowBase[0].GetCast<string>("id") };
    allDocs = database.GetAllDocs(options);
    var expectedResult = new List<IDictionary<string, object>> {
        new Dictionary<string, object> {
            { "key", "BOGUS" },
            { "error", "not_found" }
        },
        new Dictionary<string, object> {
            { "id", del.GetDocId() },
            { "key", del.GetDocId() },
            { "value", new Dictionary<string, object> {
                    { "rev", del.GetRevId() },
                    { "deleted", true }
                }
            }
        }
    };
    Assert.AreEqual(expectedResult, RowsToDicts(allDocs));
    // Get conflicts: the conflicted doc's value carries a "_conflicts" list.
    options = new QueryOptions();
    options.AllDocsMode = AllDocsMode.ShowConflicts;
    allDocs = database.GetAllDocs(options);
    var curRevId = docs[1].GetRevId();
    var expectedConflict1 = new Dictionary<string, object> {
        { "id", "44444" },
        { "key", "44444" },
        { "value", new Dictionary<string, object> {
                { "rev", curRevId },
                { "_conflicts", new List<string> {
                        curRevId, "1-00"
                    }
                }
            }
        }
    };
    expectedRows = new List<IDictionary<string, object>>() { expectedRowBase[2], expectedRowBase[3], expectedConflict1,
        expectedRowBase[4]
    };
    Assert.AreEqual(expectedRows, RowsToDicts(allDocs));
    // Get _only_ conflicts:
    options.AllDocsMode = AllDocsMode.OnlyConflicts;
    allDocs = database.GetAllDocs(options);
    expectedRows = new List<IDictionary<string, object>>() { expectedConflict1 };
    Assert.AreEqual(expectedRows, RowsToDicts(allDocs));
}
// Builds the canonical query-result envelope: { rows, total_rows, offset }.
private IDictionary<string, object> CreateExpectedQueryResult(IList<QueryRow> rows, int offset)
{
    return new Dictionary<string, object> {
        { "rows", rows },
        { "total_rows", rows.Count },
        { "offset", offset }
    };
}
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
[Test]
public void TestViewReduce()
{
    // Maps _id -> cost for three docs and reduces with Sum; the single reduced
    // row must equal the total (8.99 + 1.95 + 6.50 = 17.44).
    var docProperties1 = new Dictionary<string, object>();
    docProperties1["_id"] = "CD";
    docProperties1["cost"] = 8.99;
    PutDoc(database, docProperties1);
    var docProperties2 = new Dictionary<string, object>();
    docProperties2["_id"] = "App";
    docProperties2["cost"] = 1.95;
    PutDoc(database, docProperties2);
    IDictionary<string, object> docProperties3 = new Dictionary<string, object>();
    docProperties3["_id"] = "Dessert";
    docProperties3["cost"] = 6.50;
    PutDoc(database, docProperties3);
    View view = database.GetView("totaler");
    view.SetMapReduce((document, emitter) => {
        Assert.IsNotNull (document.Get ("_id"));
        Assert.IsNotNull (document.Get ("_rev"));
        object cost = document.Get ("cost");
        if (cost != null) {
            emitter (document.Get ("_id"), cost);
        }
    }, BuiltinReduceFunctions.Sum, "1");
    view.UpdateIndex();
    // Dump is ordered by key; values are stored JSON-encoded ("1.95", etc.).
    IList<IDictionary<string, object>> dumpResult = view.Storage.Dump().ToList();
    Log.V(Tag, "View dump: " + dumpResult);
    Assert.AreEqual(3, dumpResult.Count);
    Assert.AreEqual("\"App\"", dumpResult[0]["key"]);
    Assert.AreEqual("1.95", dumpResult[0]["val"]);
    Assert.AreEqual(2, dumpResult[0]["seq"]);
    Assert.AreEqual("\"CD\"", dumpResult[1]["key"]);
    Assert.AreEqual("8.99", dumpResult[1]["val"]);
    Assert.AreEqual(1, dumpResult[1]["seq"]);
    Assert.AreEqual("\"Dessert\"", dumpResult[2]["key"]);
    Assert.AreEqual("6.5", dumpResult[2]["val"]);
    Assert.AreEqual(3, dumpResult[2]["seq"]);
    QueryOptions options = new QueryOptions();
    options.Reduce = true;
    IList<QueryRow> reduced = view.QueryWithOptions(options).ToList();
    Assert.AreEqual(1, reduced.Count);
    object value = reduced[0].Value;
    double numberValue = (double)value;
    // Compare with a tolerance: the sum is a floating-point accumulation.
    Assert.IsTrue(Math.Abs(numberValue - 17.44) < 0.001);
}
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
[Test]
public void TestIndexUpdateMode()
{
    // Before: index is refreshed prior to the query. Never: query runs against
    // the stale index. After: query returns first, then reindexes in background.
    View view = CreateView(database);
    Query query = view.CreateQuery();
    query.IndexUpdateMode = IndexUpdateMode.Before;
    int numRowsBefore = query.Run().Count;
    Assert.AreEqual(0, numRowsBefore);
    // do a query and force re-indexing, number of results should be +1
    PutNDocs(database, 1);
    query.IndexUpdateMode = IndexUpdateMode.Before;
    Assert.AreEqual(1, query.Run().Count);
    // do a query without re-indexing, number of results should be the same
    PutNDocs(database, 4);
    query.IndexUpdateMode = IndexUpdateMode.Never;
    Assert.AreEqual(1, query.Run().Count);
    // do a query and force re-indexing, number of results should be +4
    query.IndexUpdateMode = IndexUpdateMode.Before;
    Assert.AreEqual(5, query.Run().Count);
    // do a query which will kick off an async index
    PutNDocs(database, 1);
    query.IndexUpdateMode = IndexUpdateMode.After;
    query.Run();
    // wait until indexing is (hopefully) done
    try
    {
        Thread.Sleep(1 * 1000);
    }
    catch (Exception e)
    {
        Sharpen.Runtime.PrintStackTrace(e);
    }
    Assert.AreEqual(6, query.Run().Count);
}
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
[Test]
public void TestViewGrouped()
{
    // Maps [artist, album, track] -> time and sums with Sum, then checks the
    // reduced totals with no grouping, full grouping, and GroupLevel 1 and 2.
    IDictionary<string, object> docProperties1 = new Dictionary<string, object>();
    docProperties1["_id"] = "1";
    docProperties1["artist"] = "Gang Of Four";
    docProperties1["album"] = "Entertainment!";
    docProperties1["track"] = "Ether";
    docProperties1["time"] = 231;
    PutDoc(database, docProperties1);
    IDictionary<string, object> docProperties2 = new Dictionary<string, object>();
    docProperties2["_id"] = "2";
    docProperties2["artist"] = "Gang Of Four";
    docProperties2["album"] = "Songs Of The Free";
    docProperties2["track"] = "I Love A Man In Uniform";
    docProperties2["time"] = 248;
    PutDoc(database, docProperties2);
    IDictionary<string, object> docProperties3 = new Dictionary<string, object>();
    docProperties3["_id"] = "3";
    docProperties3["artist"] = "Gang Of Four";
    docProperties3["album"] = "Entertainment!";
    docProperties3["track"] = "Natural's Not In It";
    docProperties3["time"] = 187;
    PutDoc(database, docProperties3);
    IDictionary<string, object> docProperties4 = new Dictionary<string, object>();
    docProperties4["_id"] = "4";
    docProperties4["artist"] = "PiL";
    docProperties4["album"] = "Metal Box";
    docProperties4["track"] = "Memories";
    docProperties4["time"] = 309;
    PutDoc(database, docProperties4);
    IDictionary<string, object> docProperties5 = new Dictionary<string, object>();
    docProperties5["_id"] = "5";
    docProperties5["artist"] = "Gang Of Four";
    docProperties5["album"] = "Entertainment!";
    docProperties5["track"] = "Not Great Men";
    docProperties5["time"] = 187;
    PutDoc(database, docProperties5);
    View view = database.GetView("grouper");
    view.SetMapReduce((document, emitter) =>
    {
        IList<object> key = new List<object>();
        key.AddItem(document["artist"]);
        key.AddItem(document["album"]);
        key.AddItem(document["track"]);
        emitter(key, document["time"]);
    }, BuiltinReduceFunctions.Sum, "1");
    view.UpdateIndex();
    // No grouping: a single row with a null key and the grand total (1162).
    QueryOptions options = new QueryOptions();
    options.Reduce = true;
    IList<QueryRow> rows = view.QueryWithOptions(options).ToList();
    IList<IDictionary<string, object>> expectedRows = new List<IDictionary<string, object>>();
    IDictionary<string, object> row1 = new Dictionary<string, object>();
    row1["key"] = null;
    row1["value"] = 1162.0;
    expectedRows.AddItem(row1);
    Assert.AreEqual(row1["key"], rows[0].Key);
    Assert.AreEqual(row1["value"], rows[0].Value);
    // Full grouping: one row per distinct [artist, album, track] key.
    options.Group = true;
    rows = view.QueryWithOptions(options).ToList();
    expectedRows = new List<IDictionary<string, object>>();
    row1 = new Dictionary<string, object>();
    IList<string> key1 = new List<string>();
    key1.AddItem("Gang Of Four");
    key1.AddItem("Entertainment!");
    key1.AddItem("Ether");
    row1["key"] = key1;
    row1["value"] = 231.0;
    expectedRows.AddItem(row1);
    IDictionary<string, object> row2 = new Dictionary<string, object>();
    IList<string> key2 = new List<string>();
    key2.AddItem("Gang Of Four");
    key2.AddItem("Entertainment!");
    key2.AddItem("Natural's Not In It");
    row2["key"] = key2;
    row2["value"] = 187.0;
    expectedRows.AddItem(row2);
    IDictionary<string, object> row3 = new Dictionary<string, object>();
    IList<string> key3 = new List<string>();
    key3.AddItem("Gang Of Four");
    key3.AddItem("Entertainment!");
    key3.AddItem("Not Great Men");
    row3["key"] = key3;
    row3["value"] = 187.0;
    expectedRows.AddItem(row3);
    IDictionary<string, object> row4 = new Dictionary<string, object>();
    IList<string> key4 = new List<string>();
    key4.AddItem("Gang Of Four");
    key4.AddItem("Songs Of The Free");
    key4.AddItem("I Love A Man In Uniform");
    row4["key"] = key4;
    row4["value"] = 248.0;
    expectedRows.AddItem(row4);
    IDictionary<string, object> row5 = new Dictionary<string, object>();
    IList<string> key5 = new List<string>();
    key5.AddItem("PiL");
    key5.AddItem("Metal Box");
    key5.AddItem("Memories");
    row5["key"] = key5;
    row5["value"] = 309.0;
    expectedRows.AddItem(row5);
    Assert.AreEqual(row1["key"], rows[0].Key.AsList<string>());
    Assert.AreEqual(row1["value"], rows[0].Value);
    Assert.AreEqual(row2["key"], rows[1].Key.AsList<string>());
    Assert.AreEqual(row2["value"], rows[1].Value);
    Assert.AreEqual(row3["key"], rows[2].Key.AsList<string>());
    Assert.AreEqual(row3["value"], rows[2].Value);
    Assert.AreEqual(row4["key"], rows[3].Key.AsList<string>());
    Assert.AreEqual(row4["value"], rows[3].Value);
    Assert.AreEqual(row5["key"], rows[4].Key.AsList<string>());
    Assert.AreEqual(row5["value"], rows[4].Value);
    // GroupLevel 1: keys truncated to [artist]; times summed per artist.
    options.GroupLevel = 1;
    rows = view.QueryWithOptions(options).ToList();
    expectedRows = new List<IDictionary<string, object>>();
    row1 = new Dictionary<string, object>();
    key1 = new List<string>();
    key1.AddItem("Gang Of Four");
    row1["key"] = key1;
    row1["value"] = 853.0;
    expectedRows.AddItem(row1);
    row2 = new Dictionary<string, object>();
    key2 = new List<string>();
    key2.AddItem("PiL");
    row2["key"] = key2;
    row2["value"] = 309.0;
    expectedRows.AddItem(row2);
    Assert.AreEqual(row1["key"], rows[0].Key.AsList<object>());
    Assert.AreEqual(row1["value"], rows[0].Value);
    Assert.AreEqual(row2["key"], rows[1].Key.AsList<object>());
    Assert.AreEqual(row2["value"], rows[1].Value);
    // GroupLevel 2: keys truncated to [artist, album]; times summed per album.
    options.GroupLevel = 2;
    rows = view.QueryWithOptions(options).ToList();
    expectedRows = new List<IDictionary<string, object>>();
    row1 = new Dictionary<string, object>();
    key1 = new List<string>();
    key1.AddItem("Gang Of Four");
    key1.AddItem("Entertainment!");
    row1["key"] = key1;
    row1["value"] = 605.0;
    expectedRows.AddItem(row1);
    row2 = new Dictionary<string, object>();
    key2 = new List<string>();
    key2.AddItem("Gang Of Four");
    key2.AddItem("Songs Of The Free");
    row2["key"] = key2;
    row2["value"] = 248.0;
    expectedRows.AddItem(row2);
    row3 = new Dictionary<string, object>();
    key3 = new List<string>();
    key3.AddItem("PiL");
    key3.AddItem("Metal Box");
    row3["key"] = key3;
    row3["value"] = 309.0;
    expectedRows.AddItem(row3);
    Assert.AreEqual(row1["key"], rows[0].Key.AsList<object>());
    Assert.AreEqual(row1["value"], rows[0].Value);
    Assert.AreEqual(row2["key"], rows[1].Key.AsList<object>());
    Assert.AreEqual(row2["value"], rows[1].Value);
    Assert.AreEqual(row3["key"], rows[2].Key.AsList<object>());
    Assert.AreEqual(row3["value"], rows[2].Value);
}
/// <summary>
/// Verifies that a grouped (GroupLevel = 1) reduce query buckets string keys
/// by their first letter and sums the emitted values per bucket.
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
[Test]
public void TestViewGroupedStrings()
{
    // Five docs whose names start with A (x2), N, J (x2).
    foreach (var personName in new[] { "Alice", "Albert", "Naomi", "Jens", "Jed" })
    {
        PutDoc(database, new Dictionary<string, object> { { "name", personName } });
    }
    View view = database.GetView("default/names");
    // Emit the first letter of each name with a count of 1; the reduce sums the counts.
    view.SetMapReduce((document, emitter) =>
    {
        string name = (string)document["name"];
        if (name != null)
        {
            emitter(name.Substring(0, 1), 1);
        }
    }, BuiltinReduceFunctions.Sum, "1.0");
    view.UpdateIndex();
    QueryOptions options = new QueryOptions();
    options.GroupLevel = 1;
    IList<QueryRow> rows = view.QueryWithOptions(options).ToList();
    // Expected buckets in key order: A=2, J=2, N=1.
    var expected = new[]
    {
        new KeyValuePair<string, object>("A", 2),
        new KeyValuePair<string, object>("J", 2),
        new KeyValuePair<string, object>("N", 1)
    };
    for (int i = 0; i < expected.Length; i++)
    {
        Assert.AreEqual(expected[i].Key, rows[i].Key);
        Assert.AreEqual(expected[i].Value, rows[i].Value);
    }
}
/// <summary>
/// Checks that view keys come back in CouchDB's unicode collation order.
/// Based on CouchDB's "view_collation.js" test.
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
[Test]
public void TestViewCollation()
{
    // testKeys is listed in the exact order the view is expected to return
    // them: null < false < true < numbers < strings < arrays (arrays compare
    // element-wise, shorter first).
    IList<object> testKeys = new List<object>
    {
        null,
        false,
        true,
        0,
        2.5,
        10,
        " ",
        "_",
        "~",
        "a",
        "A",
        "aa",
        "b",
        "B",
        "ba",
        "bb",
        new List<object> { "a" },
        new List<object> { "b" },
        new List<object> { "b", "c" },
        new List<object> { "b", "c", "a" },
        new List<object> { "b", "d" },
        new List<object> { "b", "d", "e" }
    };
    // Insert one doc per key; the _id is just the key's position in the list.
    int i = 0;
    foreach (object key in testKeys)
    {
        var docProperties = new Dictionary<string, object>
        {
            { "_id", Sharpen.Extensions.ToString(i++) },
            { "name", key }
        };
        PutDoc(database, docProperties);
    }
    View view = database.GetView("default/names");
    view.SetMapReduce((IDictionary<string, object> document, EmitDelegate emitter) =>
        emitter(document["name"], null), null, "1.0");
    QueryOptions options = new QueryOptions();
    IList<QueryRow> rows = view.QueryWithOptions(options).ToList();
    // The rows must come back sorted exactly like testKeys.
    i = 0;
    foreach (QueryRow row in rows)
    {
        Assert.AreEqual(testKeys[i++], row.Key);
    }
}
/// <summary>
/// Checks that with raw collation the view returns keys in raw (binary JSON)
/// order rather than CouchDB's unicode collation order. Based on CouchDB's
/// "view_collation.js" test.
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
[Test]
public void TestViewCollationRaw()
{
    // testKeys is listed in the exact order raw collation is expected to
    // produce: numbers, then false/null/true, then arrays, then strings in
    // case-sensitive (ASCII) order.
    IList<object> testKeys = new List<object>
    {
        0,
        2.5,
        10,
        false,
        null,
        true,
        new List<object> { "a" },
        new List<object> { "b" },
        new List<object> { "b", "c" },
        new List<object> { "b", "c", "a" },
        new List<object> { "b", "d" },
        new List<object> { "b", "d", "e" },
        " ",
        "A",
        "B",
        "_",
        "a",
        "aa",
        "b",
        "ba",
        "bb",
        "~"
    };
    // Insert one doc per key; the _id is just the key's position in the list.
    int i = 0;
    foreach (object key in testKeys)
    {
        var docProperties = new Dictionary<string, object>
        {
            { "_id", Sharpen.Extensions.ToString(i++) },
            { "name", key }
        };
        PutDoc(database, docProperties);
    }
    View view = database.GetView("default/names");
    view.SetMapReduce((document, emitter) =>
        emitter(document["name"], null), null, "1.0");
    view.Collation = ViewCollation.Raw;
    QueryOptions options = new QueryOptions();
    IList<QueryRow> rows = view.QueryWithOptions(options).ToList();
    // The rows must come back sorted exactly like testKeys.
    i = 0;
    foreach (QueryRow row in rows)
    {
        Assert.AreEqual(testKeys[i++], row.Key);
    }
    database.Close();
}
/// <summary>
/// Indexes a small batch of documents and verifies that an unrestricted
/// query returns at least one row.
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
public virtual void TestLargerViewQuery()
{
    PutNDocs(database, 4);
    var view = CreateView(database);
    view.UpdateIndex();
    // Query all rows with default options.
    var allRows = view.QueryWithOptions(new QueryOptions()).ToList();
    Assert.IsNotNull(allRows);
    Assert.IsTrue(allRows.Count > 0);
}
/// <summary>
/// Verifies CouchDB-style linked documents: when a row's emitted value is a
/// dictionary containing "_id", querying with IncludeDocs = true should return
/// the document named by that "_id" rather than the emitting document.
/// </summary>
/// <exception cref="Couchbase.Lite.CouchbaseLiteException"></exception>
public virtual void TestViewLinkedDocs()
{
PutLinkedDocs(database);
View view = database.GetView("linked");
view.SetMapReduce((document, emitter) =>
{
// Every doc with a "value" emits [value, 0] with no linked doc.
if (document.ContainsKey("value"))
{
emitter(new object[] { document["value"], 0 }, null);
}
// Each ancestor is emitted as [value, generation] whose row value links
// back to the ancestor's _id.
if (document.ContainsKey("ancestors"))
{
IList<object> ancestors = (IList<object>)document["ancestors"];
for (int i = 0; i < ancestors.Count; i++)
{
IDictionary<string, object> value = new Dictionary<string, object>();
value["_id"] = ancestors[i];
emitter(new object[] { document["value"], i + 1 }, value);
}
}
}, null, "1.0");
view.UpdateIndex();
QueryOptions options = new QueryOptions();
options.IncludeDocs = true;
// required for linked documents
IList<QueryRow> rows = view.QueryWithOptions(options).ToList();
Assert.IsNotNull(rows);
Assert.AreEqual(5, rows.Count);
// Each entry: { row id, key[0], key[1], linked _id (null = no link), doc _id }.
object[][] expected = new object[][] {
new object[] { "22222", "hello", 0, null, "22222" },
new object[] { "22222", "hello", 1, "11111", "11111" },
new object[] { "33333", "world", 0, null, "33333" },
new object[] { "33333", "world", 1, "22222", "22222" },
new object[] { "33333", "world", 2, "11111", "11111" } };
for (int i = 0; i < rows.Count; i++)
{
QueryRow row = rows[i];
IDictionary<string, object> rowAsJson = row.AsJSONDictionary();
Log.D(Tag, string.Empty + rowAsJson);
IList<object> key = (IList<object>)rowAsJson["key"];
IDictionary<string, object> doc = (IDictionary<string, object>)rowAsJson.Get("doc");
string id = (string)rowAsJson["id"];
Assert.AreEqual(expected[i][0], id);
Assert.AreEqual(2, key.Count);
Assert.AreEqual(expected[i][1], key[0]);
Assert.AreEqual(expected[i][2], key[1]);
if (expected[i][3] == null)
{
// No ancestor link expected for this row, so the value must be null.
Assert.IsNull(row.Value);
}
else
{
// The row value carries the linked document's id.
Assert.AreEqual(expected[i][3], ((IDictionary<string, object>)row.Value)["_id"]);
}
// With IncludeDocs, "doc" is the linked doc when a link exists, else the emitter.
Assert.AreEqual(expected[i][4], doc["_id"]);
}
}
[Test]
public void TestViewUpdateIndexWithLiveQuery()
{
// A live query over a view that maps each doc's "name". Only one document is
// ever created (later revisions replace it rather than adding docs), so every
// non-empty change notification must contain exactly one row.
var view = database.GetView("TestViewUpdateWithLiveQuery");
MapDelegate mapBlock = (document, emitter) => emitter(document["name"], null);
view.SetMap(mapBlock, "1.0");
var rowCountAlwaysOne = true;
var liveQuery = view.CreateQuery().ToLiveQuery();
liveQuery.Changed += (sender, e) =>
{
// Ignore empty intermediate results; any non-empty result with more than
// one row means the index duplicated the document.
var count = e.Rows.Count;
if (count > 0)
{
rowCountAlwaysOne = rowCountAlwaysOne && (count == 1);
}
};
liveQuery.Start();
var properties = new Dictionary<string, object>();
properties.Put("name", "test");
SavedRevision rev = null;
database.RunInTransaction(() =>
{
var doc = database.CreateDocument();
rev = doc.PutProperties(properties);
return true;
});
// Pile 50 revisions onto the same document to force repeated index updates.
for (var i = 0; i < 50; i++) {
rev = rev.CreateRevision(properties);
}
// Sleep to ensure that the LiveQuery is done all of its async operations.
// NOTE(review): a fixed 8s sleep is slow and potentially flaky; an event or
// countdown wait would be more reliable — confirm before changing.
Thread.Sleep(8000);
liveQuery.Stop();
Assert.IsTrue(rowCountAlwaysOne);
}
[Test]
public void TestRunLiveQueriesWithReduce()
{
// Two live queries over two identical sum-reduce views; each should
// eventually report a single reduced row equal to the document count.
var view = database.GetView("vu");
view.SetMapReduce((document, emit) => emit(document["sequence"], 1),
BuiltinReduceFunctions.Sum, "1");
var query = view.CreateQuery().ToLiveQuery();
var view1 = database.GetView("vu1");
view1.SetMapReduce((document, emit) => emit(document["sequence"], 1),
BuiltinReduceFunctions.Sum, "1");
var query1 = view1.CreateQuery().ToLiveQuery();
const Int32 numDocs = 10;
CreateDocumentsAsync(database, numDocs);
// Rows are not available before the live query is started.
Assert.IsNull(query.Rows);
query.Start();
var gotExpectedQueryResult = new CountdownEvent(1);
// NOTE(review): the handler is attached after Start(); if a change event can
// fire in between, the signal could be missed — confirm LiveQuery's event
// semantics (the 60s timeout below suggests this is tolerated in practice).
query.Changed += (sender, e) =>
{
Assert.IsNull(e.Error);
// The reduced result is a single row whose value is the doc count.
if (e.Rows.Count == 1 && Convert.ToInt32(e.Rows.GetRow(0).Value) == numDocs)
{
gotExpectedQueryResult.Signal();
}
};
var success = gotExpectedQueryResult.Wait(TimeSpan.FromSeconds(60));
Assert.IsTrue(success);
query.Stop();
query1.Start();
CreateDocumentsAsync(database, numDocs + 5); //10 + 10 + 5
var gotExpectedQuery1Result = new CountdownEvent(1);
query1.Changed += (sender, e) =>
{
Assert.IsNull(e.Error);
// The second view reduces over every document created so far: 10 + 15.
if (e.Rows.Count == 1 && Convert.ToInt32(e.Rows.GetRow(0).Value) == (2 * numDocs) + 5)
{
gotExpectedQuery1Result.Signal();
}
};
success = gotExpectedQuery1Result.Wait(TimeSpan.FromSeconds(10));
Assert.IsTrue(success);
query1.Stop();
Assert.AreEqual((2 * numDocs) + 5, database.DocumentCount);
}
/// <summary>
/// Documents whose _id starts with "_design/" must be excluded from view
/// indexing, so indexing a database containing only a design doc yields no rows.
/// </summary>
[Test]
public void TestViewIndexSkipsDesignDocs()
{
    var view = CreateView(database);
    PutDoc(database, new Dictionary<string, object>
    {
        { "_id", "_design/test" },
        { "key", "value" }
    });
    view.UpdateIndex();
    // The design document must not have produced any index rows.
    Assert.AreEqual(0, view.QueryWithOptions(null).Count());
}
/// <summary>
/// Numeric keys must round-trip through the index: an exact-match query with a
/// numeric start/end key returns the single row carrying that key.
/// </summary>
[Test]
public void TestViewNumericKeys() {
    const int referenceNumber = 33547239;
    PutDoc(database, new Dictionary<string, object>
    {
        { "_id", "22222" },
        { "referenceNumber", referenceNumber },
        { "title", "this is the title" }
    });
    var view = CreateView(database);
    view.SetMap((document, emit) =>
    {
        // Only docs carrying a reference number are indexed.
        if (document.ContainsKey("referenceNumber"))
        {
            emit(document["referenceNumber"], document);
        }
    }, "1");
    var query = view.CreateQuery();
    query.StartKey = referenceNumber;
    query.EndKey = referenceNumber;
    var rows = query.Run();
    Assert.AreEqual(1, rows.Count());
    Assert.AreEqual(referenceNumber, rows.GetRow(0).Key);
}
/// <summary>
/// Verifies that StartKeyDocId / EndKeyDocId break ties between rows sharing
/// the same key (two documents with key "one" here).
/// </summary>
[Test]
public void TestViewQueryStartKeyDocID()
{
    PutDocs(database);
    // Add a second document with key "one" so doc-id tie-breaking is observable.
    var result = new List<RevisionInternal>();
    result.Add(PutDoc(database, new Dictionary<string, object>
    {
        { "_id", "11112" },
        { "key", "one" }
    }));
    var view = CreateView(database);
    view.UpdateIndex();

    // Start at ("one", "11112"): the earlier ("one", "11111") row is skipped.
    var options = new QueryOptions();
    options.StartKey = "one";
    options.StartKeyDocId = "11112";
    options.EndKey = "three";
    var rows = view.QueryWithOptions(options).ToList();
    Assert.AreEqual(2, rows.Count);
    Assert.AreEqual("11112", rows[0].DocumentId);
    Assert.AreEqual("one", rows[0].Key);
    Assert.AreEqual("33333", rows[1].DocumentId);
    Assert.AreEqual("three", rows[1].Key);

    // End at ("one", "11111"): iteration stops before ("one", "11112").
    options = new QueryOptions();
    options.EndKey = "one";
    options.EndKeyDocId = "11111";
    rows = view.QueryWithOptions(options).ToList();
    Assert.AreEqual(3, rows.Count);
    Assert.AreEqual("55555", rows[0].DocumentId);
    Assert.AreEqual("five", rows[0].Key);
    Assert.AreEqual("44444", rows[1].DocumentId);
    Assert.AreEqual("four", rows[1].Key);
    Assert.AreEqual("11111", rows[2].DocumentId);
    Assert.AreEqual("one", rows[2].Key);

    // Both bounds pinned to ("one", "11111"): exactly one row remains.
    options.StartKey = "one";
    options.StartKeyDocId = "11111";
    rows = view.QueryWithOptions(options).ToList();
    Assert.AreEqual(1, rows.Count);
    Assert.AreEqual("11111", rows[0].DocumentId);
    Assert.AreEqual("one", rows[0].Key);
}
/// <summary>
/// Creates and saves a new revision of <paramref name="doc"/> whose user
/// properties are exactly { "key": val }.
/// </summary>
/// <param name="doc">The document to revise.</param>
/// <param name="val">The value stored under "key".</param>
/// <returns>The saved revision.</returns>
private SavedRevision CreateTestRevisionNoConflicts(Document doc, string val) {
    var pendingRev = doc.CreateRevision();
    pendingRev.SetUserProperties(new Dictionary<string, object> { { "key", val } });
    return pendingRev.Save();
}
[Test]
public void TestViewWithConflict() {
// Create doc and add some revs
var doc = database.CreateDocument();
var rev1 = CreateTestRevisionNoConflicts(doc, "1");
Assert.IsNotNull(rev1);
var rev2a = CreateTestRevisionNoConflicts(doc, "2a");
Assert.IsNotNull(rev2a);
var rev3 = CreateTestRevisionNoConflicts(doc, "3");
Assert.IsNotNull(rev3);
// index the view
var view = CreateView(database);
var rows = view.CreateQuery().Run();
// One document means one row; its key is the latest revision's "key" value.
Assert.AreEqual(1, rows.Count);
var row = rows.GetRow(0);
Assert.AreEqual(row.Key, "3");
// TODO: Why is this null?
//Assert.IsNotNull(row.DocumentRevisionId);
// Create a conflict by branching a second child off rev1.
// Save(true) presumably allows the conflicting save — confirm parameter name.
var rev2bUnsaved = rev1.CreateRevision();
var props = new Dictionary<string, object>()
{
{"key", "2b"}
};
rev2bUnsaved.SetUserProperties(props);
var rev2b = rev2bUnsaved.Save(true);
Assert.IsNotNull(rev2b);
// re-run query
view.UpdateIndex();
rows = view.CreateQuery().Run();
// we should only see one row, with key=3.
// if we see key=2b then it's a bug: only the current (winning) revision
// may be indexed, never a conflicting branch.
Assert.AreEqual(1, rows.Count);
row = rows.GetRow(0);
Assert.AreEqual(row.Key, "3");
}
[Test]
public void TestMultipleQueriesOnSameView()
{
// Two live queries share one view; the index must stay correct while the
// queries start and stop around ongoing writes.
var view = database.GetView("view1");
view.SetMapReduce((doc, emit) =>
{
emit(doc["jim"], doc["_id"]);
}, (keys, vals, rereduce) =>
{
// Reduce to the number of emitted keys.
return keys.Count();
}, "1");
var query1 = view.CreateQuery().ToLiveQuery();
query1.Start();
var query2 = view.CreateQuery().ToLiveQuery();
query2.Start();
// Timestamp suffix keeps doc ids unique across test runs.
var docIdTimestamp = Convert.ToString(Runtime.CurrentTimeMillis());
// First batch: 50 documents while both queries are live.
for(int i = 0; i < 50; i++) {
database.GetDocument(string.Format("doc{0}-{1}", i, docIdTimestamp)).PutProperties(new Dictionary<string, object> { {
"jim",
"borden"
} });
}
// NOTE(review): fixed sleeps make this timing-dependent; prefer an event
// wait if this test proves flaky.
Thread.Sleep(5000);
Assert.AreEqual(50, view.TotalRows);
Assert.AreEqual(50, view.LastSequenceIndexed);
query1.Stop();
// Second batch: 10 more documents; query1 restarts partway through to
// exercise re-subscription against a view another query kept indexing.
for(int i = 50; i < 60; i++) {
database.GetDocument(string.Format("doc{0}-{1}", i, docIdTimestamp)).PutProperties(new Dictionary<string, object> { {
"jim",
"borden"
} });
if (i == 55) {
query1.Start();
}
}
Thread.Sleep(5000);
Assert.AreEqual(60, view.TotalRows);
Assert.AreEqual(60, view.LastSequenceIndexed);
}
/// <summary>
/// Projects each query row to its JSON dictionary representation.
/// </summary>
/// <param name="allDocs">The rows to convert; must not be null.</param>
/// <returns>One JSON dictionary per row, in enumeration order.</returns>
private IList<IDictionary<string, object>> RowsToDicts(IEnumerable<QueryRow> allDocs)
{
    Assert.IsNotNull(allDocs);
    // LINQ projection replaces the hand-rolled accumulation loop.
    return allDocs.Select(row => row.AsJSONDictionary()).ToList();
}
}
}
| |
// ********************************************************************************************************
// Product Name: DotSpatial.Data.dll
// Description: The data access libraries for the DotSpatial project.
//
// ********************************************************************************************************
//
// The Original Code is DotSpatial
//
// The Initial Developer of this Original Code is Ted Dunsford. Created in January, 2008.
//
// Contributor(s): (Open source contributors should list themselves and their modifications here).
//
// ********************************************************************************************************
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Diagnostics;
using System.IO;
using System.Linq;
using DotSpatial.Projections;
using DotSpatial.Serialization;
namespace DotSpatial.Data
{
/// <summary>
/// This is a generic shapefile that is inherited by other specific shapefile types.
/// </summary>
public class Shapefile : FeatureSet
{
#region Private Variables
// Stores extents and some very basic info common to all shapefiles
private AttributeTable _attributeTable;
private int _bufferSize = 1; // The buffer is approximately how much memory in bytes will be loaded at any one time
private ShapefileHeader _header;
#endregion
#region Constructors
/// <summary>
/// When creating a new shapefile, this simply prevents the basic values
/// (attribute table, header, index mode) from being left null/unset.
/// </summary>
public Shapefile()
{
Configure();
}
/// <summary>
/// Creates a new shapefile that has a specific feature type.
/// </summary>
/// <param name="featureType">The feature type of the new shapefile.</param>
/// <remarks>
/// NOTE(review): unlike the other constructors this does not call Configure(),
/// so _attributeTable and _header remain null and IndexMode keeps the base
/// default until a subclass sets them — confirm this is intentional.
/// </remarks>
protected Shapefile(FeatureType featureType)
: base(featureType)
{
}
/// <summary>
/// Creates a new instance of a shapefile based on a fileName. The file is not
/// read here; this only records the name and initializes default state.
/// </summary>
/// <param name="fileName">The file name to associate with this shapefile.</param>
protected Shapefile(string fileName)
{
base.Filename = fileName;
Configure();
}
// Initializes the state shared by the constructors: an empty attribute table,
// a default header, and index mode enabled by default.
private void Configure()
{
Attributes = new AttributeTable();
_header = new ShapefileHeader();
IndexMode = true;
}
#endregion
/// <summary>
/// The buffer size is an integer value in bytes specifying how large a piece of memory can be used at any one time.
/// Reading and writing from the disk is faster when done all at once. The larger this number the more effective
/// the disk management, but the more ram will be required (and the more likely to trip an out of memory error).
/// </summary>
/// <remarks>
/// NOTE(review): _bufferSize is not read anywhere in the visible part of this
/// class; confirm whether subclasses actually honor it.
/// </remarks>
[Browsable(false), DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
public int BufferSize
{
get { return _bufferSize; }
set { _bufferSize = value; }
}
/// <summary>
/// Gets or sets whether or not the attributes have all been loaded into the data table.
/// This delegates directly to the underlying attribute table's flag.
/// </summary>
[Browsable(false), DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
public override bool AttributesPopulated
{
get
{
return _attributeTable.AttributesPopulated;
}
set
{
_attributeTable.AttributesPopulated = value;
}
}
/// <summary>
/// This re-directs the DataTable to work with the attribute Table instead of
/// any base-class storage, so both views of the attributes stay in sync.
/// </summary>
public override DataTable DataTable
{
get
{
return _attributeTable.Table;
}
set
{
_attributeTable.Table = value;
}
}
/// <summary>
/// A general header structure that stores some basic information, such as the
/// ShapeType, common to all shapefiles. Replaced wholesale when a file is opened.
/// </summary>
[Browsable(false), DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
public ShapefileHeader Header
{
get { return _header; }
set { _header = value; }
}
/// <summary>
/// Gets or sets the attribute Table used by this shapefile. Swapping tables
/// also moves the AttributesFilled subscription so that feature DataRows are
/// re-linked when the new table finishes loading.
/// </summary>
[Browsable(false), DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
public AttributeTable Attributes
{
get { return _attributeTable; }
set
{
// Re-assigning the same table is a no-op (avoids double subscription).
if (_attributeTable == value) return;
// Detach from the old table before replacing it.
if (_attributeTable != null)
{
_attributeTable.AttributesFilled -= AttributeTableAttributesFilled;
}
_attributeTable = value;
if (_attributeTable != null)
{
_attributeTable.AttributesFilled += AttributeTableAttributesFilled;
}
}
}
/// <summary>
/// Gets, for each expression, the count of members that match it. When the
/// attributes are not yet loaded into memory, the counts are computed by
/// paging the attribute file from disk, optionally using random sampling.
/// </summary>
/// <param name="expressions">The string expressions to test. A null or empty expression counts every row.</param>
/// <param name="progressHandler">The progress handler that can also cancel the counting</param>
/// <param name="maxSampleSize">The integer maximum sample size from which to draw counts. If this is negative, it will not be used.</param>
/// <returns>The integer counts of the members that match each expression.</returns>
public override int[] GetCounts(string[] expressions, ICancelProgressHandler progressHandler, int maxSampleSize)
{
// With attributes already in memory the base implementation is authoritative.
if (AttributesPopulated) return base.GetCounts(expressions, progressHandler, maxSampleSize);
int[] counts = new int[expressions.Length];
// The most common case would be no filter expression, in which case the count is simply the number of shapes.
bool requiresRun = false;
for (int iex = 0; iex < expressions.Length; iex++)
{
if (!string.IsNullOrEmpty(expressions[iex]))
{
requiresRun = true;
}
else
{
counts[iex] = NumRows();
}
}
if (!requiresRun) return counts;
// Page attribute rows from disk 5000 at a time.
AttributePager ap = new AttributePager(this, 5000);
ProgressMeter pm = new ProgressMeter(progressHandler, "Calculating Counts", ap.NumPages());
// Don't bother to use a sampling approach if the number of rows is on the same order of magnitude as the number of samples.
if (maxSampleSize > 0 && maxSampleSize < NumRows() / 2)
{
DataTable sample = new DataTable();
sample.Columns.AddRange(GetColumns());
Dictionary<int, int> usedRows = new Dictionary<int, int>();
int samplesPerPage = maxSampleSize / ap.NumPages();
Random rnd = new Random(DateTime.Now.Millisecond);
for (int page = 0; page < ap.NumPages(); page++)
{
// Draw samplesPerPage distinct random rows from the current page.
// NOTE(review): the candidate range [StartIndex, StartIndex + PageSize)
// can overrun a partial final page, and the do/while cannot terminate
// if samplesPerPage exceeds the page's distinct row count — confirm
// AttributePager's guarantees here.
for (int i = 0; i < samplesPerPage; i++)
{
int row;
do
{
row = rnd.Next(ap.StartIndex, ap.StartIndex + ap.PageSize);
} while (usedRows.ContainsKey(row));
usedRows.Add(row, row);
sample.Rows.Add(ap.Row(row).ItemArray);
}
ap.MoveNext();
pm.CurrentValue = page;
if (progressHandler.Cancel) break;
//Application.DoEvents();
}
// Count matches within the sample only; these counts reflect the sampled
// rows, not the whole table.
for (int i = 0; i < expressions.Length; i++)
{
try
{
DataRow[] dr = sample.Select(expressions[i]);
counts[i] += dr.Length;
}
catch (Exception ex)
{
// An invalid filter expression leaves that count unchanged.
Debug.WriteLine(ex);
}
}
pm.Reset();
return counts;
}
// Exhaustive path: evaluate every expression against every page of rows.
for (int page = 0; page < ap.NumPages(); page++)
{
for (int i = 0; i < expressions.Length; i++)
{
DataRow[] dr = ap[page].Select(expressions[i]);
counts[i] += dr.Length;
}
pm.CurrentValue = page;
if (progressHandler.Cancel) break;
//Application.DoEvents();
}
pm.Reset();
return counts;
}
/// <summary>
/// Re-links each feature's DataRow once the attribute table finishes loading.
/// This makes the assumption that the organization of the feature list has not
/// changed since loading the attribute content.
/// </summary>
/// <param name="sender">The attribute table raising the event.</param>
/// <param name="e">The event arguments (unused).</param>
private void AttributeTableAttributesFilled(object sender, EventArgs e)
{
    // In index mode no feature objects are maintained, so there is nothing to link.
    if (IndexMode) return;
    // Link only as many features as there are attribute rows.
    int linkCount = Math.Min(Features.Count, _attributeTable.Table.Rows.Count);
    for (int fid = 0; fid < linkCount; fid++)
    {
        Features[fid].DataRow = _attributeTable.Table.Rows[fid];
    }
    SetupFeatureLookup();
}
/// <summary>
/// This will return the correct shapefile type by reading the fileName.
/// Progress is reported through DataManager.DefaultDataManager.ProgressHandler.
/// </summary>
/// <param name="fileName">A string specifying the file with the extension .shp to open.</param>
/// <returns>A correct shapefile object which is exclusively for reading the .shp data</returns>
public static new Shapefile OpenFile(string fileName)
{
return OpenFile(fileName, DataManager.DefaultDataManager.ProgressHandler);
}
/// <summary>
/// This will return the correct shapefile type (point, multi-point, line or
/// polygon) by reading the header of the file named by fileName.
/// </summary>
/// <param name="fileName">A string specifying the file with the extension .shp, .shx or .dbf to open.</param>
/// <param name="progressHandler">Receives progress messages and overrides the ProgressHandler on the DataManager.DefaultDataManager.</param>
/// <returns>A correct shapefile object which is exclusively for reading the .shp data, or null for an unrecognized shape type.</returns>
/// <exception cref="ArgumentException">Thrown when the file extension is not .shp, .shx or .dbf.</exception>
/// <exception cref="NotImplementedException">Thrown for the MultiPatch and NullShape shape types, which are not supported.</exception>
public static new Shapefile OpenFile(string fileName, IProgressHandler progressHandler)
{
    var ext = Path.GetExtension(fileName);
    // Use the invariant lowercase transformation so the extension check is not
    // broken by culture-specific casing (e.g. the Turkish dotless-i turning
    // ".SHP" into something other than ".shp").
    if (ext != null) ext = ext.ToLowerInvariant();
    if (ext != ".shp" && ext != ".shx" && ext != ".dbf")
        throw new ArgumentException(String.Format("The file extension {0} is not supported by Shapefile data provider.", ext));

    // Whichever sibling file was passed, the header always comes from the .shp.
    string name = Path.ChangeExtension(fileName, ".shp");
    var head = new ShapefileHeader();
    head.Open(name);
    switch (head.ShapeType)
    {
        case ShapeType.MultiPatch:
            throw new NotImplementedException("This shape type is not yet supported.");
        case ShapeType.MultiPoint:
        case ShapeType.MultiPointM:
        case ShapeType.MultiPointZ:
            var mpsf = new MultiPointShapefile();
            mpsf.Open(name, progressHandler);
            return mpsf;
        case ShapeType.NullShape:
            throw new NotImplementedException("This shape type is not yet supported.");
        case ShapeType.Point:
        case ShapeType.PointM:
        case ShapeType.PointZ:
            var psf = new PointShapefile();
            psf.Open(name, progressHandler);
            return psf;
        case ShapeType.Polygon:
        case ShapeType.PolygonM:
        case ShapeType.PolygonZ:
            var pgsf = new PolygonShapefile();
            pgsf.Open(name, progressHandler);
            return pgsf;
        case ShapeType.PolyLine:
        case ShapeType.PolyLineM:
        case ShapeType.PolyLineZ:
            var lsf = new LineShapefile();
            lsf.Open(name, progressHandler);
            return lsf;
    }
    // Unknown/unhandled shape type: preserve the original "no result" contract.
    return null;
}
/// <summary>
/// Saves a single row of attribute values back to the data source.
/// </summary>
/// <param name="index">the integer row (or FID) index</param>
/// <param name="values">The dictionary of field-name to value pairs to store.</param>
public override void Edit(int index, Dictionary<string, object> values)
{
_attributeTable.Edit(index, values);
}
/// <summary>
/// Saves a single row of attribute values back to the data source.
/// </summary>
/// <param name="index">the integer row (or FID) index</param>
/// <param name="values">The DataRow holding the new values to store.</param>
public override void Edit(int index, DataRow values)
{
_attributeTable.Edit(index, values);
}
/// <summary>
/// Saves the new row to the data source and updates the file with new content.
/// </summary>
/// <param name="values">The values organized against the dictionary of field names.</param>
public override void AddRow(Dictionary<string, object> values)
{
_attributeTable.AddRow(values);
}
/// <summary>
/// Saves the new row to the data source and updates the file with new content.
/// </summary>
/// <param name="values">The DataRow holding the new values to append.</param>
public override void AddRow(DataRow values)
{
_attributeTable.AddRow(values);
}
/// <inheritdoc />
public override DataTable GetAttributes(int startIndex, int numRows)
{
// Pages rows via the attribute table without requiring full population.
return _attributeTable.SupplyPageOfData(startIndex, numRows);
}
/// <summary>
/// Converts a page of content from a DataTable format, saving it back to the source.
/// Note this member hides (does not override) the base implementation, so it is
/// only invoked through Shapefile-typed references.
/// </summary>
/// <param name="startIndex">The 0 based integer index representing the first row in the file (corresponding to the 0 row of the data table)</param>
/// <param name="pageValues">The DataTable representing the rows to set. If the row count is larger than the dataset, this will add the rows instead.</param>
public new void SetAttributes(int startIndex, DataTable pageValues)
{
// overridden in sub-classes
_attributeTable.SetAttributes(startIndex, pageValues);
}
/// <summary>
/// Selects the features whose attributes satisfy the filter expression, first
/// loading the full attribute table and the feature lookup if necessary.
/// </summary>
/// <param name="filterExpression">The DataTable-style filter expression.</param>
/// <returns>The list of features matching the expression.</returns>
public override List<IFeature> SelectByAttribute(string filterExpression)
{
    // Attribute-based selection needs every record in memory first.
    if (!_attributeTable.AttributesPopulated) _attributeTable.Fill(_attributeTable.NumRecords);
    // The base implementation resolves matched rows back to features via FeatureLookup.
    if (FeatureLookup.Count == 0) SetupFeatureLookup();
    return base.SelectByAttribute(filterExpression);
}
/// <summary>
/// Reads the attributes from the specified attribute Table.
/// </summary>
public override void FillAttributes()
{
// Clearing the flag forces the attribute table to reload its contents.
_attributeTable.AttributesPopulated = false; // attributeTable.Table fills itself if attributes are not populated
DataTable = _attributeTable.Table;
// Mark populated through the base class; the local override would just write
// the flag back into the attribute table we read from.
base.AttributesPopulated = true;
// Link the data rows to the vectors in this object
}
/// <summary>
/// Links every feature to its attribute-table row and registers the pair in
/// FeatureLookup so data rows can be mapped back to their features.
/// </summary>
private void SetupFeatureLookup()
{
    var tableRows = _attributeTable.Table.Rows;
    for (int fid = 0; fid < tableRows.Count; fid++)
    {
        var feature = Features[fid];
        feature.DataRow = tableRows[fid];
        FeatureLookup[feature.DataRow] = feature;
    }
}
/// <summary>
/// This doesn't rewrite the entire header or erase the existing content. This simply replaces the file length
/// in the file with the new file length. This is generally because we want to write the header first,
/// but don't know the total length of a new file until cycling through the entire file. It is easier, therefore
/// to update the length after editing.
/// </summary>
/// <param name="fileName">A string fileName</param>
/// <param name="length">The integer length of the file in 16-bit words</param>
public static void WriteFileLength(string fileName, int length)
{
// Only the first 28 bytes are touched: file code 9994 (big endian) at byte 0,
// the reserved bytes as zeros, and the length (big endian) at byte 24.
byte[] headerData = new byte[28];
WriteBytes(headerData, 0, 9994, false); // Byte 0 File Code 9994 Integer Big
// Bytes 4 - 20 are unused
WriteBytes(headerData, 24, length, false); // Byte 24 File Length File Length Integer Big
// FileMode.Open positions the stream at byte 0, so this overwrites the start
// of the existing file in place without truncating the rest.
using (var bw = new BinaryWriter(new FileStream(fileName, FileMode.Open)))
{
// Actually write our byte array to the file
bw.Write(headerData);
}
}
/// <summary>
/// Reads a 4 byte integer from the specified byte array starting at startIndex,
/// advancing startIndex by 4.
/// </summary>
/// <param name="value">An array of bytes that is at least 4 bytes in length from the startIndex</param>
/// <param name="startIndex">A 0 based integer index where the integer value begins; advanced by 4 on return</param>
/// <param name="isLittleEndian">The byte order of the value in the array; when it differs from the system's native order the bytes are reversed before conversion</param>
/// <returns>An integer created from reading the byte array</returns>
public static int ToInt(byte[] value, ref int startIndex, bool isLittleEndian)
{
    // The parameter specifies the byte order that should exist on the file;
    // BitConverter tells us what our system does natively. Reverse only when
    // the two disagree.
    if (isLittleEndian != BitConverter.IsLittleEndian)
    {
        byte[] flipBytes = new byte[4];
        Array.Copy(value, startIndex, flipBytes, 0, 4);
        Array.Reverse(flipBytes);
        startIndex += 4;
        return BitConverter.ToInt32(flipBytes, 0);
    }
    // BUG FIX: read at the current position *before* advancing startIndex.
    // The previous code incremented first and then converted at startIndex + 4,
    // returning the wrong 4 bytes in the matching-endianness branch.
    int result = BitConverter.ToInt32(value, startIndex);
    startIndex += 4;
    return result;
}
/// <summary>
/// Reads an 8 byte double from the specified byte array starting at startIndex,
/// advancing startIndex by 8.
/// </summary>
/// <param name="value">An array of bytes that is at least 8 bytes in length from the startIndex</param>
/// <param name="startIndex">A 0 based integer index where the double value begins; advanced by 8 on return</param>
/// <param name="isLittleEndian">The byte order of the value in the array; when it differs from the system's native order the bytes are reversed before conversion</param>
/// <returns>A double created from reading the byte array</returns>
public static double ToDouble(byte[] value, ref int startIndex, bool isLittleEndian)
{
    // The parameter specifies the byte order that should exist on the file;
    // BitConverter tells us what our system does natively. Reverse only when
    // the two disagree.
    if (isLittleEndian != BitConverter.IsLittleEndian)
    {
        byte[] flipBytes = new byte[8];
        Array.Copy(value, startIndex, flipBytes, 0, 8);
        Array.Reverse(flipBytes);
        startIndex += 8;
        return BitConverter.ToDouble(flipBytes, 0);
    }
    // BUG FIX: read at the current position *before* advancing startIndex.
    // The previous code incremented first and then converted at startIndex + 8,
    // returning the wrong 8 bytes in the matching-endianness branch.
    double result = BitConverter.ToDouble(value, startIndex);
    startIndex += 8;
    return result;
}
/// <summary>
/// Converts the double value into 8 bytes and inserts them starting at startIndex into the destArray.
/// </summary>
/// <param name="destArray">A byte array where the values should be written</param>
/// <param name="startIndex">The starting index where the values should be inserted</param>
/// <param name="value">The double value to convert</param>
/// <param name="isLittleEndian">Specifies whether the value should be written as little endian (true) or big endian (false)</param>
public static void WriteBytes(byte[] destArray, int startIndex, double value, bool isLittleEndian)
{
    // Convert once; reverse only when the requested order differs from the
    // system's native order. (The previous version duplicated the conversion
    // and copy in both branches.)
    byte[] bytes = BitConverter.GetBytes(value);
    if (isLittleEndian != BitConverter.IsLittleEndian)
    {
        Array.Reverse(bytes);
    }
    Array.Copy(bytes, 0, destArray, startIndex, 8);
}
/// <summary>
/// Converts the double value into bytes and inserts them starting at startIndex into the destArray.
/// This will correct this system's natural byte order to reflect what is required to match the
/// shapefiles specification.
/// </summary>
/// <param name="destArray">A byte array where the values should be written</param>
/// <param name="startIndex">The starting index where the values should be inserted</param>
/// <param name="value">The integer value to convert</param>
/// <param name="isLittleEndian">Specifies whether the value should be written as big or little endian</param>
public static void WriteBytes(byte[] destArray, int startIndex, int value, bool isLittleEndian)
{
    // BitConverter always produces bytes in this system's native order; when
    // the requested order differs from the native one, reverse them before
    // copying the 4 bytes into the destination buffer.
    byte[] raw = BitConverter.GetBytes(value);
    if (isLittleEndian != BitConverter.IsLittleEndian)
    {
        Array.Reverse(raw);
    }
    Array.Copy(raw, 0, destArray, startIndex, 4);
}
/// <summary>
/// Reads the entire index file in order to get a breakdown of how shapes are broken up.
/// </summary>
/// <param name="fileName">A string fileName of the .shx file to read. A sibling file name
/// (e.g. the .shp) is also accepted; its extension is swapped for .shx.</param>
/// <returns>A List of ShapeHeaders that give offsets and lengths so that reading can be optimized</returns>
/// <exception cref="NullReferenceException">Thrown when no .shx path could be derived from fileName.</exception>
/// <exception cref="FileNotFoundException">Thrown when the derived .shx file does not exist on disk.</exception>
public List<ShapeHeader> ReadIndexFile(string fileName)
{
    string shxFilename = fileName;
    string ext = Path.GetExtension(fileName);
    if (ext != ".shx")
    {
        // Allow callers to pass the .shp (or any sibling) name; derive the .shx name from it.
        shxFilename = Path.ChangeExtension(fileName, ".shx");
    }
    if (shxFilename == null)
    {
        // Note: the error messages intentionally report the caller-supplied name, not the derived one.
        throw new NullReferenceException(DataStrings.ArgumentNull_S.Replace("%S", fileName));
    }
    if (File.Exists(shxFilename) == false)
    {
        throw new FileNotFoundException(DataStrings.FileNotFound_S.Replace("%S", fileName));
    }
    var fileLen = new FileInfo(shxFilename).Length;
    if (fileLen == 100)
    {
        // the file is empty so we are done reading
        // (100 bytes is exactly the fixed shapefile header with zero records;
        // note that _header.ShxLength is NOT updated on this early-out path)
        return Enumerable.Empty<ShapeHeader>().ToList();
    }
    // Use the length of the file to dimension the record list below.
    using (var bbReader = new FileStream(shxFilename, FileMode.Open, FileAccess.Read, FileShare.Read, 65536))
    {
        // Skip the header and begin reading from the first record
        bbReader.Seek(100, SeekOrigin.Begin);
        // Shapefile lengths are expressed in 16-bit words, hence the division by 2.
        _header.ShxLength = (int)(fileLen / 2);
        var length = (int)(fileLen - 100);
        var numRecords = length / 8;
        // Each record consists of 2 Big-endian integers for a total of 8 bytes.
        // This will store the header elements that we read from the file.
        var result = new List<ShapeHeader>(numRecords);
        for (var i = 0; i < numRecords; i++)
        {
            result.Add(new ShapeHeader
            {
                Offset = bbReader.ReadInt32(Endian.BigEndian),
                ContentLength = bbReader.ReadInt32(Endian.BigEndian),
            });
        }
        return result;
    }
}
/// <summary>
/// Ensures that the attribute Table will have information that matches the current Table of attribute information.
/// When attributes were never loaded into memory, the existing on-disk .dbf is copied next to this
/// shapefile instead of being rewritten; otherwise the in-memory table (or a generated FID-only
/// table) is saved as the .dbf.
/// </summary>
public void UpdateAttributes()
{
    string newFile = Path.ChangeExtension(Filename, "dbf");
    if (!AttributesPopulated)
    {
        // Attributes never loaded: avoid a load/save round-trip by copying the
        // source .dbf file alongside this shapefile, if one exists.
        if (File.Exists(Attributes.Filename))
        {
            if (newFile.Equals(Attributes.Filename))
            { // Already using existing file
            }
            else
            {
                if (File.Exists(newFile)) File.Delete(newFile);
                File.Copy(Attributes.Filename, newFile);
                Attributes.Filename = newFile;
            }
            return;
        }
        // No source file on disk: fall through and build/save a table below.
    }
    if (Attributes != null && Attributes.Table != null && Attributes.Table.Columns.Count > 0)
    {
        // The attributes have been loaded and will now replace the ones in the file.
    }
    else
    {
        // Only add an FID field if there are no attributes at all.
        DataTable newTable = new DataTable();
        newTable.Columns.Add("FID");
        //for (int i = 0; i < Features.Count; i++)
        //Added by [email protected] - Index mode has no attributes and no features - so Features.count is Null and so was not adding any rows and failing
        int iNumRows = IndexMode ? ShapeIndices.Count : Features.Count;
        for (int i = 0; i < iNumRows; i++)
        {
            DataRow dr = newTable.NewRow();
            dr["FID"] = i;
            newTable.Rows.Add(dr);
        }
        if (Attributes != null) Attributes.Table = newTable;
    }
    //System.Data.DataRow drtemp = Attributes.Table.Rows[0];
    // Persist whichever table is now attached (loaded attributes or the generated FID table).
    if (Attributes != null) Attributes.SaveAs(Path.ChangeExtension(Filename, "dbf"), true);
}
/// <summary>
/// This uses the fileName of this shapefile to read the prj file of the same name
/// and stores the result in the Projection class.
/// </summary>
public void ReadProjection()
{
    string projectionPath = Path.ChangeExtension(Filename, ".prj");
    if (!File.Exists(projectionPath))
    {
        // No sidecar .prj file: fall back to an empty projection definition.
        Projection = new ProjectionInfo();
        return;
    }
    Projection = ProjectionInfo.Open(projectionPath);
}
/// <summary>
/// Automatically uses the fileName of this shapefile to save the projection
/// </summary>
public void SaveProjection()
{
    string projectionPath = Path.ChangeExtension(Filename, ".prj");
    // Remove any stale projection file before writing the current definition.
    if (File.Exists(projectionPath)) File.Delete(projectionPath);
    if (Projection == null) return;
    Projection.SaveAs(projectionPath);
}
/// <summary>
/// Reads just the content requested in order to satisfy the paging ability of VirtualMode for the DataGridView
/// </summary>
/// <param name="startIndex">The integer lower page boundary</param>
/// <param name="numRows">The integer number of attribute rows to return for the page</param>
/// <param name="fieldNames">The list or array of fieldnames to return.</param>
/// <returns>A DataTable populated with data rows with only the specified values.</returns>
public override DataTable GetAttributes(int startIndex, int numRows, IEnumerable<string> fieldNames)
{
    // When the whole table is already in memory, the base implementation can page it directly.
    if (AttributesPopulated) return base.GetAttributes(startIndex, numRows, fieldNames);
    DataTable result = new DataTable();
    DataColumn[] columns = GetColumns();
    // Always add FID in this paging scenario.
    result.Columns.Add("FID", typeof(int));
    foreach (string name in fieldNames)
    {
        // Match requested names case-insensitively against the dbf columns.
        // GetColumns() returns fresh copies, so adding one here does not steal
        // a column that belongs to another DataTable.
        foreach (var col in columns)
        {
            if (String.Equals(col.ColumnName, name, StringComparison.CurrentCultureIgnoreCase))
            {
                result.Columns.Add(col);
                break;
            }
        }
    }
    // Pre-create the page's rows with their FID values; field data is filled in below.
    for (int i = 0; i < numRows; i++)
    {
        DataRow dr = result.NewRow();
        dr["FID"] = startIndex + i;
        result.Rows.Add(dr);
    }
    // Most use cases with an expression use only one or two fieldnames. Tailor for better
    // performance in that case, at the cost of performance in the "read all " case.
    // The other overload without fieldnames specified is designed for that case.
    foreach (string field in fieldNames)
    {
        if (field == "FID") continue;
        // One data pass per requested field; each pass fills the whole page column.
        object[] values = _attributeTable.SupplyPageOfData(startIndex, numRows, field);
        for (int i = 0; i < numRows; i++)
        {
            result.Rows[i][field] = values[i];
        }
    }
    return result;
}
/// <summary>
/// The number of rows
/// </summary>
/// <returns>The record count reported by the underlying attribute table.</returns>
public override int NumRows()
{
    // overridden in sub-classes
    int recordCount = _attributeTable.NumRecords;
    return recordCount;
}
/// <summary>
/// This gets a copy of the actual internal list of columns.
/// This should never be used to make changes to the column collection.
/// </summary>
/// <returns>A defensive array of Field copies of the attribute table's columns.</returns>
public override DataColumn[] GetColumns()
{
    var copies = new List<DataColumn>();
    foreach (var column in _attributeTable.Columns)
    {
        // Each entry is a fresh Field clone so callers cannot mutate the real columns.
        copies.Add(new Field(column.ColumnName, column.TypeCharacter, column.Length, column.DecimalCount));
    }
    return copies.ToArray();
}
/// <summary>
/// Checks that shape file can be saved to given fileName.
/// </summary>
/// <param name="fileName">File name to save.</param>
/// <param name="overwrite">Overwrite file or not.</param>
/// <exception cref="ArgumentOutOfRangeException">Thrown when the target file exists, is not
/// this shapefile's own file, and overwrite is false.</exception>
protected void EnsureValidFileToSave(string fileName, bool overwrite)
{
    string dir = Path.GetDirectoryName(fileName);
    if (!string.IsNullOrEmpty(dir) && !Directory.Exists(dir))
    {
        // Target folder does not exist yet: create it. A freshly created folder
        // cannot already contain the file, so the overwrite branch below is
        // deliberately skipped (hence the else-if).
        Directory.CreateDirectory(dir);
    }
    else if (File.Exists(fileName))
    {
        // Saving over this shapefile's own file is always allowed; anything else
        // requires explicit permission via the overwrite flag.
        if (fileName != Filename && overwrite == false) throw new ArgumentOutOfRangeException("fileName", "File exists and overwrite = False.");
        File.Delete(fileName);
        // Remove the stale index file as well so the .shp/.shx pair stays consistent.
        var shx = Path.ChangeExtension(fileName, ".shx");
        if (File.Exists(shx)) File.Delete(shx);
    }
}
/// <summary>
/// Saves header
/// </summary>
/// <param name="fileName">File to save.</param>
protected void HeaderSaveAs(string fileName)
{
    // Recompute the extent from the current geometry before stamping it into the header.
    InvalidateEnvelope();
    Header.SetExtent(Extent);
    // The .shx length is measured in 16-bit words: 50 words (100 bytes) of fixed
    // header plus 4 words (8 bytes) per index record.
    Header.ShxLength = IndexMode ? ShapeIndices.Count * 4 + 50 : Features.Count * 4 + 50;
    Header.SaveAs(fileName);
}
}
}
| |
//------------------------------------------------------------------------------
// <license file="Arguments.cs">
//
// The use and distribution terms for this software are contained in the file
// named 'LICENSE', which can be found in the resources directory of this
// distribution.
//
// By using this software in any fashion, you are agreeing to be bound by the
// terms of this license.
//
// </license>
//------------------------------------------------------------------------------
using System;
using EcmaScript.NET.Types;
namespace EcmaScript.NET
{
/// <summary> This class implements the "arguments" object.
///
/// See ECMA 10.1.8
///
/// </summary>
sealed class Arguments : IdScriptableObject
{
    /// <summary>ECMA [[Class]] name reported for this object.</summary>
    override public string ClassName
    {
        get
        {
            return "Arguments";
        }
    }
    override protected internal int MaxInstanceId
    {
        get
        {
            return MAX_INSTANCE_ID;
        }
    }
    /// <summary>
    /// Builds the "arguments" object for one activation frame. Scope and prototype
    /// come from the activation; callee is the called function; caller visibility
    /// depends on the language version (only JS &lt;= 1.3 exposes it as null here,
    /// otherwise it is tagged NotFound until set).
    /// </summary>
    public Arguments (BuiltinCall activation)
    {
        this.activation = activation;
        IScriptable parent = activation.ParentScope;
        ParentScope = parent;
        SetPrototype (ScriptableObject.GetObjectPrototype (parent));
        // Start out sharing the activation's argument array; it is copied lazily
        // on the first write or delete (see Put/Delete).
        args = activation.originalArgs;
        lengthObj = (int)args.Length;
        BuiltinFunction f = activation.function;
        calleeObj = f;
        Context.Versions version = f.LanguageVersion;
        if (version <= Context.Versions.JS1_3 && version != Context.Versions.Default) {
            callerObj = null;
        }
        else {
            callerObj = UniqueTag.NotFound;
        }
    }
    public override bool Has (int index, IScriptable start)
    {
        // An index within range is present unless it was deleted (tagged NotFound).
        if (0 <= index && index < args.Length) {
            if (args [index] != UniqueTag.NotFound) {
                return true;
            }
        }
        return base.Has (index, start);
    }
    public override object Get (int index, IScriptable start)
    {
        if (0 <= index && index < args.Length) {
            object value = args [index];
            if (value != UniqueTag.NotFound) {
                if (sharedWithActivation (index)) {
                    // The slot mirrors a named parameter: read the live value
                    // from the activation so writes via the name are visible.
                    BuiltinFunction f = activation.function;
                    string argName = f.getParamOrVarName (index);
                    value = activation.Get (argName, activation);
                    if (value == UniqueTag.NotFound)
                        Context.CodeBug ();
                }
                return value;
            }
        }
        return base.Get (index, start);
    }
    /// <summary>
    /// True when args[index] aliases a named parameter in the activation frame.
    /// A parameter hidden by a later parameter of the same name is not shared.
    /// </summary>
    private bool sharedWithActivation (int index)
    {
        BuiltinFunction f = activation.function;
        int definedCount = f.ParamCount;
        if (index < definedCount) {
            // Check if argument is not hidden by later argument with the same
            // name as hidden arguments are not shared with activation
            if (index < definedCount - 1) {
                string argName = f.getParamOrVarName (index);
                for (int i = index + 1; i < definedCount; i++) {
                    if (argName.Equals (f.getParamOrVarName (i))) {
                        return false;
                    }
                }
            }
            return true;
        }
        return false;
    }
    public override object Put (int index, IScriptable start, object value)
    {
        if (0 <= index && index < args.Length) {
            if (args [index] != UniqueTag.NotFound) {
                if (sharedWithActivation (index)) {
                    // Write through to the named parameter so both views stay in sync.
                    string argName;
                    argName = activation.function.getParamOrVarName (index);
                    return activation.Put (argName, activation, value);
                }
                lock (this) {
                    if (args [index] != UniqueTag.NotFound) {
                        if (args == activation.originalArgs) {
                            // BUG FIX: copy the ORIGINAL values into the new array
                            // before replacing the field. The previous code assigned
                            // the fresh (all-null) array to 'args' first and then
                            // copied it onto itself, discarding every argument value
                            // on the first write.
                            object [] copy = new object [args.Length];
                            args.CopyTo (copy, 0);
                            args = copy;
                        }
                        args [index] = value;
                        return value;
                    }
                }
            }
        }
        return base.Put (index, start, value);
    }
    public override void Delete (int index)
    {
        if (0 <= index && index < args.Length) {
            lock (this) {
                if (args [index] != UniqueTag.NotFound) {
                    if (args == activation.originalArgs) {
                        // BUG FIX: as in Put, clone the original array instead of
                        // copying the new empty array onto itself, so the other
                        // (non-deleted) argument values survive the first delete.
                        object [] copy = new object [args.Length];
                        args.CopyTo (copy, 0);
                        args = copy;
                    }
                    // NotFound marks a deleted slot; lookups then fall through to the super class.
                    args [index] = UniqueTag.NotFound;
                    return;
                }
            }
        }
        base.Delete (index);
    }
    #region InstanceIds
    private const int Id_callee = 1;
    private const int Id_length = 2;
    private const int Id_caller = 3;
    private const int MAX_INSTANCE_ID = 3;
    #endregion
    protected internal override int FindInstanceIdInfo (string s)
    {
        int id;
        // Generated string-switch: dispatch on the distinguishing 6th character
        // of "callee"/"length"/"caller", then verify the full string.
        #region Generated InstanceId Switch
        L0: {
            id = 0;
            string X = null;
            int c;
            if (s.Length == 6) {
                c = s [5];
                if (c == 'e') { X = "callee"; id = Id_callee; }
                else if (c == 'h') { X = "length"; id = Id_length; }
                else if (c == 'r') { X = "caller"; id = Id_caller; }
            }
            if (X != null && X != s && !X.Equals (s))
                id = 0;
        }
        EL0:
        #endregion
        if (id == 0)
            return base.FindInstanceIdInfo (s);
        int attr;
        switch (id) {
            case Id_callee:
            case Id_caller:
            case Id_length:
                attr = DONTENUM;
                break;
            default:
                throw new ApplicationException ();
        }
        return InstanceIdInfo (attr, id);
    }
    // #/string_id_map#
    protected internal override string GetInstanceIdName (int id)
    {
        switch (id) {
            case Id_callee:
                return "callee";
            case Id_length:
                return "length";
            case Id_caller:
                return "caller";
        }
        return null;
    }
    protected internal override object GetInstanceIdValue (int id)
    {
        switch (id) {
            case Id_callee:
                return calleeObj;
            case Id_length:
                return lengthObj;
            case Id_caller: {
                    object value = callerObj;
                    if (value == UniqueTag.NullValue) {
                        // Explicitly set to null for scripts (see field comment below).
                        value = null;
                    }
                    else if (value == null) {
                        // Lazily resolve the caller's own "arguments" object.
                        BuiltinCall caller = activation.parentActivationCall;
                        if (caller != null) {
                            value = caller.Get ("arguments", caller);
                        }
                        else {
                            value = null;
                        }
                    }
                    return value;
                }
        }
        return base.GetInstanceIdValue (id);
    }
    protected internal override void SetInstanceIdValue (int id, object value)
    {
        switch (id) {
            case Id_callee:
                calleeObj = value;
                return;
            case Id_length:
                lengthObj = value;
                return;
            case Id_caller:
                // null is stored as the NullValue tag so it can be told apart from "unset".
                callerObj = (value != null) ? value : UniqueTag.NullValue;
                return;
        }
        base.SetInstanceIdValue (id, value);
    }
    internal override object [] GetIds (bool getAll)
    {
        object [] ids = base.GetIds (getAll);
        if (getAll && args.Length != 0) {
            // Prepend any numeric indices the super class did not already report.
            bool [] present = null;
            int extraCount = args.Length;
            for (int i = 0; i != ids.Length; ++i) {
                object id = ids [i];
                if (id is int) {
                    int index = ((int)id);
                    if (0 <= index && index < args.Length) {
                        if (present == null) {
                            present = new bool [args.Length];
                        }
                        if (!present [index]) {
                            present [index] = true;
                            extraCount--;
                        }
                    }
                }
            }
            if (extraCount != 0) {
                // Copy the existing ids to the tail and fill the head with the
                // missing argument indices in ascending order.
                object [] tmp = new object [extraCount + ids.Length];
                Array.Copy (ids, 0, tmp, extraCount, ids.Length);
                ids = tmp;
                int offset = 0;
                for (int i = 0; i != args.Length; ++i) {
                    if (present == null || !present [i]) {
                        ids [offset] = (int)i;
                        ++offset;
                    }
                }
                if (offset != extraCount)
                    Context.CodeBug ();
            }
        }
        return ids;
    }
    // Fields to hold caller, callee and length properties,
    // where NOT_FOUND value tags deleted properties.
    // In addition if callerObj == NullValue, it tags null for scripts, as
    // initial callerObj == null means access to caller arguments available
    // only in JS <= 1.3 scripts
    private object callerObj;
    private object calleeObj;
    private object lengthObj;
    // The activation frame whose arguments this object exposes.
    private BuiltinCall activation;
    // Initially args holds activation.getOriginalArgs(), but any modification
    // of its elements triggers creation of a copy. If its element holds NOT_FOUND,
    // it indicates deleted index, in which case super class is queried.
    private object [] args;
}
}
| |
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
[ExecuteInEditMode]
public class OTSounds : MonoBehaviour {
    /// <summary>Global mute switch; forces the AudioListener volume to 0 while set.</summary>
    public bool mute = false;
    /// <summary>Master volume; clamped to the range [0..1] every Update.</summary>
    public float volume = 1f;
    static OTSounds _instance;
    /// <summary>Singleton instance, assigned in Awake.</summary>
    public static OTSounds instance
    {
        get
        {
            return _instance;
        }
    }
    /// <summary>Sound clips managed by this component (configured in the editor).</summary>
    public OTSoundClip[] soundClips;
    /// <summary>Lower-cased clip name -> clip lookup, filled in Awake.</summary>
    public Dictionary<string,OTSoundClip> lookup = new Dictionary<string, OTSoundClip>();
    /// <summary>Sounds currently scheduled or playing.</summary>
    public List<OTSound> sounds = new List<OTSound>();
    void Awake()
    {
        _instance = this;
        if (Application.isPlaying)
        {
            if (soundClips!=null)
            {
                for (int i=0; i<soundClips.Length; i++)
                {
                    soundClips[i].Load();
                    // First clip with a given name wins; later duplicates are ignored.
                    if (!lookup.ContainsKey(soundClips[i].name.ToLower()))
                        lookup.Add(soundClips[i].name.ToLower(),soundClips[i]);
                }
            }
        }
    }
    // Update is called once per frame
    void Update () {
        if (volume<0) volume = 0;
        if (volume>1) volume = 1;
        if (Application.isPlaying)
        {
            float vol = (mute)?0:volume;
            if (vol!=AudioListener.volume)
                AudioListener.volume = vol;
        }
        int s = 0;
        while (s<sounds.Count)
        {
            OTSound sound = sounds[s];
            if (!sound.ready)
            {
                // BUG FIX: move on to the next sound instead of re-testing this one
                // forever. The previous code 'continue'd without advancing 's', so a
                // single streamed clip that was not ready yet spun this loop and froze
                // the frame until its download happened to finish.
                s++;
                continue;
            }
            if (sound.tick == 0)
            {
                // lets skip first tick so sound will be started on 2nd tick
                // and we have a tick to setup the sound
                // (no s++ here on purpose: the same sound is reprocessed at tick 1)
                sound.tick++;
                continue;
            }
            // lets start play the sound if no delay on this 2nd tick
            if (sound.tick == 1 && !sound._boolMessage(OTSound.MessageType.HasDelay))
                sound._boolMessage(OTSound.MessageType.Play);
            sound.tick++;
            if (sound._boolMessage(OTSound.MessageType.HasDelay))
            {
                // Count down the start delay; play as soon as it expires.
                sound._boolMessage(OTSound.MessageType.Wait,Time.deltaTime);
                if (!sound._boolMessage(OTSound.MessageType.HasDelay))
                    sound._boolMessage(OTSound.MessageType.Play);
            }
            else
            {
                // Advance play time; destroy finished sounds, repeat looping ones.
                sound._boolMessage(OTSound.MessageType.Playing,Time.deltaTime);
                if (!sound.isPlaying)
                {
                    if (!sound._boolMessage(OTSound.MessageType.Repeat))
                    {
                        sound._boolMessage(OTSound.MessageType.Destroy);
                        continue;
                    }
                }
                else
                {
                    if (sound._boolMessage(OTSound.MessageType.Expired))
                    {
                        sound._boolMessage(OTSound.MessageType.Destroy);
                        continue;
                    }
                }
            }
            s++;
        }
    }
}
/// <summary>
/// One playable sound instance backed by an AudioSource on its own GameObject.
/// Lifecycle (tick counter, delay countdown, fades, repeat and destruction) is
/// driven by OTSounds.Update through the _boolMessage state machine.
/// </summary>
public class OTSound
{
    // Elapsed play time in seconds; advanced by the Playing message.
    public float time = 0;
    // Frame counter driven by OTSounds.Update; 0 = setup tick, playback starts at tick 1.
    public int tick = 0;
    // sound settings (working values; re-armed from the backing fields in InitSound)
    int count = 1;
    float delay = 0;
    float duration = 0;
    float fadeIn = 0;
    float fadeOut = 0;
    // private attributes
    OTSoundClip soundClip;
    AudioSource source;
    GameObject gameObject = null;
    bool firstPlay = true;
    // Backing values captured by the fluent setters (Count/Delay/...); copied
    // into the working fields each time InitSound() (re)starts the sound.
    int _count = 1;
    float _delay = 0;
    float _duration = 0;
    float _volume = 1;
    float _pan = 0;
    float _pitch = 1;
    string _name = "";
    // Shortcut to the global sound list owned by the OTSounds singleton.
    static List<OTSound> sounds
    {
        get
        {
            return OTSounds.instance.sounds;
        }
    }
    // Lower-cased clip name this sound was created from.
    public string name
    {
        get
        {
            return _name;
        }
    }
    /// <summary>
    /// Indicates if the sound is valid
    /// </summary>
    public bool valid
    {
        get
        {
            return (found && ready);
        }
    }
    /// <summary>
    /// Indicates if the sound was found (by name)
    /// </summary>
    public bool found
    {
        get
        {
            return (soundClip != null);
        }
    }
    /// <summary>
    /// Indicates if the sound is ready to be played
    /// </summary>
    public bool ready
    {
        get
        {
            return (soundClip != null && soundClip.ready && source!=null);
        }
    }
    /// <summary>
    /// Indicates if this sound is playing
    /// </summary>
    public bool isPlaying
    {
        get
        {
            return (ready && source.isPlaying);
        }
    }
    // Creates the carrier GameObject + AudioSource, re-arms the working values
    // from the backing fields and registers this sound with the manager.
    void InitSound()
    {
        gameObject = new GameObject();
        gameObject.name = "Orthello sound "+soundClip.name;
        source = gameObject.AddComponent<AudioSource>();
        tick = 0;
        time = 0;
        count = _count;
        delay = _delay;
        duration = _duration;
        source.clip = soundClip.clip;
        source.pan = _pan;
        source.pitch = _pitch;
        source.volume = _volume;
        // count == -1 means loop forever (see Loop()).
        if (count == -1)
            source.loop = true;
        sounds.Add(this);
    }
    /// <summary>
    /// Creates a sound from the clip registered under the given (case-insensitive) name.
    /// If the name is unknown the sound is left in a not-found state and never starts.
    /// </summary>
    public OTSound(string name)
    {
        _name = name.ToLower();
        if (OTSounds.instance.lookup.ContainsKey(_name))
            soundClip = OTSounds.instance.lookup[_name];
        if (soundClip !=null)
            InitSound();
    }
    // Messages understood by the _boolMessage state machine (driven by OTSounds.Update).
    public enum MessageType { Expired, HasDelay, Wait, Playing, Play, Destroy, Repeat };
    /// <summary>
    /// State-machine dispatcher; val carries a delta-time for Wait/Playing.
    /// Returns false when not ready, when Expired/HasDelay tests fail, or when
    /// Repeat has exhausted its count; true otherwise.
    /// </summary>
    public bool _boolMessage(MessageType mt, float val)
    {
        if (!ready)
            return false;
        switch(mt)
        {
            case MessageType.Expired:
                // duration == 0 means "no fixed duration" and never expires here.
                return (duration>0 && time > duration);
            case MessageType.HasDelay:
                return (delay > 0);
            case MessageType.Wait:
                // Count the start delay down by the supplied delta-time.
                delay -= val;
                break;
            case MessageType.Playing:
                time += val;
                if (fadeIn!=0)
                {
                    if (time > fadeIn && time < duration - fadeOut)
                    {
                        // NOTE(review): restores volume to 1 rather than _volume after
                        // the fade-in completes, which looks inconsistent with the ramp
                        // below that scales by _volume - confirm full volume is intended.
                        if (source.volume!=1)
                            source.volume = 1;
                    }
                    else
                    if (time < fadeIn)
                        source.volume = (time / fadeIn) * _volume;
                }
                if (fadeOut!=0)
                {
                    // Linear ramp down over the last fadeOut seconds of the duration.
                    if (time > duration - fadeOut)
                        source.volume = _volume - ((time - (duration - fadeOut))/fadeOut);
                }
                break;
            case MessageType.Play:
                if (firstPlay)
                {
                    firstPlay = false;
                    // Derive a total duration from clip length x play count
                    // (pitch shortens/stretches the effective clip length).
                    if (duration == 0 && count>-1)
                        duration = count * (source.clip.length / ((source.pitch>0)?source.pitch:1));
                }
                if (source.isPlaying)
                    source.Stop();
                source.Play();
                break;
            case MessageType.Destroy:
                // Stop playback, release the carrier GameObject and deregister.
                if (source.isPlaying)
                    source.Stop();
                if (gameObject!=null)
                {
                    OT.Destroy(gameObject);
                    gameObject = null;
                }
                if (sounds.Contains(this))
                    sounds.Remove(this);
                firstPlay = true;
                break;
            case MessageType.Repeat:
                // Decrement remaining plays; false ends the sound, true replays it.
                count--;
                if (count == 0)
                    return false;
                else
                {
                    source.Play();
                    return true;
                }
        }
        return true;
    }
    // Convenience overload for messages that take no delta-time value.
    public bool _boolMessage(MessageType mt)
    {
        return _boolMessage(mt,0);
    }
    /// <summary>
    /// Stop the sound
    /// </summary>
    public void Stop()
    {
        _boolMessage(MessageType.Destroy);
    }
    // Creates a fresh instance with the same name and backing settings.
    OTSound Clone()
    {
        OTSound s = new OTSound(_name);
        s.Count(_count);
        s.Pan(_pan);
        s.Pitch(_pitch);
        s.Delay(_delay);
        s.Volume(_volume);
        s.Duration(_duration);
        s.FadeIn(fadeIn);
        s.FadeOut(fadeOut);
        return s;
    }
    /// <summary>
    /// Plays the sound
    /// </summary>
    /// <param name='clone'>
    /// If true, a new sound instance (clone) is launched and played
    /// If false, the current sound is played or stopped and re-played
    /// </param>
    public OTSound Play(bool clone)
    {
        if (clone)
        {
            OTSound s = Clone();
            s.Play(false);
            return s;
        }
        else
        {
            // Destroy tears down the old source; InitSound re-arms and re-registers.
            _boolMessage(MessageType.Destroy);
            InitSound();
            return this;
        }
    }
    /// <summary>
    /// (Re)-plays the sound
    /// </summary>
    public OTSound Play()
    {
        return Play(false);
    }
    /// <summary>
    /// Plays the sound as a new instance (clone)
    /// </summary>
    public OTSound PlayClone()
    {
        return Play(true);
    }
    /// <summary>
    /// Sets this sound in idle state
    /// </summary>
    public void Idle()
    {
        _boolMessage(MessageType.Destroy);
    }
    /// <summary>
    /// Sets the sound source pitch value
    /// </summary>
    public OTSound Pitch(float val)
    {
        _pitch = val;
        if (source!=null)
            source.pitch = val;
        return this;
    }
    /// <summary>
    /// Sets the sound source pan value
    /// </summary>
    public OTSound Pan(float val)
    {
        _pan = val;
        if (source!=null)
            source.pan = val;
        return this;
    }
    /// <summary>
    /// Sets the number of times the sound will be played (-1 = loop forever)
    /// </summary>
    public OTSound Count(int val)
    {
        _count = val;
        count = val;
        if (val == -1 && source!=null)
            source.loop = true;
        return this;
    }
    /// <summary>
    /// Sets sound volume
    /// </summary>
    public OTSound Volume(float val)
    {
        _volume = val;
        if (source!=null)
            source.volume = val;
        return this;
    }
    /// <summary>
    /// Sound will be looping
    /// </summary>
    public OTSound Loop()
    {
        return Count(-1);
    }
    /// <summary>
    /// Sets sound delay
    /// </summary>
    public OTSound Delay(float val)
    {
        _delay = val;
        delay = val;
        return this;
    }
    /// <summary>
    /// Sets volume FadeIn Time (playback then starts at volume 0 and ramps up)
    /// </summary>
    public OTSound FadeIn(float time)
    {
        fadeIn = time;
        if (source!=null)
            source.volume = 0;
        return this;
    }
    /// <summary>
    /// Sets volume FadeOut Time (volume ramps down over the last seconds of the duration)
    /// </summary>
    public OTSound FadeOut(float time)
    {
        fadeOut = time;
        return this;
    }
    /// <summary>
    /// Sets sound duration
    /// </summary>
    public OTSound Duration(float val)
    {
        _duration = val;
        duration = val;
        return this;
    }
}
[System.Serializable]
public class OTSoundClip
{
    public string name;
    public AudioClip clip;
    public string url = "";
    /// <summary>
    /// True when this clip can be played; streamed clips (with a url) report
    /// the readiness of their download, bundled clips are always ready.
    /// </summary>
    public bool ready
    {
        get
        {
            if (url == "")
                return true;
            return clip.isReadyToPlay;
        }
    }
    /// <summary>
    /// Starts streaming the clip from its url, if one was provided.
    /// </summary>
    public void Load()
    {
        if (url == "")
            return;
        WWW download = new WWW(url);
        clip = download.GetAudioClip(false, true);
    }
}
| |
using System;
using System.IO;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Reflection;
using System.Threading;
using MonoDevelop.Core;
using MonoDevelop.Projects;
using MonoDevelop.CodeAnalysis.Gui;
namespace MonoDevelop.CodeAnalysis {
/// <summary>
/// Class that encapsulates independent analyzers (Gendarme, Smokey), loads them
/// and performs analysis.
/// </summary>
static class MainAnalyzer {
    // Guards against concurrent analysis runs; set in BeginAnalysis, cleared when the worker finishes.
    private static bool is_busy = false;
    // Project currently being analyzed, or null when idle.
    private static DotNetProject current_project = null;
    // Lazily loaded analyzer plugins; null until first use.
    private static List<IAnalyzer> analyzers;
    // Loads the analyzer plugins exactly once, on first demand.
    static void LoadAnalyzersIfNeccessary ()
    {
        if (analyzers == null)
            LoadAnalyzers ();
    }
    /// <summary>
    /// This method loads available analyzers from the same directory
    /// where current assembly is located (most probably, MD AddIns directory).
    /// Every analyzer plugin must have a file name like
    /// 'MonoDevelop.CodeAnalysis.*.dll' in order to be loaded.
    /// Also each plugin assembly must have a [AssemblyAnalyzer] attribute
    /// applied, pointing to the concrete class implementing IAnalyzer.
    ///
    /// example: MonoDevelop.CodeAnalysis.Gendarme.dll has
    /// [assembly:AssemblyAnalyzer (typeof (GendarmeAnalyzer))]
    /// </summary>
    static void LoadAnalyzers ()
    {
        analyzers = new List<IAnalyzer> ();
        string path = Path.GetDirectoryName (Assembly.GetExecutingAssembly ().Location);
        foreach (string dll in Directory.GetFiles (path, "MonoDevelop.CodeAnalysis.*.dll", SearchOption.TopDirectoryOnly)) {
            try {
                Assembly lib = Assembly.LoadFile (dll);
                IAnalyzer analyzer = CreateAnalyzer (lib);
                if (analyzer != null)
                    analyzers.Add (analyzer);
            } catch {
                // Best-effort plugin loading: a broken or unloadable assembly
                // must not prevent the remaining analyzers from registering.
                continue;
            }
        }
    }
    /// <summary>
    /// Creates and returns analyzer (if any) for a plugin assembly.
    /// </summary>
    static IAnalyzer CreateAnalyzer (Assembly library)
    {
        object [] attrs = library.GetCustomAttributes (typeof (AssemblyAnalyzerAttribute), false);
        if (attrs.Length == 0)
            return null;
        // Renamed from 'Type' - the old local name shadowed System.Type.
        Type analyzerType = ((AssemblyAnalyzerAttribute) attrs [0]).Type;
        return (IAnalyzer) Activator.CreateInstance (analyzerType);
    }
    /// <value>
    /// Indicates if the analyzer is busy now and cannot perform another analysis.
    /// </value>
    public static bool IsBusy {
        get { return is_busy; }
    }
    /// <value>
    /// Points to the current project being analyzed (if any).
    /// </value>
    public static DotNetProject CurrentProject {
        get { return current_project; }
    }
    /// <summary>
    /// Indicates if analyzer can handle specified type of entry.
    /// </summary>
    public static bool CanAnalyze (object entry)
    {
        return entry is DotNetProject || entry is Solution;
    }
    /// <summary>
    /// Begins analysis operation (and starts new thread) for the specified entry.
    /// </summary>
    /// <exception cref="InvalidOperationException">Thrown when an analysis is already running.</exception>
    public static void BeginAnalysis (SolutionItem entry)
    {
        if (is_busy) // this should never be true (lock GUI, etc)
            throw new InvalidOperationException ();
        is_busy = true;
        Thread thread = new Thread (DoAnalyze);
        thread.IsBackground = true;
        thread.Start (entry);
    }
    /// <summary>
    /// Starts main analysis and reports the results.
    /// </summary>
    static void DoAnalyze (object param)
    {
        SolutionItem entry = param as SolutionItem;
        try {
            ResultsReporter.AnalysisStarted (entry.Name);
            AnalyzeCombineEntry (entry, 1.0);
        } catch (CodeAnalysisException ex) {
            ResultsReporter.ReportError (ex);
        } finally {
            // Always release the busy flag so the GUI can start another run.
            is_busy = false;
            ResultsReporter.AnalysisFinished ();
        }
    }
    /// <summary>
    /// Runs analysis on a combine entry, if applicable.
    /// </summary>
    static void AnalyzeCombineEntry (object entry, double work)
    {
        if (entry is Solution)
            AnalyzeCombine ((Solution)entry, work);
        else if (entry is DotNetProject)
            AnalyzeProject ((DotNetProject)entry, work);
    }
    /// <summary>
    /// Enumerates each entry in a combine and runs analysis for it.
    /// </summary>
    static void AnalyzeCombine (Solution combine, double work)
    {
        List<SolutionItem> entriesToAnalyze = new List<SolutionItem> ();
        ReadOnlyCollection<SolutionItem> children = combine.GetAllSolutionItems ();
        foreach (object child in children)
            if (child != combine && CanAnalyze (child))
                entriesToAnalyze.Add (child as SolutionItem);
        // Split the progress budget evenly across the analyzable entries.
        double entryWork = work / entriesToAnalyze.Count;
        foreach (SolutionItem childEntry in entriesToAnalyze) {
            AnalyzeCombineEntry (childEntry, entryWork);
        }
    }
    /// <summary>
    /// Performs analysis on specified project and reports violations to GUI.
    /// </summary>
    static void AnalyzeProject (DotNetProject project, double work)
    {
        current_project = project;
        try {
            IEnumerable<IViolation> violations = RunAnalyzers (current_project, work);
            ResultsReporter.ReportViolations (violations);
        } catch (CodeAnalysisException) {
            throw;
        } catch (Exception ex) {
            // Wrap unexpected failures so DoAnalyze can report them uniformly.
            throw new CodeAnalysisException (AddinCatalog.GetString ("Analysis failed because of unexpected error: {0}. Please, contact the plugin developers.", ex), ex);
        } finally {
            current_project = null;
        }
    }
    /// <summary>
    /// Determines which analyzers to run, which rule sets to use (TODO)
    /// and invokes the runners.
    /// </summary>
    static IEnumerable<IViolation> RunAnalyzers (DotNetProject project, double work)
    {
        string dll = project.GetOutputFileName (ConfigurationSelector.Default);
        if (!File.Exists (dll))
            yield break;
        LoadAnalyzersIfNeccessary ();
        if (analyzers.Count == 0)
            yield break;
        double analyzerWork = work / analyzers.Count;
        foreach (IAnalyzer analyzer in analyzers) {
            IEnumerable<IRule> ruleSet = GetRuleSet (project, analyzer.GetRuleLoader ());
            IRunner runner = analyzer.GetRunner ();
            IEnumerable<IViolation> violations = runner.Run (dll, ruleSet);
            foreach (IViolation vio in violations)
                yield return vio;
            ResultsReporter.WorkComplete += analyzerWork;
        }
    }
    /// <summary>
    /// Gets rule set for specified project (TODO: read project configuration).
    /// </summary>
    static IEnumerable<IRule> GetRuleSet (DotNetProject project, IRuleLoader ruleLoader)
    {
        // TODO: retrieve rule set from project configuration
        return ruleLoader.GetRules ();
    }
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
namespace Internal.Runtime
{
/// <summary>
/// Represents the flags stored in the <c>_usFlags</c> field of a <c>System.Runtime.EEType</c>.
/// </summary>
[Flags]
internal enum EETypeFlags : ushort
{
    /// <summary>
    /// There are four kinds of EETypes, defined in <c>Kinds</c>.
    /// </summary>
    EETypeKindMask = 0x0003,
    /// <summary>
    /// This flag is set when m_RelatedType is in a different module. In that case, _pRelatedType
    /// actually points to an IAT slot in this module, which then points to the desired EEType in the
    /// other module. In other words, there is an extra indirection through m_RelatedType to get to
    /// the related type in the other module. When this flag is set, it is expected that you use the
    /// "_ppXxxxViaIAT" member of the RelatedTypeUnion for the particular related type you're
    /// accessing.
    /// </summary>
    RelatedTypeViaIATFlag = 0x0004,
    /// <summary>
    /// This EEType represents a value type.
    /// </summary>
    ValueTypeFlag = 0x0008,
    /// <summary>
    /// This EEType represents a type which requires finalization.
    /// </summary>
    HasFinalizerFlag = 0x0010,
    /// <summary>
    /// This type contains GC pointers.
    /// </summary>
    HasPointersFlag = 0x0020,
    /// <summary>
    /// Type implements ICastable to allow dynamic resolution of interface casts.
    /// </summary>
    ICastableFlag = 0x0040,
    /// <summary>
    /// This type is generic and one or more of its type parameters is co- or contra-variant. This
    /// only applies to interface and delegate types.
    /// </summary>
    GenericVarianceFlag = 0x0080,
    /// <summary>
    /// This type has optional fields present.
    /// </summary>
    OptionalFieldsFlag = 0x0100,
    /// <summary>
    /// This EEType represents an interface.
    /// </summary>
    IsInterfaceFlag = 0x0200,
    /// <summary>
    /// This type is generic.
    /// </summary>
    IsGenericFlag = 0x0400,
    /// <summary>
    /// We are storing a CorElementType in the upper bits for unboxing enums.
    /// The element type occupies the top 5 bits (mask 0xf800, shift 11).
    /// </summary>
    CorElementTypeMask = 0xf800,
    CorElementTypeShift = 11,
    /// <summary>
    /// Single mask to check TypeKind and two flags. When non-zero, casting is more complicated.
    /// </summary>
    ComplexCastingMask = EETypeKindMask | RelatedTypeViaIATFlag | GenericVarianceFlag
};
internal enum EETypeKind : ushort
{
    /// <summary>
    /// Represents a standard ECMA type
    /// </summary>
    CanonicalEEType = 0x0000,
    /// <summary>
    /// Represents a type cloned from another EEType
    /// </summary>
    ClonedEEType = 0x0001,
    /// <summary>
    /// Represents a parameterized type. For example a single dimensional array or pointer type
    /// </summary>
    ParameterizedEEType = 0x0002,
    /// <summary>
    /// Represents an uninstantiated generic type definition
    /// </summary>
    GenericTypeDefEEType = 0x0003,
}
/// <summary>
/// These are flag values that are rarely set for types. If any of them are set then an optional field will
/// be associated with the EEType to represent them.
/// </summary>
[Flags]
internal enum EETypeRareFlags : int
{
    /// <summary>
    /// This type requires 8-byte alignment for its fields on certain platforms (only ARM currently).
    /// </summary>
    RequiresAlign8Flag = 0x00000001,
    /// <summary>
    /// Type implements ICastable to allow dynamic resolution of interface casts.
    /// </summary>
    UNUSED1 = 0x00000002,
    /// <summary>
    /// Type is an instantiation of Nullable&lt;T&gt;.
    /// </summary>
    IsNullableFlag = 0x00000004,
    /// <summary>
    /// Nullable target type stashed in the EEType is indirected via the IAT.
    /// </summary>
    NullableTypeViaIATFlag = 0x00000008,
    /// <summary>
    /// This EEType was created by generic instantiation loader
    /// </summary>
    IsDynamicTypeFlag = 0x00000010,
    /// <summary>
    /// This EEType has a Class Constructor
    /// </summary>
    HasCctorFlag = 0x0000020,
    /// <summary>
    /// Old unused flag
    /// </summary>
    UNUSED2 = 0x00000040,
    /// <summary>
    /// This EEType was constructed from a universal canonical template, and has
    /// its own dynamically created DispatchMap (does not use the DispatchMap of its template type)
    /// </summary>
    HasDynamicallyAllocatedDispatchMapFlag = 0x00000080,
    /// <summary>
    /// This EEType represents a structure that is an HFA (homogeneous floating-point aggregate)
    /// </summary>
    IsHFAFlag = 0x00000100,
    /// <summary>
    /// This EEType has sealed vtable entries
    /// </summary>
    HasSealedVTableEntriesFlag = 0x00000200,
    /// <summary>
    /// This dynamically created type has gc statics
    /// </summary>
    IsDynamicTypeWithGcStatics = 0x00000400,
    /// <summary>
    /// This dynamically created type has non gc statics
    /// </summary>
    IsDynamicTypeWithNonGcStatics = 0x00000800,
    /// <summary>
    /// This dynamically created type has thread statics
    /// </summary>
    IsDynamicTypeWithThreadStatics = 0x00001000,
    /// <summary>
    /// This EEType contains a pointer to dynamic module information
    /// </summary>
    HasDynamicModuleFlag = 0x00002000,
    /// <summary>
    /// This EEType is an abstract class (but not an interface).
    /// </summary>
    IsAbstractClassFlag = 0x00004000,
    /// <summary>
    /// This EEType is for a Byref-like class (TypedReference, Span&lt;T&gt;,...)
    /// </summary>
    IsByRefLikeFlag = 0x00008000,
}
    /// <summary>
    /// Identifies the optional/trailing fields an EEType may carry.
    /// NOTE(review): presumably used to locate these fields when computing offsets,
    /// so the ordering is likely significant — confirm against the runtime layout code.
    /// </summary>
    internal enum EETypeField
    {
        ETF_InterfaceMap,
        ETF_Finalizer,
        ETF_OptionalFieldsPtr,
        ETF_NullableType,
        ETF_SealedVirtualSlots,
        ETF_DynamicTemplateType,
        ETF_DynamicDispatchMap,
        ETF_DynamicModule,
        ETF_GenericDefinition,
        ETF_GenericComposition,
        ETF_DynamicGcStatics,
        ETF_DynamicNonGcStatics,
        ETF_DynamicThreadStaticOffset,
    }
    /// <summary>
    /// CLR element-type codes (mirrors the CorElementType encoding from ECMA-335 /
    /// corhdr.h). Numeric gaps correspond to element types not listed here; the
    /// values must not be renumbered.
    /// </summary>
    internal enum CorElementType
    {
        ELEMENT_TYPE_END = 0x00,
        ELEMENT_TYPE_VOID = 0x1,
        ELEMENT_TYPE_BOOLEAN = 0x2,
        ELEMENT_TYPE_CHAR = 0x3,
        ELEMENT_TYPE_I1 = 0x4,
        ELEMENT_TYPE_U1 = 0x5,
        ELEMENT_TYPE_I2 = 0x6,
        ELEMENT_TYPE_U2 = 0x7,
        ELEMENT_TYPE_I4 = 0x8,
        ELEMENT_TYPE_U4 = 0x9,
        ELEMENT_TYPE_I8 = 0xa,
        ELEMENT_TYPE_U8 = 0xb,
        ELEMENT_TYPE_R4 = 0xc,
        ELEMENT_TYPE_R8 = 0xd,
        ELEMENT_TYPE_STRING = 0xe,
        ELEMENT_TYPE_PTR = 0xf,
        ELEMENT_TYPE_BYREF = 0x10,
        ELEMENT_TYPE_VALUETYPE = 0x11,
        ELEMENT_TYPE_CLASS = 0x12,
        ELEMENT_TYPE_ARRAY = 0x14,
        ELEMENT_TYPE_TYPEDBYREF = 0x16,
        ELEMENT_TYPE_I = 0x18,
        ELEMENT_TYPE_U = 0x19,
        ELEMENT_TYPE_FNPTR = 0x1b,
        ELEMENT_TYPE_OBJECT = 0x1c,
        ELEMENT_TYPE_SZARRAY = 0x1d,
    }
    /// <summary>
    /// Tags the individual entries that can appear in an EEType's optional fields blob.
    /// </summary>
    internal enum EETypeOptionalFieldTag : byte
    {
        /// <summary>
        /// Extra <c>EEType</c> flags not commonly used such as HasClassConstructor
        /// </summary>
        RareFlags,
        /// <summary>
        /// VTable slot of <see cref="ICastable.IsInstanceOfInterface"/> for direct invocation without interface dispatch overhead
        /// </summary>
        ICastableIsInstSlot,
        /// <summary>
        /// Index of the dispatch map pointer in the DispatchMap table
        /// </summary>
        DispatchMap,
        /// <summary>
        /// Padding added to a value type when allocated on the GC heap
        /// </summary>
        ValueTypeFieldPadding,
        /// <summary>
        /// VTable slot of <see cref="ICastable.GetImplType"/> for direct invocation without interface dispatch overhead
        /// </summary>
        ICastableGetImplTypeSlot,
        /// <summary>
        /// Offset in Nullable&lt;T&gt; of the value field
        /// </summary>
        NullableValueOffset,
        // Number of field types we support
        Count
    }
    // Keep this synchronized with GenericVarianceType in rhbinder.h.
    /// <summary>
    /// Variance of a generic type parameter.
    /// NOTE(review): ArrayCovariant's value (0x20) is out of sequence with the
    /// others — presumably chosen to match rhbinder.h; confirm there before changing.
    /// </summary>
    internal enum GenericVariance : byte
    {
        NonVariant = 0,
        Covariant = 1,
        Contravariant = 2,
        ArrayCovariant = 0x20,
    }
    /// <summary>
    /// Sentinel BaseSize values that distinguish pointer and byref EETypes from arrays.
    /// </summary>
    internal static class ParameterizedTypeShapeConstants
    {
        // NOTE: Parameterized type kind is stored in the BaseSize field of the EEType.
        // Array types use their actual base size. Pointer and ByRef types are never boxed,
        // so we can reuse the EEType BaseSize field to indicate the kind.
        // It's important that these values always stay lower than any valid value of a base
        // size for an actual array.
        public const int Pointer = 0;
        public const int ByRef = 1;
    }
    /// <summary>
    /// Component (element) size of System.String: the size of one UTF-16 code unit.
    /// </summary>
    internal static class StringComponentSize
    {
        public const int Value = sizeof(char);
    }
}
| |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#if !SILVERLIGHT
using System;
using System.Collections.Generic;
using System.Text;
using System.Diagnostics.Contracts;
namespace System.Net.NetworkInformation {
    // CodeContracts reference declaration mirroring
    // System.Net.NetworkInformation.NetworkInterfaceComponent.
    // Summary:
    //     Specifies the Internet Protocol versions that are supported by a network
    //     interface.
    public enum NetworkInterfaceComponent
    {
        // Summary:
        //     Internet Protocol version 4.
        IPv4 = 0,
        //
        // Summary:
        //     Internet Protocol version 6.
        IPv6 = 1,
    }
    // CodeContracts reference declaration mirroring
    // System.Net.NetworkInformation.OperationalStatus.
    // Summary:
    //     Specifies the operational state of a network interface.
    public enum OperationalStatus
    {
        // Summary:
        //     The network interface is up; it can transmit data packets.
        Up = 1,
        //
        // Summary:
        //     The network interface is unable to transmit data packets.
        Down = 2,
        //
        // Summary:
        //     The network interface is running tests.
        Testing = 3,
        //
        // Summary:
        //     The network interface status is not known.
        Unknown = 4,
        //
        // Summary:
        //     The network interface is not in a condition to transmit data packets; it
        //     is waiting for an external event.
        Dormant = 5,
        //
        // Summary:
        //     The network interface is unable to transmit data packets because of a missing
        //     component, typically a hardware component.
        NotPresent = 6,
        //
        // Summary:
        //     The network interface is unable to transmit data packets because it runs
        //     on top of one or more other interfaces, and at least one of these "lower
        //     layer" interfaces is down.
        LowerLayerDown = 7,
    }
    // CodeContracts reference declaration mirroring
    // System.Net.NetworkInformation.NetworkInterfaceType.
    // NOTE(review): the numeric values appear to follow the IANA ifType
    // assignments (e.g. ethernetCsmacd = 6, iso88025TokenRing = 9) — confirm
    // before adding members.
    // Summary:
    //     Specifies types of network interfaces.
    public enum NetworkInterfaceType
    {
        // Summary:
        //     The interface type is not known.
        Unknown = 1,
        //
        // Summary:
        //     The network interface uses an Ethernet connection. Ethernet is defined in
        //     IEEE standard 802.3.
        Ethernet = 6,
        //
        // Summary:
        //     The network interface uses a Token-Ring connection. Token-Ring is defined
        //     in IEEE standard 802.5.
        TokenRing = 9,
        //
        // Summary:
        //     The network interface uses a Fiber Distributed Data Interface (FDDI) connection.
        //     FDDI is a set of standards for data transmission on fiber optic lines in
        //     a local area network.
        Fddi = 15,
        //
        // Summary:
        //     The network interface uses a basic rate interface Integrated Services Digital
        //     Network (ISDN) connection. ISDN is a set of standards for data transmission
        //     over telephone lines.
        BasicIsdn = 20,
        //
        // Summary:
        //     The network interface uses a primary rate interface Integrated Services Digital
        //     Network (ISDN) connection. ISDN is a set of standards for data transmission
        //     over telephone lines.
        PrimaryIsdn = 21,
        //
        // Summary:
        //     The network interface uses a Point-To-Point protocol (PPP) connection. PPP
        //     is a protocol for data transmission using a serial device.
        Ppp = 23,
        //
        // Summary:
        //     The network interface is a loopback adapter. Such interfaces are used primarily
        //     for testing; no traffic is sent.
        Loopback = 24,
        //
        // Summary:
        //     The network interface uses an Ethernet 3 megabit/second connection. This
        //     version of Ethernet is defined in IETF RFC 895.
        Ethernet3Megabit = 26,
        //
        // Summary:
        //     The network interface uses a Serial Line Internet Protocol (SLIP) connection.
        //     SLIP is defined in IETF RFC 1055.
        Slip = 28,
        //
        // Summary:
        //     The network interface uses asynchronous transfer mode (ATM) for data transmission.
        Atm = 37,
        //
        // Summary:
        //     The network interface uses a modem.
        GenericModem = 48,
        //
        // Summary:
        //     The network interface uses a Fast Ethernet connection. Fast Ethernet provides
        //     a data rate of 100 megabits per second, known as 100BaseT.
        FastEthernetT = 62,
        //
        // Summary:
        //     The network interface uses a connection configured for ISDN and the X.25
        //     protocol. X.25 allows computers on public networks to communicate using an
        //     intermediary computer.
        Isdn = 63,
        //
        // Summary:
        //     The network interface uses a Fast Ethernet connection over optical fiber.
        //     This type of connection is also known as 100BaseFX.
        FastEthernetFx = 69,
        //
        // Summary:
        //     The network interface uses a wireless LAN connection (IEEE 802.11 standard).
        Wireless80211 = 71,
        //
        // Summary:
        //     The network interface uses an Asymmetric Digital Subscriber Line (ADSL).
        AsymmetricDsl = 94,
        //
        // Summary:
        //     The network interface uses a Rate Adaptive Digital Subscriber Line (RADSL).
        RateAdaptDsl = 95,
        //
        // Summary:
        //     The network interface uses a Symmetric Digital Subscriber Line (SDSL).
        SymmetricDsl = 96,
        //
        // Summary:
        //     The network interface uses a Very High Data Rate Digital Subscriber Line
        //     (VDSL).
        VeryHighSpeedDsl = 97,
        //
        // Summary:
        //     The network interface uses Internet Protocol (IP) in combination with asynchronous
        //     transfer mode (ATM) for data transmission.
        IPOverAtm = 114,
        //
        // Summary:
        //     The network interface uses a gigabit Ethernet connection.
        GigabitEthernet = 117,
        //
        // Summary:
        //     The network interface uses a tunnel connection.
        Tunnel = 131,
        //
        // Summary:
        //     The network interface uses a Multirate Digital Subscriber Line.
        MultiRateSymmetricDsl = 143,
        //
        // Summary:
        //     The network interface uses a High Performance Serial Bus.
        HighPerformanceSerialBus = 144,
    }
    // CodeContracts reference stub for System.Net.NetworkInformation.NetworkInterface:
    // members are declared extern/abstract and carry no implementation here; [Pure]
    // marks members CodeContracts treats as side-effect free.
    // Summary:
    //     Provides configuration and statistical information for a network interface.
    public abstract class NetworkInterface {
        // Summary:
        //     Initializes a new instance of the System.Net.NetworkInformation.NetworkInterface
        //     class.
        extern protected NetworkInterface();
        // Summary:
        //     Gets the description of the interface.
        //
        // Returns:
        //     A System.String that describes this interface.
        public abstract string Description { get; }
        //
        // Summary:
        //     Gets the identifier of the network adapter.
        //
        // Returns:
        //     A System.String that contains the identifier.
        public abstract string Id { get; }
        //
        // Summary:
        //     Gets a System.Boolean value that indicates whether the network interface
        //     is set to only receive data packets.
        //
        // Returns:
        //     true if the interface only receives network traffic; otherwise, false.
        //
        // Exceptions:
        //   System.PlatformNotSupportedException:
        //     This property is not valid on computers running operating systems earlier
        //     than Windows XP.
        public abstract bool IsReceiveOnly { get; }
        //
        // Summary:
        //     Gets the index of the loopback interface.
        //
        // Returns:
        //     A System.Int32 that contains the index for the loopback interface.
        extern public static int LoopbackInterfaceIndex { get; }
        //
        // Summary:
        //     Gets the name of the network adapter.
        //
        // Returns:
        //     A System.String that contains the adapter name.
        public abstract string Name { get; }
        //
        // Summary:
        //     Gets the interface type.
        //
        // Returns:
        //     An System.Net.NetworkInformation.NetworkInterfaceType value that specifies
        //     the network interface type.
        public abstract NetworkInterfaceType NetworkInterfaceType { get; }
        //
        // Summary:
        //     Gets the current operational state of the network connection.
        //
        // Returns:
        //     One of the System.Net.NetworkInformation.OperationalStatus values.
        public abstract OperationalStatus OperationalStatus { get; }
        //
        // Summary:
        //     Gets the speed of the network interface.
        //
        // Returns:
        //     A System.Int64 value that specifies the speed in bits per second.
        public abstract long Speed { get; }
        //
        // Summary:
        //     Gets a System.Boolean value that indicates whether the network interface
        //     is enabled to receive multicast packets.
        //
        // Returns:
        //     true if the interface receives multicast packets; otherwise, false.
        //
        // Exceptions:
        //   System.PlatformNotSupportedException:
        //     This property is not valid on computers running operating systems earlier
        //     than Windows XP.
        public abstract bool SupportsMulticast { get; }
        // Summary:
        //     Returns objects that describe the network interfaces on the local computer.
        //
        // Returns:
        //     A System.Net.NetworkInformation.NetworkInterface array that contains objects
        //     that describe the available network interfaces, or an empty array if no interfaces
        //     are detected.
        //
        // Exceptions:
        //   System.Net.NetworkInformation.NetworkInformationException:
        //     A Windows system function call failed.
        [Pure]
        extern public static NetworkInterface[] GetAllNetworkInterfaces();
        //
        // Summary:
        //     Returns an object that describes the configuration of this network interface.
        //
        // Returns:
        //     An System.Net.NetworkInformation.IPInterfaceProperties object that describes
        //     this network interface.
        [Pure]
        public abstract IPInterfaceProperties GetIPProperties();
        //
        // Summary:
        //     Gets the IPv4 statistics.
        //
        // Returns:
        //     An System.Net.NetworkInformation.IPv4InterfaceStatistics object.
        [Pure]
        public abstract IPv4InterfaceStatistics GetIPv4Statistics();
        //
        // Summary:
        //     Indicates whether any network connection is available.
        //
        // Returns:
        //     true if a network connection is available; otherwise, false.
        [Pure]
        extern public static bool GetIsNetworkAvailable();
        //
        // Summary:
        //     Returns the Media Access Control (MAC) address for this adapter.
        //
        // Returns:
        //     A System.Net.NetworkInformation.PhysicalAddress object that contains the
        //     physical address.
        [Pure]
        public abstract PhysicalAddress GetPhysicalAddress();
        //
        // Summary:
        //     Gets a System.Boolean value that indicates whether the interface supports
        //     the specified protocol.
        //
        // Parameters:
        //   networkInterfaceComponent:
        //     A System.Net.NetworkInformation.NetworkInterfaceComponent value.
        //
        // Returns:
        //     true if the specified protocol is supported; otherwise, false.
        [Pure]
        public abstract bool Supports(NetworkInterfaceComponent networkInterfaceComponent);
    }
}
#endif
| |
/*
Project Orleans Cloud Service SDK ver. 1.0
Copyright (c) Microsoft Corporation
All rights reserved.
MIT License
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and
associated documentation files (the ""Software""), to deal in the Software without restriction,
including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense,
and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so,
subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace Orleans.Runtime
{
    /// <summary>
    /// Grain-facing view of the consistent ring: exposes the local silo's range
    /// and change-notification subscription.
    /// </summary>
    internal interface IConsistentRingProviderForGrains
    {
        /// <summary>
        /// Get the responsibility range of the current silo
        /// </summary>
        /// <returns>The range this silo is responsible for.</returns>
        IRingRange GetMyRange();

        /// <summary>
        /// Subscribe to receive range change notifications
        /// </summary>
        /// <param name="observer">An observer interface to receive range change notifications.</param>
        /// <returns>bool value indicating that subscription succeeded or not.</returns>
        bool SubscribeToRangeChangeEvents(IAsyncRingRangeListener observer);

        /// <summary>
        /// Unsubscribe from receiving range change notifications
        /// </summary>
        /// <param name="observer">An observer interface to receive range change notifications.</param>
        /// <returns>bool value indicating that unsubscription succeeded or not</returns>
        bool UnSubscribeFromRangeChangeEvents(IAsyncRingRangeListener observer);
    }
    // This has to be a separate interface, not polymorphic with IRingRangeListener,
    // since IRingRangeListener is implemented by SystemTarget and thus if it becomes grain interface
    // it would need to be system target interface (with SiloAddress as first argument).
    /// <summary>
    /// Asynchronous listener notified when the local silo's ring range changes.
    /// </summary>
    internal interface IAsyncRingRangeListener
    {
        /// <param name="old">Range before the change.</param>
        /// <param name="now">Range after the change.</param>
        Task RangeChangeNotification(IRingRange old, IRingRange now);
    }
    // This is the public interface to be used by the consistent ring
    public interface IRingRange
    {
        /// <summary>
        /// Check if <paramref name="n"/> is our responsibility to serve
        /// </summary>
        /// <param name="n">Uniform hash value to test.</param>
        /// <returns>true if the value is in our responsibility range, false otherwise</returns>
        bool InRange(uint n);

        /// <summary>
        /// Check if the grain reference's uniform hash falls in this range.
        /// </summary>
        bool InRange(GrainReference grainReference);

        /// <summary>
        /// Verbose rendering of the range (may enumerate sub-ranges).
        /// </summary>
        string ToFullString();
    }
    // This is the internal interface to be used only by the different range implementations.
    internal interface IRingRangeInternal : IRingRange
    {
        // Number of ring points covered by this range.
        long RangeSize();
        // Fraction of the whole ring covered, expressed in percent (0..100).
        double RangePercentage();
    }
[Serializable]
internal class SingleRange : IRingRangeInternal, IEquatable<SingleRange>
{
private readonly uint begin;
private readonly uint end;
/// <summary>
/// Exclusive
/// </summary>
public uint Begin { get { return begin; } }
/// <summary>
/// Inclusive
/// </summary>
public uint End { get { return end; } }
public SingleRange(uint begin, uint end)
{
this.begin = begin;
this.end = end;
}
public bool InRange(GrainReference grainReference)
{
return InRange(grainReference.GetUniformHashCode());
}
/// <summary>
/// checks if n is element of (Begin, End], while remembering that the ranges are on a ring
/// </summary>
/// <param name="n"></param>
/// <returns>true if n is in (Begin, End], false otherwise</returns>
public bool InRange(uint n)
{
uint num = n;
if (begin < end)
{
return num > begin && num <= end;
}
// Begin > End
return num > begin || num <= end;
}
public long RangeSize()
{
if (begin < end)
{
return end - begin;
}
return RangeFactory.RING_SIZE - (begin - end);
}
public double RangePercentage()
{
return ((double)RangeSize() / (double)RangeFactory.RING_SIZE) * ((double)100.0);
}
#region IEquatable<SingleRange> Members
public bool Equals(SingleRange other)
{
return other != null && begin == other.begin && end == other.end;
}
#endregion
public override bool Equals(object obj)
{
return Equals(obj as SingleRange);
}
public override int GetHashCode()
{
return (int)(begin ^ end);
}
public override string ToString()
{
if (begin == 0 && end == 0)
{
return String.Format("<(0 0], Size=x{0,8:X8}, %Ring={1:0.000}%>", RangeSize(), RangePercentage());
}
return String.Format("<(x{0,8:X8} x{1,8:X8}], Size=x{2,8:X8}, %Ring={3:0.000}%>", begin, end, RangeSize(), RangePercentage());
}
public string ToCompactString()
{
return ToString();
}
public string ToFullString()
{
return ToString();
}
}
internal static class RangeFactory
{
public const long RING_SIZE = ((long)uint.MaxValue) + 1;
public static IRingRange CreateFullRange()
{
return new SingleRange(0, 0);
}
public static IRingRange CreateRange(uint begin, uint end)
{
return new SingleRange(begin, end);
}
public static IRingRange CreateRange(List<IRingRange> inRanges)
{
return new GeneralMultiRange(inRanges);
}
public static IEnumerable<SingleRange> GetSubRanges(IRingRange range)
{
if (range is SingleRange)
{
return new SingleRange[] { (SingleRange)range };
}
else if (range is GeneralMultiRange)
{
return ((GeneralMultiRange)range).Ranges;
}
return null;
}
}
[Serializable]
internal class GeneralMultiRange : IRingRangeInternal
{
private readonly List<SingleRange> ranges;
private readonly long rangeSize;
private readonly double rangePercentage;
internal IEnumerable<SingleRange> Ranges { get { return ranges; } }
internal GeneralMultiRange(IEnumerable<IRingRange> inRanges)
{
ranges = inRanges.Cast<SingleRange>().ToList();
if (ranges.Count == 0)
{
rangeSize = 0;
rangePercentage = 0;
}
else
{
rangeSize = ranges.Sum(r => r.RangeSize());
rangePercentage = ranges.Sum(r => r.RangePercentage());
}
}
public bool InRange(uint n)
{
foreach (IRingRange s in Ranges)
{
if (s.InRange(n)) return true;
}
return false;
}
public bool InRange(GrainReference grainReference)
{
return InRange(grainReference.GetUniformHashCode());
}
public long RangeSize()
{
return rangeSize;
}
public double RangePercentage()
{
return rangePercentage;
}
public override string ToString()
{
return ToCompactString();
}
public string ToCompactString()
{
if (ranges.Count == 0) return "Empty MultiRange";
if (ranges.Count == 1) return ranges[0].ToString();
return String.Format("<MultiRange: Size=x{0,8:X8}, %Ring={1:0.000}%>", RangeSize(), RangePercentage());
}
public string ToFullString()
{
if (ranges.Count == 0) return "Empty MultiRange";
if (ranges.Count == 1) return ranges[0].ToString();
return String.Format("<MultiRange: Size=x{0,8:X8}, %Ring={1:0.000}%, {2} Ranges: {3}>", RangeSize(), RangePercentage(), ranges.Count, Utils.EnumerableToString(ranges, r => r.ToFullString()));
}
}
    /// <summary>
    /// Splits a ring range into numSubRanges equal (up to rounding) sub-ranges,
    /// addressable by index. (Note: the "Devided" spelling is preserved because
    /// the type name is referenced elsewhere.)
    /// </summary>
    [Serializable]
    internal class EquallyDevidedMultiRange
    {
        /// <summary>
        /// Splits one SingleRange into numSubRanges contiguous pieces, distributing
        /// the division remainder one point at a time over the leading pieces.
        /// </summary>
        [Serializable]
        private class EquallyDevidedSingleRange
        {
            private readonly List<SingleRange> ranges;

            internal EquallyDevidedSingleRange(SingleRange singleRange, int numSubRanges)
            {
                ranges = new List<SingleRange>();
                if (numSubRanges == 0) throw new ArgumentException("numSubRanges is 0.", "numSubRanges");

                if (numSubRanges == 1)
                {
                    ranges.Add(singleRange);
                }
                else
                {
                    uint uNumSubRanges = checked((uint)numSubRanges);
                    // Each piece gets `portion` points; the first `remainder` pieces get one extra.
                    uint portion = (uint)(singleRange.RangeSize() / uNumSubRanges);
                    uint remainder = (uint)(singleRange.RangeSize() - portion * uNumSubRanges);
                    uint start = singleRange.Begin;
                    for (uint i = 0; i < uNumSubRanges; i++)
                    {
                        // (Begin, End]
                        uint end = (unchecked(start + portion));
                        // I want it to overflow on purpose. It will do the right thing.
                        if (remainder > 0)
                        {
                            end++;
                            remainder--;
                        }
                        ranges.Add(new SingleRange(start, end));
                        start = end; // nextStart
                    }
                }
            }

            internal SingleRange GetSubRange(int mySubRangeIndex)
            {
                return ranges[mySubRangeIndex];
            }
        }

        private readonly Dictionary<int, IRingRangeInternal> multiRanges;
        private readonly long rangeSize;
        private readonly double rangePercentage;

        // This class takes a range and devides it into X (x being numSubRanges) equal ranges.
        public EquallyDevidedMultiRange(IRingRange range, int numSubRanges)
        {
            multiRanges = new Dictionary<int, IRingRangeInternal>();
            if (range is SingleRange)
            {
                var fullSingleRange = range as SingleRange;
                var singleDevided = new EquallyDevidedSingleRange(fullSingleRange, numSubRanges);
                for (int i = 0; i < numSubRanges; i++)
                {
                    var singleRange = singleDevided.GetSubRange(i);
                    multiRanges[i] = singleRange;
                }
            }
            else if (range is GeneralMultiRange)
            {
                var fullMultiRange = range as GeneralMultiRange;
                // Take each of the single ranges in the multi range and divide each into equal sub ranges.
                // Then go over all those and group them by sub range index.
                var allSinglesDevided = new List<EquallyDevidedSingleRange>();
                foreach (var singleRange in fullMultiRange.Ranges)
                {
                    var singleDevided = new EquallyDevidedSingleRange(singleRange, numSubRanges);
                    allSinglesDevided.Add(singleDevided);
                }
                for (int i = 0; i < numSubRanges; i++)
                {
                    var singlesForThisIndex = new List<IRingRange>();
                    foreach (var singleDevided in allSinglesDevided)
                    {
                        IRingRange singleRange = singleDevided.GetSubRange(i);
                        singlesForThisIndex.Add(singleRange);
                    }
                    IRingRangeInternal multi = (IRingRangeInternal)RangeFactory.CreateRange(singlesForThisIndex);
                    multiRanges[i] = multi;
                }
            }
            // NOTE: a range of any other implementation type leaves multiRanges empty.
            if (multiRanges.Count == 0)
            {
                rangeSize = 0;
                rangePercentage = 0;
            }
            else
            {
                rangeSize = multiRanges.Values.Sum(r => r.RangeSize());
                rangePercentage = multiRanges.Values.Sum(r => r.RangePercentage());
            }
        }

        internal IRingRange GetSubRange(int mySubRangeIndex)
        {
            return multiRanges[mySubRangeIndex];
        }

        public override string ToString()
        {
            return ToCompactString();
        }

        public string ToCompactString()
        {
            if (multiRanges.Count == 0) return "Empty EquallyDevidedMultiRange";
            if (multiRanges.Count == 1) return multiRanges.First().Value.ToString();
            return String.Format("<EquallyDevidedMultiRange: Size=x{0,8:X8}, %Ring={1:0.000}%>", rangeSize, rangePercentage);
        }

        public string ToFullString()
        {
            if (multiRanges.Count == 0) return "Empty EquallyDevidedMultiRange";
            if (multiRanges.Count == 1) return multiRanges.First().Value.ToFullString();
            return String.Format("<EquallyDevidedMultiRange: Size=x{0,8:X8}, %Ring={1:0.000}%, {2} Ranges: {3}>", rangeSize, rangePercentage, multiRanges.Count,
                Utils.DictionaryToString(multiRanges, r => r.ToFullString()));
        }
    }
}
| |
////////////////////////////////////////////////////////////////////////////////
// //
// MIT X11 license, Copyright (c) 2005-2006 by: //
// //
// Authors: //
// Michael Dominic K. <[email protected]> //
// //
// Permission is hereby granted, free of charge, to any person obtaining a //
// copy of this software and associated documentation files (the "Software"), //
// to deal in the Software without restriction, including without limitation //
// the rights to use, copy, modify, merge, publish, distribute, sublicense, //
// and/or sell copies of the Software, and to permit persons to whom the //
// Software is furnished to do so, subject to the following conditions: //
// //
// The above copyright notice and this permission notice shall be included //
// in all copies or substantial portions of the Software. //
// //
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS //
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF //
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN //
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, //
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR //
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE //
// USE OR OTHER DEALINGS IN THE SOFTWARE. //
// //
////////////////////////////////////////////////////////////////////////////////
namespace Diva.Core {
using System;
using Widgets;
using System.Xml;
using Util;
using System.Collections.Generic;
using System.Collections;
using Basics;
public class OpenerTask : Task, IBoilProvider {
// Private structs ////////////////////////////////////////////
struct ObjectInfo {
public ObjectContainer Container;
public int[] Depends;
public string SystemType;
public int RefId;
/* CONSTRUCTOR */
public ObjectInfo (ObjectContainer container)
{
Container = container;
Depends = container.Depends.ToArray ();
SystemType = container.SystemType;
RefId = container.RefId;
}
public override string ToString ()
{
return String.Format ("Type: {0} Deps count: {1} Id: {2}",
SystemType, Depends.Length, RefId);
}
public bool IsUnBoilable (IBoilProvider provider)
{
if (Depends.Length == 0)
return true;
foreach (int id in Depends)
if (! (provider.Contains (id)))
return false;
return true;
}
}
        // Enums //////////////////////////////////////////////////////

        /* Steps of the open-file state machine, executed in order by ExecuteStep */
        enum OpenerTaskStep { Init, Header, ProjectInfoRead, ObjectListRead,
                              ObjectListParse, ObjectListUnBoil, FindRoots,
                              Finished };
        // Fields /////////////////////////////////////////////////////

        string fileName;                         // Filename we're reading
        XmlDocument xmlDocument;                 // Our document
        //XmlNode projectInfoNode;               // <projectinfo> node
        IEnumerator objectsEnumerator;           // Enumerates the ObjectContainers being parsed
        List <ObjectInfo> objectsList;           // Objects still waiting to be unboiled
        ObjectListContainer objectListContainer; // Wrapper for the <objectlist> element
        OpenerTaskStep currentStep;              // Our current step in the state machine

        Dictionary <int, object> idToObject;     // Id -> object
        Dictionary <object, int> objectToId;     // Object -> Id

        // Project data, populated from <projectinfo> and during the FindRoots step
        string projectName = String.Empty;       // From <projectinfo>/<name>
        string projectDirectory = String.Empty;  // From <projectinfo>/<directory>
        TagList projectTagList;                  // Root "taglist"
        StuffList projectStuffList;              // Root "stufflist"
        TrackList projectTrackList;              // Root "tracklist"
        ClipList projectClipList;                // Root "cliplist"
        MediaItemList projectMediaItemList;      // Root "mediaitemlist"
        Commander projectCommander;              // Root "commander"
        Gdv.Pipeline projectPipeline;            // Root "pipeline"
        Gdv.ProjectFormat projectFormat;         // Root "projectformat"
        // Properties /////////////////////////////////////////////////
        // Read-only accessors for the project data produced by the task.
        // The root lists are populated during the FindRoots step.

        public string ProjectName {
            get { return projectName; }
        }

        public string ProjectDirectory {
            get { return projectDirectory; }
        }

        public TagList ProjectTagList {
            get { return projectTagList; }
        }

        public StuffList ProjectStuffList {
            get { return projectStuffList; }
        }

        public TrackList ProjectTrackList {
            get { return projectTrackList; }
        }

        public ClipList ProjectClipList {
            get { return projectClipList; }
        }

        public MediaItemList ProjectMediaItemList {
            get { return projectMediaItemList; }
        }

        public Commander ProjectCommander {
            get { return projectCommander; }
        }

        public Gdv.Pipeline ProjectPipeline {
            get { return projectPipeline; }
        }

        public Gdv.ProjectFormat ProjectFormat {
            get { return projectFormat; }
        }
// Public methods /////////////////////////////////////////////
/* CONSTRUCTOR */
public OpenerTask (string fileName)
{
this.fileName = fileName;
var verbatimString = @"c:\test\";
var verbatimStringWithNewline = @"test \\ \n \t \r
a
b
c";
var verbatimStringWithEscapedQuotes = @"He said
""she says \"" is not an escaped character in verbatimstrings""
";
int[] numbers = { 5,6,4,2,4,6,8,9,7,0 };
var linqExample = from n in numbers
where n > 5
select n;
var anotherlinqExample = from n in numbers
orderby n descending
select n;
int[] someMoreNumbers = { 8,2,17,34,8,9,9,5,3,4,2,1,5 };
var moreLinq = from n in numbers
join mn in moreNumbers on n equals mn + 2
select new {n, mn};
}
public override void Reset ()
{
objectToId = new Dictionary <object, int> ();
idToObject = new Dictionary <int, object> ();
xmlDocument = null;
//projectInfoNode = null;
currentStep = OpenerTaskStep.Init;
base.Reset ();
}
        /* Reverse lookup of an unboiled object's reference id.
         * Throws KeyNotFoundException for objects not seen during unboiling. */
        public int GetIdForObject (object o)
        {
            return objectToId [o];
        }
        /* Look up the object unboiled for the given reference id.
         * Throws KeyNotFoundException if the id has not been unboiled yet. */
        public object GetObjectForId (int id)
        {
            return idToObject [id];
        }
        /* IBoilProvider: true if the given reference id was already unboiled */
        public bool Contains (int id)
        {
            return idToObject.ContainsKey (id);
        }
        // Private methods ////////////////////////////////////////////

        /* Runs one step of the open-file state machine and advances currentStep.
         * The base-class step counter `s` is ignored; progress is tracked in
         * currentStep instead. Returns Running until Finished is reached. */
        protected override TaskStatus ExecuteStep (int s)
        {
            bool cont = true;

            // Main
            switch (currentStep) {

                case OpenerTaskStep.Init:
                    // Load the whole document up front; subsequent steps walk it.
                    objectsList = new List <ObjectInfo> ();
                    xmlDocument = new XmlDocument ();
                    xmlDocument.Load (fileName);
                    currentStep = OpenerTaskStep.Header;
                    break;

                case OpenerTaskStep.Header:
                    //ReadHeader ();
                    currentStep = OpenerTaskStep.ProjectInfoRead;
                    break;

                case OpenerTaskStep.ProjectInfoRead:
                    foreach (XmlNode node in xmlDocument.DocumentElement.ChildNodes)
                        if (node.Name == "projectinfo")
                            ResolveProjectInfoNode (node);

                    // FIXME: Fail if not found/not resolved
                    currentStep = OpenerTaskStep.ObjectListRead;
                    break;

                case OpenerTaskStep.ObjectListRead:
                    foreach (XmlNode node in xmlDocument.DocumentElement.ChildNodes)
                        if (node.Name == "objectlist")
                            objectListContainer = (ObjectListContainer)
                                DataFactory.MakeDataElement (node as XmlElement);

                    if (objectListContainer == null)
                        throw new Exception ("ObjectListContainer not found!");

                    currentStep = OpenerTaskStep.ObjectListParse;
                    break;

                case OpenerTaskStep.ObjectListParse:
                    // Incremental: parses a bounded batch per step.
                    bool flush = EnumerateSomeObjects ();
                    if (flush)
                        currentStep = OpenerTaskStep.ObjectListUnBoil;
                    break;

                case OpenerTaskStep.ObjectListUnBoil:
                    // Incremental: unboils a bounded batch per step.
                    bool done = UnBoilSomeObjects ();
                    if (done)
                        currentStep = OpenerTaskStep.FindRoots;
                    break;

                case OpenerTaskStep.FindRoots:
                    // Bind the well-known root objects out of the unboiled set.
                    projectTrackList = (TrackList) FindRoot ("tracklist");
                    projectTagList = (TagList) FindRoot ("taglist");
                    projectStuffList = (StuffList) FindRoot ("stufflist");
                    projectClipList = (ClipList) FindRoot ("cliplist");
                    projectMediaItemList = (MediaItemList) FindRoot ("mediaitemlist");
                    projectPipeline = (Gdv.Pipeline) FindRoot ("pipeline");
                    projectCommander = (Commander) FindRoot ("commander");
                    projectFormat = (Gdv.ProjectFormat) FindRoot ("projectformat");
                    currentStep = OpenerTaskStep.Finished;
                    break;

                case OpenerTaskStep.Finished:
                    cont = false;
                    break;

                default:
                    break;
            }

            // Post
            if (cont)
                return TaskStatus.Running;
            else
                return TaskStatus.Done;
        }
/*
void ReadHeader ()
{
// FIXME: Read all the attributes from the <divaproject> element
}*/
// Extracts the project name and directory from the children of the
// <projectinfo> element; other children are ignored for now.
void ResolveProjectInfoNode (XmlNode node)
{
    foreach (XmlNode childNode in node) {
        if (childNode.Name == "name")
            projectName = childNode.FirstChild.Value;
        else if (childNode.Name == "directory")
            projectDirectory = childNode.FirstChild.Value;
        // FIXME: Duration etc.
    }
}
// Converts up to 10 serialized object containers into ObjectInfo entries per
// call (batched so ExecuteStep stays short). Lazily creates the enumerator on
// first use. Returns true once the enumeration is exhausted, false otherwise.
bool EnumerateSomeObjects ()
{
    if (objectsEnumerator == null)
        objectsEnumerator = objectListContainer.FindAllObjects ().GetEnumerator ();
    for (int i = 0; i < 10; i++) {
        if (objectsEnumerator.MoveNext () == false)
            return true;
        ObjectContainer container = (ObjectContainer)
            objectsEnumerator.Current;
        ObjectInfo newInfo = new ObjectInfo (container);
        objectsList.Add (newInfo);
    }
    return false;
}
// Finds the first pending object whose dependencies are already unboiled.
// Throws when nothing qualifies (which would indicate a dependency cycle).
ObjectInfo GetNextCandidate ()
{
    for (int i = 0; i < objectsList.Count; i++) {
        ObjectInfo candidate = objectsList [i];
        if (candidate.IsUnBoilable (this))
            return candidate;
    }
    throw new Exception ("FIXME: No more unboilable objects found. Recursive?");
}
// Deserializes ("unboils") up to 5 pending objects per call, registering each
// result in both id maps so later objects can resolve references to it.
// Returns true once the pending list is empty, false while work remains.
bool UnBoilSomeObjects ()
{
    for (int i = 0; i < 5; i++) {
        // All unboiled
        if (objectsList.Count == 0)
            return true;
        ObjectInfo objInfo = GetNextCandidate ();
        object o = BoilFactory.UnBoil (objInfo.Container, this);
        objectsList.Remove (objInfo);
        // Add
        idToObject [objInfo.RefId] = o;
        objectToId [o] = objInfo.RefId;
    }
    return false;
}
// Looks up a well-known root container by name and returns the live object
// that its reference id was unboiled into.
object FindRoot (string rootString)
{
    return idToObject [objectListContainer.FindObjectContainer (rootString).RefId];
}
}
}
| |
// Copyright 2016 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
using Google.Cloud.ClientTesting;
using Google.Rpc;
using Google.Type;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Xunit;
namespace Google.Cloud.Vision.V1.Snippets
{
[SnippetOutputCollector]
public class ImageAnnotatorClientSnippets
{
[Fact]
public void Annotate()
{
    // Demonstrates a single Annotate call combining two feature types
    // (face + landmark detection) in one request.
    Image image = LoadResourceImage("SchmidtBrinPage.jpg");
    // Snippet: Annotate
    ImageAnnotatorClient client = ImageAnnotatorClient.Create();
    AnnotateImageRequest request = new AnnotateImageRequest
    {
        Image = image,
        Features =
        {
            new Feature { Type = Feature.Types.Type.FaceDetection },
            // By default, no limits are put on the number of results per annotation.
            // Use the MaxResults property to specify a limit.
            new Feature { Type = Feature.Types.Type.LandmarkDetection, MaxResults = 5 },
        }
    };
    AnnotateImageResponse response = client.Annotate(request);
    Console.WriteLine("Faces:");
    foreach (FaceAnnotation face in response.FaceAnnotations)
    {
        Console.WriteLine($"  Confidence: {(int)(face.DetectionConfidence * 100)}%; BoundingPoly: {face.BoundingPoly}");
    }
    Console.WriteLine("Landmarks:");
    foreach (EntityAnnotation landmark in response.LandmarkAnnotations)
    {
        Console.WriteLine($"Score: {(int)(landmark.Score * 100)}%; Description: {landmark.Description}");
    }
    if (response.Error != null)
    {
        Console.WriteLine($"Error detected: {response.Error}");
    }
    // End snippet
    // Fixture photo contains three faces and no recognizable landmarks.
    Assert.Equal(3, response.FaceAnnotations.Count);
    Assert.Equal(0, response.LandmarkAnnotations.Count);
}
[Fact]
public void BatchAnnotateImages()
{
    // Demonstrates annotating several images with different features in a
    // single batch call. Fixture 1 is a photo with three faces; fixture 2 is
    // the Chrome logo.
    Image image1 = LoadResourceImage("SchmidtBrinPage.jpg");
    Image image2 = LoadResourceImage("Chrome.png");
    // Sample: BatchAnnotateImages
    // Additional: BatchAnnotateImages(IEnumerable<AnnotateImageRequest>,*)
    ImageAnnotatorClient client = ImageAnnotatorClient.Create();
    // Perform face recognition on one image, and logo recognition on another.
    AnnotateImageRequest request1 = new AnnotateImageRequest
    {
        Image = image1,
        Features = { new Feature { Type = Feature.Types.Type.FaceDetection } }
    };
    AnnotateImageRequest request2 = new AnnotateImageRequest
    {
        Image = image2,
        Features = { new Feature { Type = Feature.Types.Type.LogoDetection } }
    };
    BatchAnnotateImagesResponse response = client.BatchAnnotateImages(new[] { request1, request2 });
    Console.WriteLine("Faces in image 1:");
    foreach (FaceAnnotation face in response.Responses[0].FaceAnnotations)
    {
        Console.WriteLine($"  Confidence: {(int)(face.DetectionConfidence * 100)}%; BoundingPoly: {face.BoundingPoly}");
    }
    Console.WriteLine("Logos in image 2:");
    foreach (EntityAnnotation logo in response.Responses[1].LogoAnnotations)
    {
        Console.WriteLine($"Description: {logo.Description}");
    }
    foreach (Status error in response.Responses.Select(r => r.Error))
    {
        // Fixed: the loop variable was previously not interpolated, so the
        // literal text "error" was printed instead of the actual status.
        Console.WriteLine($"Error detected: {error}");
    }
    // End sample
    Assert.Equal(3, response.Responses[0].FaceAnnotations.Count);
    Assert.Equal(1, response.Responses[1].LogoAnnotations.Count);
}
[Fact]
public void DetectFaces()
{
    // Demonstrates the DetectFaces convenience method.
    Image image = LoadResourceImage("SchmidtBrinPage.jpg");
    // Snippet: DetectFaces
    ImageAnnotatorClient client = ImageAnnotatorClient.Create();
    IReadOnlyList<FaceAnnotation> result = client.DetectFaces(image);
    foreach (FaceAnnotation face in result)
    {
        Console.WriteLine($"Confidence: {(int)(face.DetectionConfidence * 100)}%; BoundingPoly: {face.BoundingPoly}");
    }
    // End snippet
    Assert.Equal(3, result.Count);
    // Check the bounding boxes of the faces, with a tolerance of 5px on each edge.
    var rectangles = result.Select(x => Rectangle.FromBoundingPoly(x.BoundingPoly)).ToList();
    Assert.True(rectangles.All(x => x != null));
    // Sort left-to-right so the expected boxes can be compared positionally.
    rectangles = rectangles.OrderBy(r => r.Left).ToList();
    Assert.True(rectangles[0].Equals(new Rectangle(196, 64, 293, 177), 5.0));
    Assert.True(rectangles[1].Equals(new Rectangle(721, 162, 846, 308), 5.0));
    Assert.True(rectangles[2].Equals(new Rectangle(1009, 113, 1149, 276), 5.0));
}
[Fact]
public void DetectLandmarks()
{
    // Demonstrates the DetectLandmarks convenience method.
    Image image = LoadResourceImage("SydneyOperaHouse.jpg");
    // Snippet: DetectLandmarks
    ImageAnnotatorClient client = ImageAnnotatorClient.Create();
    IReadOnlyList<EntityAnnotation> result = client.DetectLandmarks(image);
    foreach (EntityAnnotation landmark in result)
    {
        Console.WriteLine($"Score: {(int)(landmark.Score * 100)}%; Description: {landmark.Description}");
    }
    // End snippet
    // Order results by description so the assertion is deterministic.
    Assert.Equal(2, result.Count);
    var descriptions = result.Select(r => r.Description).OrderBy(d => d).ToList();
    Assert.Equal(new[] { "Sydney Harbour Bridge", "Sydney Opera House" }, descriptions);
}
[Fact]
public void DetectImageProperties()
{
    // Demonstrates the DetectImageProperties convenience method, picking the
    // color that covers the largest fraction of the image.
    Image image = LoadResourceImage("SchmidtBrinPage.jpg");
    // Snippet: DetectImageProperties
    ImageAnnotatorClient client = ImageAnnotatorClient.Create();
    ImageProperties properties = client.DetectImageProperties(image);
    ColorInfo dominantColor = properties.DominantColors.Colors.OrderByDescending(c => c.PixelFraction).First();
    Console.WriteLine($"Dominant color in image: {dominantColor}");
    // End snippet
    // PixelFraction compared to 2 decimal places.
    Assert.Equal(0.18, dominantColor.PixelFraction, 2);
    Assert.Equal(new Color { Red = 16, Green = 13, Blue = 8 }, dominantColor.Color);
}
[Fact]
public void DetectLabels()
{
    // Demonstrates the DetectLabels convenience method.
    Image image = LoadResourceImage("Gladiolos.jpg");
    // Snippet: DetectLabels
    ImageAnnotatorClient client = ImageAnnotatorClient.Create();
    IReadOnlyList<EntityAnnotation> labels = client.DetectLabels(image);
    foreach (EntityAnnotation label in labels)
    {
        Console.WriteLine($"Score: {(int)(label.Score * 100)}%; Description: {label.Description}");
    }
    // End snippet
    // Not exhaustive, but let's check certain basic labels.
    var descriptions = labels.Select(l => l.Description).ToList();
    Assert.Contains("flower", descriptions);
    Assert.Contains("plant", descriptions);
    Assert.Contains("vase", descriptions);
}
[Fact]
public void DetectText()
{
    // Demonstrates the DetectText (OCR) convenience method.
    Image image = LoadResourceImage("Ellesborough.png");
    // Snippet: DetectText
    ImageAnnotatorClient client = ImageAnnotatorClient.Create();
    IReadOnlyList<EntityAnnotation> textAnnotations = client.DetectText(image);
    foreach (EntityAnnotation text in textAnnotations)
    {
        Console.WriteLine($"Description: {text.Description}");
    }
    // End snippet
    // Only assert on one word known to be present in the fixture image.
    var descriptions = textAnnotations.Select(t => t.Description).ToList();
    Assert.Contains("Ellesborough", descriptions);
}
[Fact]
public void DetectSafeSearch()
{
    // Demonstrates the DetectSafeSearch convenience method.
    Image image = LoadResourceImage("SchmidtBrinPage.jpg");
    // Snippet: DetectSafeSearch
    ImageAnnotatorClient client = ImageAnnotatorClient.Create();
    SafeSearchAnnotation annotation = client.DetectSafeSearch(image);
    // Each category is classified as Very Unlikely, Unlikely, Possible, Likely or Very Likely.
    Console.WriteLine($"Adult? {annotation.Adult}");
    Console.WriteLine($"Spoof? {annotation.Spoof}");
    Console.WriteLine($"Violence? {annotation.Violence}");
    Console.WriteLine($"Medical? {annotation.Medical}");
    // End snippet
    // Ranges (rather than exact values) keep the test stable across model updates.
    Assert.InRange(annotation.Adult, Likelihood.VeryUnlikely, Likelihood.Unlikely);
    Assert.InRange(annotation.Spoof, Likelihood.VeryUnlikely, Likelihood.Unlikely);
    Assert.InRange(annotation.Violence, Likelihood.VeryUnlikely, Likelihood.Unlikely);
    Assert.InRange(annotation.Medical, Likelihood.VeryUnlikely, Likelihood.Unlikely);
}
[Fact]
public void DetectLogos()
{
    // Demonstrates the DetectLogos convenience method.
    Image image = LoadResourceImage("Chrome.png");
    // Snippet: DetectLogos
    ImageAnnotatorClient client = ImageAnnotatorClient.Create();
    IReadOnlyList<EntityAnnotation> logos = client.DetectLogos(image);
    foreach (EntityAnnotation logo in logos)
    {
        Console.WriteLine($"Description: {logo.Description}");
    }
    // End snippet
    Assert.Equal(1, logos.Count);
    Assert.Equal("Google Chrome", logos[0].Description);
}
[Fact]
public void DetectCropHints()
{
    // Demonstrates the DetectCropHints convenience method, printing each
    // suggested crop's polygon, confidence and importance fraction.
    Image image = LoadResourceImage("Gladiolos.jpg");
    // Snippet: DetectCropHints
    ImageAnnotatorClient client = ImageAnnotatorClient.Create();
    CropHintsAnnotation cropHints = client.DetectCropHints(image);
    foreach (CropHint hint in cropHints.CropHints)
    {
        Console.WriteLine("Crop hint:");
        string poly = string.Join(" - ", hint.BoundingPoly.Vertices.Select(v => $"({v.X}, {v.Y})"));
        Console.WriteLine($"  Poly: {poly}");
        Console.WriteLine($"  Confidence: {hint.Confidence}");
        Console.WriteLine($"  Importance fraction: {hint.ImportanceFraction}");
    }
    // End snippet
    Assert.Equal(1, cropHints.CropHints.Count);
}
[Fact]
public void DetectDocumentText()
{
    // Demonstrates the DetectDocumentText convenience method, walking the
    // page -> block -> paragraph -> word -> symbol hierarchy.
    Image image = LoadResourceImage("DocumentText.png");
    // Snippet: DetectDocumentText
    ImageAnnotatorClient client = ImageAnnotatorClient.Create();
    TextAnnotation text = client.DetectDocumentText(image);
    Console.WriteLine($"Text: {text.Text}");
    foreach (var page in text.Pages)
    {
        foreach (var block in page.Blocks)
        {
            string box = string.Join(" - ", block.BoundingBox.Vertices.Select(v => $"({v.X}, {v.Y})"));
            Console.WriteLine($"Block {block.BlockType} at {box}");
            foreach (var paragraph in block.Paragraphs)
            {
                box = string.Join(" - ", paragraph.BoundingBox.Vertices.Select(v => $"({v.X}, {v.Y})"));
                Console.WriteLine($"  Paragraph at {box}");
                foreach (var word in paragraph.Words)
                {
                    Console.WriteLine($"    Word: {string.Join("", word.Symbols.Select(s => s.Text))}");
                }
            }
        }
    }
    // End snippet
    // Reassemble each block's first paragraph into words and compare to the
    // three known lines in the fixture image.
    var lines = text.Pages[0].Blocks
        .Select(b => b.Paragraphs[0].Words.Select(w => string.Join("", w.Symbols.Select(s => s.Text))))
        .ToList();
    Assert.Equal(new[] { "Sample", "text", "line", "1", }, lines[0]);
    Assert.Equal(new[] { "Text", "near", "the", "middle", }, lines[1]);
    Assert.Equal(new[] { "Text", "near", "bottom", "right", }, lines[2]);
}
[Fact]
public void DetectWebInformation()
{
    // Demonstrates the DetectWebInformation convenience method. Output is
    // web-dependent, so nothing beyond a successful call is asserted.
    Image image = LoadResourceImage("SchmidtBrinPage.jpg");
    // Snippet: DetectWebInformation
    ImageAnnotatorClient client = ImageAnnotatorClient.Create();
    WebDetection webDetection = client.DetectWebInformation(image);
    foreach (WebDetection.Types.WebImage webImage in webDetection.FullMatchingImages)
    {
        Console.WriteLine($"Full image: {webImage.Url} ({webImage.Score})");
    }
    foreach (WebDetection.Types.WebImage webImage in webDetection.PartialMatchingImages)
    {
        Console.WriteLine($"Partial image: {webImage.Url} ({webImage.Score})");
    }
    foreach (WebDetection.Types.WebPage webPage in webDetection.PagesWithMatchingImages)
    {
        Console.WriteLine($"Page with matching image: {webPage.Url} ({webPage.Score})");
    }
    foreach (WebDetection.Types.WebEntity entity in webDetection.WebEntities)
    {
        Console.WriteLine($"Web entity: {entity.EntityId} / {entity.Description} ({entity.Score})");
    }
    // End snippet
}
[Fact]
public void ErrorHandling_SingleImage()
{
    // Demonstrates how the convenience methods surface server-side failures
    // as AnnotateImageException rather than returning an error payload.
    // Sample: ErrorHandling_SingleImage
    // We create a request which passes simple validation, but isn't a valid image.
    Image image = Image.FromBytes(new byte[10]);
    ImageAnnotatorClient client = ImageAnnotatorClient.Create();
    try
    {
        IReadOnlyList<EntityAnnotation> logos = client.DetectLogos(image);
        // Normally use logos here...
    }
    catch (AnnotateImageException e)
    {
        AnnotateImageResponse response = e.Response;
        Console.WriteLine(response.Error);
    }
    // End sample
}
// Loads a test fixture that is embedded in this assembly as a manifest
// resource, resolving it relative to this class's namespace.
private static Image LoadResourceImage(string name)
{
    var assembly = typeof(ImageAnnotatorClientSnippets).GetTypeInfo().Assembly;
    var resourceName = $"{typeof(ImageAnnotatorClientSnippets).Namespace}.{name}";
    using (var stream = assembly.GetManifestResourceStream(resourceName))
    {
        return Image.FromStream(stream);
    }
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;
namespace Hopnscotch.Portal.Web.Areas.HelpPage
{
/// <summary>
/// This class will generate the samples for the help page.
/// </summary>
public class HelpPageSampleGenerator
{
/// <summary>
/// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
/// </summary>
public HelpPageSampleGenerator()
{
    // Start with empty registries; they are populated by HelpPageConfig
    // (or other startup code) before samples are requested.
    ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
    ActionSamples = new Dictionary<HelpPageSampleKey, object>();
    SampleObjects = new Dictionary<Type, object>();
}
/// <summary>
/// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
/// </summary>
/// <remarks>Consulted by <see cref="ResolveType"/> to decide the actual payload type.</remarks>
public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }
/// <summary>
/// Gets the objects that are used directly as samples for certain actions.
/// </summary>
/// <remarks>Entries here take precedence over formatter-generated samples.</remarks>
public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }
/// <summary>
/// Gets the objects that are serialized as samples by the supported formatters.
/// </summary>
/// <remarks>Keyed by the CLR type being serialized.</remarks>
public IDictionary<Type, object> SampleObjects { get; internal set; }
/// <summary>
/// Gets the request body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
{
    // Convenience wrapper over GetSample for the request direction.
    return GetSample(api, SampleDirection.Request);
}
/// <summary>
/// Gets the response body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
{
    // Convenience wrapper over GetSample for the response direction.
    return GetSample(api, SampleDirection.Response);
}
/// <summary>
/// Gets the request or response body samples.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The samples keyed by media type.</returns>
public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
{
    if (api == null)
    {
        throw new ArgumentNullException("api");
    }
    string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
    string actionName = api.ActionDescriptor.ActionName;
    IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
    Collection<MediaTypeFormatter> formatters;
    Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
    var samples = new Dictionary<MediaTypeHeaderValue, object>();
    // Use the samples provided directly for actions
    var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
    foreach (var actionSample in actionSamples)
    {
        samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
    }
    // Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
    // Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
    if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
    {
        object sampleObject = GetSampleObject(type);
        foreach (var formatter in formatters)
        {
            foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
            {
                // Directly-provided action samples (added above) win over
                // formatter-generated ones for the same media type.
                if (!samples.ContainsKey(mediaType))
                {
                    object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);
                    // If no sample found, try generate sample using formatter and sample object
                    if (sample == null && sampleObject != null)
                    {
                        sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
                    }
                    samples.Add(mediaType, WrapSampleIfString(sample));
                }
            }
        }
    }
    return samples;
}
/// <summary>
/// Search for samples that are provided directly through <see cref="ActionSamples"/>.
/// </summary>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="type">The CLR type.</param>
/// <param name="formatter">The formatter.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The sample that matches the parameters.</returns>
public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
{
    object sample;
    // First, try get sample provided for a specific mediaType, controllerName, actionName and parameterNames.
    // If not found, try get the sample provided for a specific mediaType, controllerName and actionName regardless of the parameterNames
    // If still not found, try get the sample provided for a specific type and mediaType
    // ("*" is the wildcard parameter list used when registering samples.)
    if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
        ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
        ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample))
    {
        return sample;
    }
    return null;
}
/// <summary>
/// Gets the sample object that will be serialized by the formatters.
/// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create one using <see cref="ObjectGenerator"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>The sample object.</returns>
/// <summary>
/// Gets the sample object that will be serialized by the formatters.
/// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create one using <see cref="ObjectGenerator"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>The sample object (possibly null if generation fails).</returns>
public virtual object GetSampleObject(Type type)
{
    object sampleObject;
    if (SampleObjects.TryGetValue(type, out sampleObject))
    {
        return sampleObject;
    }
    // No registered sample - synthesize a default one.
    return new ObjectGenerator().GenerateObject(type);
}
/// <summary>
/// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
/// <param name="formatters">The formatters.</param>
[SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
{
    if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
    {
        throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
    }
    if (api == null)
    {
        throw new ArgumentNullException("api");
    }
    Type type;
    // An explicit registration in ActualHttpMessageTypes overrides whatever
    // the action signature declares ("*" matches any parameter list).
    if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
        ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
    {
        // Re-compute the supported formatters based on type
        Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
        foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
        {
            if (IsFormatSupported(sampleDirection, formatter, type))
            {
                newFormatters.Add(formatter);
            }
        }
        formatters = newFormatters;
    }
    else
    {
        switch (sampleDirection)
        {
            case SampleDirection.Request:
                // The request body type comes from the (single) FromBody parameter, if any.
                ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
                type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
                formatters = api.SupportedRequestBodyFormatters;
                break;
            case SampleDirection.Response:
            default:
                type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
                formatters = api.SupportedResponseFormatters;
                break;
        }
    }
    return type;
}
/// <summary>
/// Writes the sample object using formatter.
/// </summary>
/// <param name="formatter">The formatter.</param>
/// <param name="value">The value.</param>
/// <param name="type">The type.</param>
/// <param name="mediaType">Type of the media.</param>
/// <returns></returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
{
    if (formatter == null)
    {
        throw new ArgumentNullException("formatter");
    }
    if (mediaType == null)
    {
        throw new ArgumentNullException("mediaType");
    }
    object sample = String.Empty;
    MemoryStream ms = null;
    HttpContent content = null;
    try
    {
        if (formatter.CanWriteType(type))
        {
            // Serialize into an in-memory buffer, then read it back as text.
            ms = new MemoryStream();
            content = new ObjectContent(type, value, formatter, mediaType);
            formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
            ms.Position = 0;
            StreamReader reader = new StreamReader(ms);
            string serializedSampleString = reader.ReadToEnd();
            // Pretty-print known text formats for display on the help page.
            if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
            {
                serializedSampleString = TryFormatXml(serializedSampleString);
            }
            else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
            {
                serializedSampleString = TryFormatJson(serializedSampleString);
            }
            sample = new TextSample(serializedSampleString);
        }
        else
        {
            sample = new InvalidSample(String.Format(
                CultureInfo.CurrentCulture,
                "Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
                mediaType,
                formatter.GetType().Name,
                type.Name));
        }
    }
    catch (Exception e)
    {
        // Any serialization failure is reported on the help page instead of
        // crashing it.
        sample = new InvalidSample(String.Format(
            CultureInfo.CurrentCulture,
            "An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
            formatter.GetType().Name,
            mediaType.MediaType,
            e.Message));
    }
    finally
    {
        if (ms != null)
        {
            ms.Dispose();
        }
        if (content != null)
        {
            content.Dispose();
        }
    }
    return sample;
}
// Pretty-prints a JSON payload; returns the input untouched when it is not
// parseable as JSON.
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatJson(string str)
{
    try
    {
        return JsonConvert.SerializeObject(JsonConvert.DeserializeObject(str), Formatting.Indented);
    }
    catch
    {
        // Not valid JSON - fall back to the raw payload.
        return str;
    }
}
// Pretty-prints an XML payload; returns the input untouched when it is not
// parseable as XML.
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatXml(string str)
{
    try
    {
        return XDocument.Parse(str).ToString();
    }
    catch
    {
        // Not valid XML - fall back to the raw payload.
        return str;
    }
}
// A formatter is usable for a request sample when it can deserialize the
// type, and for a response sample when it can serialize it.
private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
{
    if (sampleDirection == SampleDirection.Request)
    {
        return formatter.CanReadType(type);
    }
    if (sampleDirection == SampleDirection.Response)
    {
        return formatter.CanWriteType(type);
    }
    return false;
}
// Lazily yields every registered action sample whose key matches the given
// controller/action/parameters/direction. A sample registered with the "*"
// parameter list matches regardless of the actual parameter names.
private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
{
    HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
    foreach (var sample in ActionSamples)
    {
        HelpPageSampleKey sampleKey = sample.Key;
        if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
            String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
            (sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
            sampleDirection == sampleKey.SampleDirection)
        {
            yield return sample;
        }
    }
}
// Raw strings are wrapped in TextSample so the help page renders them as
// preformatted text; everything else passes through unchanged.
private static object WrapSampleIfString(object sample)
{
    var stringSample = sample as string;
    return stringSample == null ? sample : new TextSample(stringSample);
}
}
}
| |
/*
* Copyright (c) 2010, www.wojilu.com. All rights reserved.
*/
using System;
using System.Collections.Generic;
using wojilu.ORM;
using wojilu.Web.Mvc;
using wojilu.Web.Mvc.Attr;
using wojilu.Common.Comments;
using wojilu.Common.AppBase.Interface;
using wojilu.Data;
namespace wojilu.Web.Controller.Common {
public class CommentAdminController : ControllerBase {
// Service handling admin-side comment queries/deletion; property-injected in
// tests, default-constructed otherwise.
public CommentAdminService commentService { get; set; }
public CommentAdminController() {
    commentService = new CommentAdminService();
}
// Shows the paged comment list for the entity type given in the "type"
// query parameter, with optional time filter ("filter") and search state.
public void List() {
    checkTypeName();
    set( "ActionLink", to( Admin ) + "?type=" + ctx.Get( "type" ) );
    set( "searchTarget", to( Search ) );
    set( "typeFullName", ctx.Get( "type" ) );
    set( "filterLink", to( List ) + "?type=" + ctx.Get( "type" ) );
    set( "searchKey", ctx.Get( "q" ) );
    setDroplist( ctx.Get( "t" ) );
    String condition = getCondition();
    initService();
    IPageList list = commentService.GetPageAll( condition );
    bindList( "list", "c", list.Results, bindLink );
    set( "page", list.PageBar );
}
// Template callback for each comment row: truncates the body to 100 chars
// and resolves the commented target's title/link (placeholder values when
// the target no longer exists).
private void bindLink( IBlock block, String lbl, object obj ) {
    IComment c = (IComment)obj;
    block.Set( "c.sContent", strUtil.CutString( c.Content, 100 ) );
    IEntity parent = ndb.findById( c.GetTargetType(), c.RootId );
    if (parent == null) {
        block.Set( "c.TargetTitle", "" );
        block.Set( "c.Link", "#" );
    }
    else {
        String title = parent.get( "Title" ).ToString();
        block.Set( "c.TargetTitle", title );
        String clink = alink.ToAppData( (IAppData)parent );
        block.Set( "c.Link", clink );
    }
}
// Searches comments by one whitelisted column ("t") and a keyword ("q").
// Invalid input silently redirects back to the plain list.
public void Search() {
    checkTypeName();
    String t = ctx.Get( "t" );
    String q = ctx.Get( "q" );
    if (isInputValid( t, q ) == false) {
        redirect( List );
        return;
    }
    //------------------------------------------------------
    set( "searchTarget", to( Search ) );
    set( "typeFullName", ctx.Get( "type" ) );
    set( "filterLink", to( List ) + "?type=" + ctx.Get( "type" ) );
    // Sanitize the keyword before it is embedded in a LIKE condition.
    q = strUtil.SqlClean( q, getInputMaxLength() );
    set( "searchKey", q );
    setDroplist( t );
    String condition = getCondition( q, t );
    initService();
    IPageList list = commentService.GetPageAll( condition );
    bindList( "list", "c", list.Results, bindLink );
    set( "page", list.PageBar );
}
// Batch-deletes the comments whose ids were posted as "choice";
// answers with an AJAX OK payload.
[HttpPost, DbTransaction]
public void Admin() {
    checkTypeName();
    String ids = ctx.PostIdList( "choice" );
    initService();
    commentService.DeleteBatch( ids );
    echoAjaxOk();
}
//--------------------------------------------------------------------------------------------------------------
// Binds the comment service to a fresh comment entity of the runtime type
// named by the "type" request parameter.
private void initService() {
    commentService.setComment( Entity.New( ctx.Get( "type" ) ) as IComment );
}
// A search is valid only when both fields are non-empty, the keyword is
// within the length cap, and the column name is on the whitelist.
private Boolean isInputValid( String t, String q ) {
    if (strUtil.IsNullOrEmpty( t ) || strUtil.IsNullOrEmpty( q ) || q.Length > getInputMaxLength()) {
        return false;
    }
    return getTypeList().Contains( t );
}
// Maximum allowed length of the search keyword.
private int getInputMaxLength() {
    return 20;
}
// Whitelist of columns the user may search on; also keeps the column name
// in the generated SQL condition under our control.
private List<string> getTypeList() {
    return new List<string> { "author", "content", "ip" };
}
// Marks the currently searched column as selected in the column drop-down;
// every other option gets an empty attribute string.
private void setDroplist( String inputType ) {
    foreach (String t in getTypeList()) {
        String optionKey = "t" + t;
        String optionValue = "";
        if (!strUtil.IsNullOrEmpty( inputType ) && inputType.Equals( t )) {
            optionValue = "selected=\"selected\"";
        }
        set( optionKey, optionValue );
    }
}
// Builds the LIKE condition for a column search, or "" when the column is
// not on the whitelist. NOTE(review): the column name comes from the fixed
// whitelist and 'q' has been passed through strUtil.SqlClean by the caller
// (Search) - confirm both before reusing this elsewhere, since the keyword
// is concatenated directly into SQL.
private String getCondition( String q, String inputType ) {
    foreach (String t in getTypeList()) {
        if (inputType.Equals( t )) {
            return t + " like '%" + q + "%'";
        }
    }
    return "";
}
// Builds a "Created > <date>" condition from the "filter" request parameter
// (day/day2/week/month/...), or "" when no filter is given.
// NOTE(review): ToShortDateString is culture-dependent - verify the dialect
// parses the current culture's date format before changing server locale.
private String getCondition() {
    String fTime = ctx.Get( "filter" );
    if (strUtil.IsNullOrEmpty( fTime )) return "";
    DateTime ft = getValidTime( fTime );
    String tquote = getEntityInfo().Dialect.GetTimeQuote();
    return "Created>" + tquote + ft.ToShortDateString() + tquote + "";
}
// Maps a filter keyword to the cut-off time for "Created > t". Unknown
// keywords map to a far-future date, so the condition matches nothing.
private static DateTime getValidTime( String time ) {
    if (time.Equals( "day" )) return DateTime.Now.AddDays( -1 );
    if (time.Equals( "day2" )) return DateTime.Now.AddDays( -2 );
    if (time.Equals( "week" )) return DateTime.Now.AddDays( -7 );
    if (time.Equals( "month" )) return DateTime.Now.AddMonths( -1 );
    if (time.Equals( "month3" )) return DateTime.Now.AddMonths( -3 );
    if (time.Equals( "month6" )) return DateTime.Now.AddMonths( -6 );
    return DateTime.Now.AddYears( 99 );
}
// Guard used by every action: rejects requests whose "type" parameter is
// blank or does not name a known entity type.
private void checkTypeName() {
    if (isTypeError( ctx.Get( "type" ) )) throw new Exception( lang( "exTypeError" ) );
}
// A type name is bad when it is blank or the ORM cannot resolve it.
private Boolean isTypeError( String typeName ) {
    return strUtil.IsNullOrEmpty( typeName ) || Entity.GetType( typeName ) == null;
}
// ORM metadata for the entity type named by the "type" request parameter.
private EntityInfo getEntityInfo() {
    return Entity.GetInfo( ctx.Get( "type" ) );
}
}
}
| |
//
// MonoTests.System.Xml.XPathNavigatorEvaluateTests
//
// Authors:
// Kral Ferch <[email protected]>
// Martin Willemoes Hansen <[email protected]>
//
// (C) 2002 Kral Ferch
// (C) 2003 Martin Willemoes Hansen
//
using System;
using System.Xml;
using System.Xml.XPath;
using NUnit.Framework;
namespace MonoTests.System.Xml
{
[TestFixture]
public class XPathNavigatorEvaluateTests : Assertion
{
XmlDocument document;
XPathNavigator navigator;
XmlDocument document2;
XPathNavigator navigator2;
XmlDocument document3;
XPathNavigator navigator3;
XPathExpression expression;
XPathNodeIterator iterator;
[SetUp]
public void GetReady ()
{
    // Three small fixture documents with fresh navigators per test:
    // document  - four distinct children under <foo>
    // document2 - three <bar> siblings carrying a baz attribute
    // document3 - three distinct children under <foo>
    document = new XmlDocument ();
    document.LoadXml ("<foo><bar/><baz/><qux/><squonk/></foo>");
    navigator = document.CreateNavigator ();
    document2 = new XmlDocument ();
    document2.LoadXml ("<foo><bar baz='1'/><bar baz='2'/><bar baz='3'/></foo>");
    navigator2 = document2.CreateNavigator ();
    document3 = new XmlDocument ();
    document3.LoadXml ("<foo><bar/><baz/><qux/></foo>");
    navigator3 = document3.CreateNavigator ();
}
// Testing Core Funcetion Library functions defined at: http://www.w3.org/TR/xpath#corelib
[Test]
public void CoreFunctionNodeSetLast ()
{
expression = navigator.Compile("last()");
iterator = navigator.Select("/foo");
AssertEquals ("0", navigator.Evaluate ("last()").ToString());
AssertEquals ("0", navigator.Evaluate (expression, null).ToString ());
AssertEquals ("1", navigator.Evaluate (expression, iterator).ToString ());
iterator = navigator.Select("/foo/*");
AssertEquals ("4", navigator.Evaluate (expression, iterator).ToString ());
AssertEquals("3", navigator2.Evaluate ("string(//bar[last()]/@baz)"));
}
[Test]
public void CoreFunctionNodeSetPosition ()
{
expression = navigator.Compile("position()");
iterator = navigator.Select("/foo");
AssertEquals ("0", navigator.Evaluate ("position()").ToString ());
AssertEquals ("0", navigator.Evaluate (expression, null).ToString ());
AssertEquals ("0", navigator.Evaluate (expression, iterator).ToString ());
iterator = navigator.Select("/foo/*");
AssertEquals ("0", navigator.Evaluate (expression, iterator).ToString ());
iterator.MoveNext();
AssertEquals ("1", navigator.Evaluate (expression, iterator).ToString ());
iterator.MoveNext ();
AssertEquals ("2", navigator.Evaluate (expression, iterator).ToString ());
iterator.MoveNext ();
AssertEquals ("3", navigator.Evaluate (expression, iterator).ToString ());
}
[Test]
public void CoreFunctionNodeSetCount ()
{
AssertEquals ("5", navigator.Evaluate ("count(//*)").ToString ());
AssertEquals ("1", navigator.Evaluate ("count(//foo)").ToString ());
AssertEquals ("1", navigator.Evaluate ("count(/foo)").ToString ());
AssertEquals ("1", navigator.Evaluate ("count(/foo/bar)").ToString ());
AssertEquals ("3", navigator2.Evaluate ("count(//bar)").ToString ());
}
public void saveTestCoreFunctionNodeSetID ()
{
document.LoadXml (
"<!DOCTYPE foo [" +
"<!ELEMENT foo (bar)>" +
"<!ELEMENT bar EMPTY>" +
"<!ATTLIST bar baz ID #REQUIRED>" +
"]>" +
"<foo><bar baz='1' qux='hello' /><bar baz='2' qux='world' /></foo>");
navigator = document.CreateNavigator();
AssertEquals ("hello", navigator.Evaluate ("string(id('1')/@qux)").ToString ());
AssertEquals ("world", navigator.Evaluate ("string(id('2')/@qux)").ToString ());
}
[Test]
public void CoreFunctionLocalName ()
{
AssertEquals ("", navigator.Evaluate ("local-name()").ToString ());
AssertEquals ("", navigator.Evaluate ("local-name(/bogus)").ToString ());
AssertEquals ("foo", navigator.Evaluate ("local-name(/foo)").ToString ());
AssertEquals ("bar", navigator3.Evaluate ("local-name(/foo/*)").ToString ());
}
// TODO: umm. Unable to make this return a namespace-uri so far...
[Test]
public void CoreFunctionNamespaceURI ()
{
document.LoadXml ("<foo:bar xmlns:foo='#foo'><foo:baz><foo:qux /></foo:baz></foo:bar>");
navigator = document.CreateNavigator ();
AssertEquals ("", navigator.Evaluate ("namespace-uri()").ToString ());
AssertEquals ("", navigator.Evaluate ("namespace-uri(/bogus)").ToString ());
//AssertEquals("foo", navigator.Evaluate ("namespace-uri(/bar)").ToString ());
AssertEquals ("", navigator2.Evaluate ("namespace-uri(//bar)").ToString ());
}
public void saveTestCoreFunctionString ()
{
document.LoadXml ("<foo>hello<bar>world</bar><baz>how are you</baz></foo>");
navigator = document.CreateNavigator ();
AssertEquals ("world", navigator.Evaluate ("string(/foo/*)").ToString ());
AssertEquals ("NaN", navigator.Evaluate ("string(0 div 0)").ToString ());
try {
navigator.Evaluate ("string(+0)");
Fail ("Expected an XPathException to be thrown.");
} catch (XPathException) {}
AssertEquals ("0", navigator.Evaluate ("string(-0)").ToString ());
AssertEquals ("Infinity", navigator.Evaluate ("string(1 div 0)").ToString ());
AssertEquals ("-Infinity", navigator.Evaluate ("string(-1 div 0)").ToString ());
AssertEquals ("45", navigator.Evaluate ("string(45)").ToString ());
AssertEquals ("-22", navigator.Evaluate ("string(-22)").ToString ());
AssertEquals ("0.25", navigator.Evaluate ("string(.25)").ToString ());
AssertEquals ("-0.25", navigator.Evaluate ("string(-.25)").ToString ());
AssertEquals ("2", navigator.Evaluate ("string(2.0)").ToString ());
AssertEquals ("2.01", navigator.Evaluate ("string(2.01)").ToString ());
AssertEquals ("-3", navigator.Evaluate ("string(-3.0)").ToString ());
AssertEquals ("3.45", navigator.Evaluate ("string(3.45)").ToString ());
// Wonder what this will look like under a different platform.
AssertEquals("0.33333333333333331", navigator.Evaluate ("string(1 div 3)").ToString ());
}
[Test]
public void CoreFunctionConcat ()
{
try {
navigator.Evaluate ("concat()");
Fail ("Expected an XPathException to be thrown.");
} catch (XPathException) {}
try {
navigator.Evaluate ("concat('foo')");
Fail ("Expected an XPathException to be thrown.");
} catch (XPathException) {}
AssertEquals ("foobar", navigator.Evaluate ("concat('foo', 'bar')").ToString ());
AssertEquals ("foobarbaz", navigator.Evaluate ("concat('foo', 'bar', 'baz')").ToString ());
AssertEquals ("foobarbazqux", navigator.Evaluate ("concat('foo', 'bar', 'baz', 'qux')").ToString ());
AssertEquals ("foobarbazquxquux", navigator.Evaluate ("concat('foo', 'bar', 'baz', 'qux', 'quux')").ToString ());
}
[Test]
public void CoreFunctionStartsWith ()
{
try {
navigator.Evaluate ("starts-with()");
Fail ("Expected an XPathException to be thrown.");
} catch (XPathException) {}
try {
navigator.Evaluate ("starts-with('foo')");
Fail ("Expected an XPathException to be thrown.");
} catch (XPathException) {}
try {
navigator.Evaluate ("starts-with('foo', 'bar', 'baz')");
Fail ("Expected an XPathException to be thrown.");
} catch (XPathException) {}
Assert ((bool)navigator.Evaluate ("starts-with('foobar', 'foo')"));
Assert (!(bool)navigator.Evaluate ("starts-with('foobar', 'bar')"));
}
[Test]
public void CoreFunctionContains ()
{
try {
navigator.Evaluate ("contains()");
Fail ("Expected an XPathException to be thrown.");
} catch (XPathException) {}
try {
navigator.Evaluate ("contains('foo')");
Fail ("Expected an XPathException to be thrown.");
} catch (XPathException) {}
try {
navigator.Evaluate ("contains('foobar', 'oob', 'baz')");
Fail ("Expected an XPathException to be thrown.");
} catch (XPathException) {}
Assert ((bool)navigator.Evaluate ("contains('foobar', 'oob')"));
Assert (!(bool)navigator.Evaluate ("contains('foobar', 'baz')"));
}
}
}
| |
//
// Copyright (c) 2008-2011, Kenneth Bell
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
namespace DiscUtils
{
using System;
using System.IO;
/// <summary>
/// Provides the base class for both <see cref="DiscFileInfo"/> and <see cref="DiscDirectoryInfo"/> objects.
/// </summary>
public class DiscFileSystemInfo
{
private DiscFileSystem _fileSystem;
private string _path;
internal DiscFileSystemInfo(DiscFileSystem fileSystem, string path)
{
if (path == null)
{
throw new ArgumentNullException("path");
}
_fileSystem = fileSystem;
_path = path.Trim('\\');
}
/// <summary>
/// Gets the file system the referenced file or directory exists on.
/// </summary>
public DiscFileSystem FileSystem
{
get { return _fileSystem; }
}
/// <summary>
/// Gets the name of the file or directory.
/// </summary>
public virtual string Name
{
get { return Utilities.GetFileFromPath(_path); }
}
/// <summary>
/// Gets the full path of the file or directory.
/// </summary>
public virtual string FullName
{
get { return _path; }
}
/// <summary>
/// Gets the extension part of the file or directory name.
/// </summary>
public virtual string Extension
{
get
{
string name = Name;
int sepIdx = name.LastIndexOf('.');
if (sepIdx >= 0)
{
return name.Substring(sepIdx + 1);
}
return string.Empty;
}
}
/// <summary>
/// Gets or sets the <see cref="System.IO.FileAttributes"/> of the current <see cref="DiscFileSystemInfo"/> object.
/// </summary>
public virtual FileAttributes Attributes
{
get { return FileSystem.GetAttributes(_path); }
set { FileSystem.SetAttributes(_path, value); }
}
/// <summary>
/// Gets the <see cref="DiscDirectoryInfo"/> of the directory containing the current <see cref="DiscFileSystemInfo"/> object.
/// </summary>
public virtual DiscDirectoryInfo Parent
{
get
{
if (string.IsNullOrEmpty(_path))
{
return null;
}
return new DiscDirectoryInfo(FileSystem, Utilities.GetDirectoryFromPath(_path));
}
}
/// <summary>
/// Gets a value indicating whether the file system object exists.
/// </summary>
public virtual bool Exists
{
get { return FileSystem.Exists(_path); }
}
/// <summary>
/// Gets or sets the creation time (in local time) of the current <see cref="DiscFileSystemInfo"/> object.
/// </summary>
public virtual DateTime CreationTime
{
get { return CreationTimeUtc.ToLocalTime(); }
set { CreationTimeUtc = value.ToUniversalTime(); }
}
/// <summary>
/// Gets or sets the creation time (in UTC) of the current <see cref="DiscFileSystemInfo"/> object.
/// </summary>
public virtual DateTime CreationTimeUtc
{
get { return FileSystem.GetCreationTimeUtc(_path); }
set { FileSystem.SetCreationTimeUtc(_path, value); }
}
/// <summary>
/// Gets or sets the last time (in local time) the file or directory was accessed.
/// </summary>
/// <remarks>Read-only file systems will never update this value, it will remain at a fixed value.</remarks>
public virtual DateTime LastAccessTime
{
get { return LastAccessTimeUtc.ToLocalTime(); }
set { LastAccessTimeUtc = value.ToUniversalTime(); }
}
/// <summary>
/// Gets or sets the last time (in UTC) the file or directory was accessed.
/// </summary>
/// <remarks>Read-only file systems will never update this value, it will remain at a fixed value.</remarks>
public virtual DateTime LastAccessTimeUtc
{
get { return FileSystem.GetLastAccessTimeUtc(_path); }
set { FileSystem.SetLastAccessTimeUtc(_path, value); }
}
/// <summary>
/// Gets or sets the last time (in local time) the file or directory was written to.
/// </summary>
public virtual DateTime LastWriteTime
{
get { return LastWriteTimeUtc.ToLocalTime(); }
set { LastWriteTimeUtc = value.ToUniversalTime(); }
}
/// <summary>
/// Gets or sets the last time (in UTC) the file or directory was written to.
/// </summary>
public virtual DateTime LastWriteTimeUtc
{
get { return FileSystem.GetLastWriteTimeUtc(_path); }
set { FileSystem.SetLastWriteTimeUtc(_path, value); }
}
/// <summary>
/// Gets the path to the referenced file.
/// </summary>
protected string Path
{
get { return _path; }
}
/// <summary>
/// Deletes a file or directory.
/// </summary>
public virtual void Delete()
{
if ((Attributes & FileAttributes.Directory) != 0)
{
FileSystem.DeleteDirectory(_path);
}
else
{
FileSystem.DeleteFile(_path);
}
}
/// <summary>
/// Indicates if <paramref name="obj"/> is equivalent to this object.
/// </summary>
/// <param name="obj">The object to compare</param>
/// <returns><c>true</c> if <paramref name="obj"/> is equivalent, else <c>false</c></returns>
public override bool Equals(object obj)
{
DiscFileSystemInfo asInfo = obj as DiscFileSystemInfo;
if (obj == null)
{
return false;
}
return string.Compare(Path, asInfo.Path, StringComparison.Ordinal) == 0 && DiscFileSystem.Equals(FileSystem, asInfo.FileSystem);
}
/// <summary>
/// Gets the hash code for this object.
/// </summary>
/// <returns>The hash code</returns>
public override int GetHashCode()
{
return _path.GetHashCode() ^ _fileSystem.GetHashCode();
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using Microsoft.AspNetCore.Mvc.ViewComponents;
using Microsoft.AspNetCore.Mvc.ViewEngines;
using Microsoft.Extensions.Logging;
namespace Microsoft.AspNetCore.Mvc.ViewFeatures
{
    // Strongly-typed, pre-compiled logging extensions for the MVC view-features
    // pipeline. Each message is created once via LoggerMessage.Define (cached
    // delegate, no per-call allocation); EventIds are only unique within each
    // feature area, not globally.
    internal static class MvcViewFeaturesLoggerExtensions
    {
        private static readonly Action<ILogger, string, string[], Exception> _viewComponentExecuting;
        private static readonly Action<ILogger, string, double, string, Exception> _viewComponentExecuted;
        private static readonly Action<ILogger, string, double, Exception> _partialViewFound;
        private static readonly Action<ILogger, string, IEnumerable<string>, Exception> _partialViewNotFound;
        private static readonly Action<ILogger, string, Exception> _partialViewResultExecuting;
        private static readonly Action<ILogger, string, double, Exception> _partialViewResultExecuted;
        private static readonly Action<ILogger, string, Exception> _antiforgeryTokenInvalid;
        private static readonly Action<ILogger, string, Exception> _viewComponentResultExecuting;
        private static readonly Action<ILogger, string, Exception> _viewResultExecuting;
        private static readonly Action<ILogger, string, double, Exception> _viewResultExecuted;
        private static readonly Action<ILogger, string, double, Exception> _viewFound;
        private static readonly Action<ILogger, string, IEnumerable<string>, Exception> _viewNotFound;
        private static readonly Action<ILogger, string, Exception> _tempDataCookieNotFound;
        private static readonly Action<ILogger, string, Exception> _tempDataCookieLoadSuccess;
        private static readonly Action<ILogger, string, Exception> _tempDataCookieLoadFailure;
        private static readonly Action<ILogger, Type, Exception> _notMostEffectiveFilter;
        // Used for messages whose extension method performs its own IsEnabled
        // check before invoking the delegate (avoids a redundant check inside
        // LoggerMessage).
        private static readonly LogDefineOptions SkipEnabledCheckLogOptions = new() { SkipEnabledCheck = true };
        static MvcViewFeaturesLoggerExtensions()
        {
            _viewComponentExecuting = LoggerMessage.Define<string, string[]>(
                LogLevel.Debug,
                new EventId(1, "ViewComponentExecuting"),
                "Executing view component {ViewComponentName} with arguments ({Arguments}).",
                SkipEnabledCheckLogOptions);
            _viewComponentExecuted = LoggerMessage.Define<string, double, string>(
                LogLevel.Debug,
                new EventId(2, "ViewComponentExecuted"),
                "Executed view component {ViewComponentName} in {ElapsedMilliseconds}ms and returned " +
                "{ViewComponentResult}",
                SkipEnabledCheckLogOptions);
            _partialViewResultExecuting = LoggerMessage.Define<string>(
                LogLevel.Information,
                new EventId(1, "PartialViewResultExecuting"),
                "Executing PartialViewResult, running view {PartialViewName}.");
            _partialViewFound = LoggerMessage.Define<string, double>(
                LogLevel.Debug,
                new EventId(2, "PartialViewFound"),
                "The partial view path '{PartialViewFilePath}' was found in {ElapsedMilliseconds}ms.");
            _partialViewNotFound = LoggerMessage.Define<string, IEnumerable<string>>(
                LogLevel.Error,
                new EventId(3, "PartialViewNotFound"),
                "The partial view '{PartialViewName}' was not found. Searched locations: {SearchedViewLocations}");
            _partialViewResultExecuted = LoggerMessage.Define<string, double>(
                LogLevel.Information,
                new EventId(4, "PartialViewResultExecuted"),
                "Executed PartialViewResult - view {PartialViewName} executed in {ElapsedMilliseconds}ms.");
            _antiforgeryTokenInvalid = LoggerMessage.Define<string>(
                LogLevel.Information,
                new EventId(1, "AntiforgeryTokenInvalid"),
                "Antiforgery token validation failed. {Message}");
            _viewComponentResultExecuting = LoggerMessage.Define<string>(
                LogLevel.Information,
                new EventId(1, "ViewComponentResultExecuting"),
                "Executing ViewComponentResult, running {ViewComponentName}.",
                SkipEnabledCheckLogOptions);
            _viewResultExecuting = LoggerMessage.Define<string>(
                LogLevel.Information,
                new EventId(1, "ViewResultExecuting"),
                "Executing ViewResult, running view {ViewName}.");
            _viewFound = LoggerMessage.Define<string, double>(
                LogLevel.Debug,
                new EventId(2, "ViewFound"),
                "The view path '{ViewFilePath}' was found in {ElapsedMilliseconds}ms.");
            _viewNotFound = LoggerMessage.Define<string, IEnumerable<string>>(
                LogLevel.Error,
                new EventId(3, "ViewNotFound"),
                "The view '{ViewName}' was not found. Searched locations: {SearchedViewLocations}");
            _viewResultExecuted = LoggerMessage.Define<string, double>(
                LogLevel.Information,
                new EventId(4, "ViewResultExecuted"),
                "Executed ViewResult - view {ViewName} executed in {ElapsedMilliseconds}ms.");
            _tempDataCookieNotFound = LoggerMessage.Define<string>(
                LogLevel.Debug,
                new EventId(1, "TempDataCookieNotFound"),
                "The temp data cookie {CookieName} was not found.");
            _tempDataCookieLoadSuccess = LoggerMessage.Define<string>(
                LogLevel.Debug,
                new EventId(2, "TempDataCookieLoadSuccess"),
                "The temp data cookie {CookieName} was used to successfully load temp data.");
            _tempDataCookieLoadFailure = LoggerMessage.Define<string>(
                LogLevel.Warning,
                new EventId(3, "TempDataCookieLoadFailure"),
                "The temp data cookie {CookieName} could not be loaded.");
            // NOTE(review): the template below says "as its not" -- should be
            // "it's". Left unchanged here because the message template is part
            // of the observable logging output.
            _notMostEffectiveFilter = LoggerMessage.Define<Type>(
                LogLevel.Trace,
                new EventId(1, "NotMostEffectiveFilter"),
                "Skipping the execution of current filter as its not the most effective filter implementing the policy {FilterPolicy}.");
        }
        #nullable enable
        // Begins a logging scope carrying the view component's name and id.
        public static IDisposable? ViewComponentScope(this ILogger logger, ViewComponentContext context)
        {
            return logger.BeginScope(new ViewComponentLogScope(context.ViewComponentDescriptor));
        }
        #nullable restore
        public static void ViewComponentExecuting(
            this ILogger logger,
            ViewComponentContext context,
            object[] arguments)
        {
            // Guard here because the message was defined with SkipEnabledCheck;
            // also avoids formatting arguments when Debug logging is off.
            if (logger.IsEnabled(LogLevel.Debug))
            {
                var formattedArguments = GetFormattedArguments(arguments);
                _viewComponentExecuting(logger, context.ViewComponentDescriptor.DisplayName, formattedArguments, null);
            }
        }
        // Converts the argument array to invariant-culture strings for logging.
        private static string[] GetFormattedArguments(object[] arguments)
        {
            if (arguments == null || arguments.Length == 0)
            {
                return Array.Empty<string>();
            }
            var formattedArguments = new string[arguments.Length];
            for (var i = 0; i < formattedArguments.Length; i++)
            {
                formattedArguments[i] = Convert.ToString(arguments[i], CultureInfo.InvariantCulture);
            }
            return formattedArguments;
        }
        public static void ViewComponentExecuted(
            this ILogger logger,
            ViewComponentContext context,
            TimeSpan timespan,
            object result)
        {
            // Don't log if logging wasn't enabled at start of request as time will be wildly wrong.
            if (logger.IsEnabled(LogLevel.Debug))
            {
                _viewComponentExecuted(
                    logger,
                    context.ViewComponentDescriptor.DisplayName,
                    timespan.TotalMilliseconds,
                    Convert.ToString(result, CultureInfo.InvariantCulture),
                    null);
            }
        }
        public static void PartialViewFound(
            this ILogger logger,
            IView view,
            TimeSpan timespan)
        {
            _partialViewFound(logger, view.Path, timespan.TotalMilliseconds, null);
        }
        public static void PartialViewNotFound(
            this ILogger logger,
            string partialViewName,
            IEnumerable<string> searchedLocations)
        {
            _partialViewNotFound(logger, partialViewName, searchedLocations, null);
        }
        public static void PartialViewResultExecuting(this ILogger logger, string partialViewName)
        {
            _partialViewResultExecuting(logger, partialViewName, null);
        }
        public static void PartialViewResultExecuted(this ILogger logger, string partialViewName, TimeSpan timespan)
        {
            _partialViewResultExecuted(logger, partialViewName, timespan.TotalMilliseconds, null);
        }
        public static void AntiforgeryTokenInvalid(this ILogger logger, string message, Exception exception)
        {
            _antiforgeryTokenInvalid(logger, message, exception);
        }
        public static void ViewComponentResultExecuting(this ILogger logger, string viewComponentName)
        {
            // Explicit guard required: message defined with SkipEnabledCheck.
            if (logger.IsEnabled(LogLevel.Information))
            {
                _viewComponentResultExecuting(logger, viewComponentName, null);
            }
        }
        public static void ViewComponentResultExecuting(this ILogger logger, Type viewComponentType)
        {
            // Explicit guard required: message defined with SkipEnabledCheck.
            if (logger.IsEnabled(LogLevel.Information))
            {
                _viewComponentResultExecuting(logger, viewComponentType.Name, null);
            }
        }
        public static void ViewResultExecuting(this ILogger logger, string viewName)
        {
            _viewResultExecuting(logger, viewName, null);
        }
        public static void ViewResultExecuted(this ILogger logger, string viewName, TimeSpan timespan)
        {
            _viewResultExecuted(logger, viewName, timespan.TotalMilliseconds, null);
        }
        public static void ViewFound(this ILogger logger, IView view, TimeSpan timespan)
        {
            _viewFound(logger, view.Path, timespan.TotalMilliseconds, null);
        }
        public static void ViewNotFound(this ILogger logger, string viewName,
            IEnumerable<string> searchedLocations)
        {
            _viewNotFound(logger, viewName, searchedLocations, null);
        }
        public static void TempDataCookieNotFound(this ILogger logger, string cookieName)
        {
            _tempDataCookieNotFound(logger, cookieName, null);
        }
        public static void TempDataCookieLoadSuccess(this ILogger logger, string cookieName)
        {
            _tempDataCookieLoadSuccess(logger, cookieName, null);
        }
        public static void TempDataCookieLoadFailure(this ILogger logger, string cookieName, Exception exception)
        {
            _tempDataCookieLoadFailure(logger, cookieName, exception);
        }
        public static void NotMostEffectiveFilter(this ILogger logger, Type policyType)
        {
            _notMostEffectiveFilter(logger, policyType, null);
        }
        // Two-entry log scope exposing ViewComponentName and ViewComponentId as
        // structured logging state; ToString() yields just the display name.
        private class ViewComponentLogScope : IReadOnlyList<KeyValuePair<string, object>>
        {
            private readonly ViewComponentDescriptor _descriptor;
            public ViewComponentLogScope(ViewComponentDescriptor descriptor)
            {
                _descriptor = descriptor;
            }
            public KeyValuePair<string, object> this[int index]
            {
                get
                {
                    if (index == 0)
                    {
                        return new KeyValuePair<string, object>("ViewComponentName", _descriptor.DisplayName);
                    }
                    else if (index == 1)
                    {
                        return new KeyValuePair<string, object>("ViewComponentId", _descriptor.Id);
                    }
                    throw new IndexOutOfRangeException(nameof(index));
                }
            }
            public int Count => 2;
            public IEnumerator<KeyValuePair<string, object>> GetEnumerator()
            {
                for (var i = 0; i < Count; ++i)
                {
                    yield return this[i];
                }
            }
            public override string ToString()
            {
                return _descriptor.DisplayName;
            }
            IEnumerator IEnumerable.GetEnumerator()
            {
                return GetEnumerator();
            }
        }
    }
}
| |
/*
* Infoplus API
*
* Infoplus API.
*
* OpenAPI spec version: v1.0
* Contact: [email protected]
* Generated by: https://github.com/swagger-api/swagger-codegen.git
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Text.RegularExpressions;
using System.IO;
using System.Web;
using System.Linq;
using System.Net;
using System.Text;
using Newtonsoft.Json;
using RestSharp;
namespace Infoplus.Client
{
/// <summary>
/// API client is mainly responsible for making the HTTP call to the API backend.
/// </summary>
public partial class ApiClient
{
        // JSON.NET settings used for all (de)serialization performed by this
        // client; allows model types with non-public default constructors,
        // which is common for generated model classes.
        private JsonSerializerSettings serializerSettings = new JsonSerializerSettings
        {
            ConstructorHandling = ConstructorHandling.AllowNonPublicDefaultConstructor
        };
        /// <summary>
        /// Allows for extending request processing for <see cref="ApiClient"/> generated code.
        /// </summary>
        /// <param name="request">The RestSharp request object</param>
        // Partial-method hook: a no-op unless another part of this partial
        // class provides an implementation.
        partial void InterceptRequest(IRestRequest request);
        /// <summary>
        /// Allows for extending response processing for <see cref="ApiClient"/> generated code.
        /// </summary>
        /// <param name="request">The RestSharp request object</param>
        /// <param name="response">The RestSharp response object</param>
        // Partial-method hook: a no-op unless another part of this partial
        // class provides an implementation.
        partial void InterceptResponse(IRestRequest request, IRestResponse response);
        /// <summary>
        /// Initializes a new instance of the <see cref="ApiClient" /> class
        /// with default configuration and base path (https://kingsrook.localhost-testsubdomain1.infopluswms.com:8443/infoplus-wms/api).
        /// </summary>
        public ApiClient()
        {
            // Shares the process-wide default configuration; changes made through
            // this instance's Configuration are therefore visible globally.
            Configuration = Configuration.Default;
            RestClient = new RestClient("https://kingsrook.localhost-testsubdomain1.infopluswms.com:8443/infoplus-wms/api");
        }
/// <summary>
/// Initializes a new instance of the <see cref="ApiClient" /> class
/// with default base path (https://kingsrook.localhost-testsubdomain1.infopluswms.com:8443/infoplus-wms/api).
/// </summary>
/// <param name="config">An instance of Configuration.</param>
public ApiClient(Configuration config = null)
{
if (config == null)
Configuration = Configuration.Default;
else
Configuration = config;
RestClient = new RestClient("https://kingsrook.localhost-testsubdomain1.infopluswms.com:8443/infoplus-wms/api");
}
/// <summary>
/// Initializes a new instance of the <see cref="ApiClient" /> class
/// with default configuration.
/// </summary>
/// <param name="basePath">The base path.</param>
public ApiClient(String basePath = "https://kingsrook.localhost-testsubdomain1.infopluswms.com:8443/infoplus-wms/api")
{
if (String.IsNullOrEmpty(basePath))
throw new ArgumentException("basePath cannot be empty");
RestClient = new RestClient(basePath);
Configuration = Configuration.Default;
}
        /// <summary>
        /// Gets or sets the default API client for making HTTP calls.
        /// </summary>
        /// <value>The default API client.</value>
        // NOTE(review): mutable public static field kept only for backward
        // compatibility; deprecated in favor of Configuration.Default.ApiClient.
        [Obsolete("ApiClient.Default is deprecated, please use 'Configuration.Default.ApiClient' instead.")]
        public static ApiClient Default;
        /// <summary>
        /// Gets or sets the Configuration.
        /// </summary>
        /// <value>An instance of the Configuration.</value>
        public Configuration Configuration { get; set; }
        /// <summary>
        /// Gets or sets the RestClient.
        /// </summary>
        /// <value>An instance of the RestClient</value>
        public RestClient RestClient { get; set; }
// Creates and sets up a RestRequest prior to a call.
private RestRequest PrepareRequest(
String path, RestSharp.Method method, Dictionary<String, String> queryParams, Object postBody,
Dictionary<String, String> headerParams, Dictionary<String, String> formParams,
Dictionary<String, FileParameter> fileParams, Dictionary<String, String> pathParams,
String contentType)
{
var request = new RestRequest(path, method);
// add path parameter, if any
foreach(var param in pathParams)
request.AddParameter(param.Key, param.Value, ParameterType.UrlSegment);
// add header parameter, if any
foreach(var param in headerParams)
request.AddHeader(param.Key, param.Value);
// add query parameter, if any
foreach(var param in queryParams)
request.AddQueryParameter(param.Key, param.Value);
// add form parameter, if any
foreach(var param in formParams)
request.AddParameter(param.Key, param.Value);
// add file parameter, if any
foreach(var param in fileParams)
{
request.AddFile(param.Value.Name, param.Value.Writer, param.Value.FileName, param.Value.ContentType);
}
if (postBody != null) // http body (model or byte[]) parameter
{
if (postBody.GetType() == typeof(String))
{
request.AddParameter("application/json", postBody, ParameterType.RequestBody);
}
else if (postBody.GetType() == typeof(byte[]))
{
request.AddParameter(contentType, postBody, ParameterType.RequestBody);
}
}
return request;
}
/// <summary>
/// Makes the HTTP request (Sync).
/// </summary>
/// <param name="path">URL path.</param>
/// <param name="method">HTTP method.</param>
/// <param name="queryParams">Query parameters.</param>
/// <param name="postBody">HTTP body (POST request).</param>
/// <param name="headerParams">Header parameters.</param>
/// <param name="formParams">Form parameters.</param>
/// <param name="fileParams">File parameters.</param>
/// <param name="pathParams">Path parameters.</param>
/// <param name="contentType">Content Type of the request</param>
/// <returns>Object</returns>
public Object CallApi(
String path, RestSharp.Method method, Dictionary<String, String> queryParams, Object postBody,
Dictionary<String, String> headerParams, Dictionary<String, String> formParams,
Dictionary<String, FileParameter> fileParams, Dictionary<String, String> pathParams,
String contentType)
{
var request = PrepareRequest(
path, method, queryParams, postBody, headerParams, formParams, fileParams,
pathParams, contentType);
// set timeout
RestClient.Timeout = Configuration.Timeout;
// set user agent
RestClient.UserAgent = Configuration.UserAgent;
InterceptRequest(request);
var response = RestClient.Execute(request);
InterceptResponse(request, response);
return (Object) response;
}
/// <summary>
/// Makes the asynchronous HTTP request.
/// </summary>
/// <param name="path">URL path.</param>
/// <param name="method">HTTP method.</param>
/// <param name="queryParams">Query parameters.</param>
/// <param name="postBody">HTTP body (POST request).</param>
/// <param name="headerParams">Header parameters.</param>
/// <param name="formParams">Form parameters.</param>
/// <param name="fileParams">File parameters.</param>
/// <param name="pathParams">Path parameters.</param>
/// <param name="contentType">Content type.</param>
/// <returns>The Task instance.</returns>
public async System.Threading.Tasks.Task<Object> CallApiAsync(
String path, RestSharp.Method method, Dictionary<String, String> queryParams, Object postBody,
Dictionary<String, String> headerParams, Dictionary<String, String> formParams,
Dictionary<String, FileParameter> fileParams, Dictionary<String, String> pathParams,
String contentType)
{
var request = PrepareRequest(
path, method, queryParams, postBody, headerParams, formParams, fileParams,
pathParams, contentType);
InterceptRequest(request);
var response = await RestClient.ExecuteTaskAsync(request);
InterceptResponse(request, response);
return (Object)response;
}
/// <summary>
/// Escape string (url-encoded).
/// </summary>
/// <param name="str">String to be escaped.</param>
/// <returns>Escaped string.</returns>
public string EscapeString(string str)
{
return UrlEncode(str);
}
/// <summary>
/// Create FileParameter based on Stream.
/// </summary>
/// <param name="name">Parameter name.</param>
/// <param name="stream">Input stream.</param>
/// <returns>FileParameter.</returns>
public FileParameter ParameterToFile(string name, Stream stream)
{
if (stream is FileStream)
return FileParameter.Create(name, ReadAsBytes(stream), Path.GetFileName(((FileStream)stream).Name));
else
return FileParameter.Create(name, ReadAsBytes(stream), "no_file_name_provided");
}
/// <summary>
/// If parameter is DateTime, output in a formatted string (default ISO 8601), customizable with Configuration.DateTime.
/// If parameter is a list, join the list with ",".
/// Otherwise just return the string.
/// </summary>
/// <param name="obj">The parameter (header, path, query, form).</param>
/// <returns>Formatted string.</returns>
public string ParameterToString(object obj)
{
if (obj is DateTime)
// Return a formatted date string - Can be customized with Configuration.DateTimeFormat
// Defaults to an ISO 8601, using the known as a Round-trip date/time pattern ("o")
// https://msdn.microsoft.com/en-us/library/az4se3k1(v=vs.110).aspx#Anchor_8
// For example: 2009-06-15T13:45:30.0000000
return ((DateTime)obj).ToString (Configuration.DateTimeFormat);
else if (obj is DateTimeOffset)
// Return a formatted date string - Can be customized with Configuration.DateTimeFormat
// Defaults to an ISO 8601, using the known as a Round-trip date/time pattern ("o")
// https://msdn.microsoft.com/en-us/library/az4se3k1(v=vs.110).aspx#Anchor_8
// For example: 2009-06-15T13:45:30.0000000
return ((DateTimeOffset)obj).ToString (Configuration.DateTimeFormat);
else if (obj is IList)
{
var flattenedString = new StringBuilder();
foreach (var param in (IList)obj)
{
if (flattenedString.Length > 0)
flattenedString.Append(",");
flattenedString.Append(param);
}
return flattenedString.ToString();
}
else
return Convert.ToString (obj);
}
/// <summary>
/// Deserialize the JSON string into a proper object.
/// </summary>
/// <param name="response">The HTTP response.</param>
/// <param name="type">Object type.</param>
/// <returns>Object representation of the JSON string.</returns>
public object Deserialize(IRestResponse response, Type type)
{
IList<Parameter> headers = response.Headers;
if (type == typeof(byte[])) // return byte array
{
return response.RawBytes;
}
if (type == typeof(Stream))
{
if (headers != null)
{
var filePath = String.IsNullOrEmpty(Configuration.TempFolderPath)
? Path.GetTempPath()
: Configuration.TempFolderPath;
var regex = new Regex(@"Content-Disposition=.*filename=['""]?([^'""\s]+)['""]?$");
foreach (var header in headers)
{
var match = regex.Match(header.ToString());
if (match.Success)
{
string fileName = filePath + SanitizeFilename(match.Groups[1].Value.Replace("\"", "").Replace("'", ""));
File.WriteAllBytes(fileName, response.RawBytes);
return new FileStream(fileName, FileMode.Open);
}
}
}
var stream = new MemoryStream(response.RawBytes);
return stream;
}
if (type.Name.StartsWith("System.Nullable`1[[System.DateTime")) // return a datetime object
{
return DateTime.Parse(response.Content, null, System.Globalization.DateTimeStyles.RoundtripKind);
}
if (type == typeof(String) || type.Name.StartsWith("System.Nullable")) // return primitive type
{
return ConvertType(response.Content, type);
}
// at this point, it must be a model (json)
try
{
return JsonConvert.DeserializeObject(response.Content, type, serializerSettings);
}
catch (Exception e)
{
throw new ApiException(500, e.Message);
}
}
/// <summary>
/// Serialize an input (model) into JSON string
/// </summary>
/// <param name="obj">Object.</param>
/// <returns>JSON string.</returns>
public String Serialize(object obj)
{
try
{
return obj != null ? JsonConvert.SerializeObject(obj) : null;
}
catch (Exception e)
{
throw new ApiException(500, e.Message);
}
}
/// <summary>
/// Select the Content-Type header's value from the given content-type array:
/// if JSON exists in the given array, use it;
/// otherwise use the first one defined in 'consumes'
/// </summary>
/// <param name="contentTypes">The Content-Type array to select from.</param>
/// <returns>The Content-Type header to use.</returns>
public String SelectHeaderContentType(String[] contentTypes)
{
if (contentTypes.Length == 0)
return null;
if (contentTypes.Contains("application/json", StringComparer.OrdinalIgnoreCase))
return "application/json";
return contentTypes[0]; // use the first content type specified in 'consumes'
}
/// <summary>
/// Select the Accept header's value from the given accepts array:
/// if JSON exists in the given array, use it;
/// otherwise use all of them (joining into a string)
/// </summary>
/// <param name="accepts">The accepts array to select from.</param>
/// <returns>The Accept header to use.</returns>
public String SelectHeaderAccept(String[] accepts)
{
if (accepts.Length == 0)
return null;
if (accepts.Contains("application/json", StringComparer.OrdinalIgnoreCase))
return "application/json";
return String.Join(",", accepts);
}
/// <summary>
/// Encode string in base64 format.
/// </summary>
/// <param name="text">String to be encoded.</param>
/// <returns>Encoded string.</returns>
public static string Base64Encode(string text)
{
return System.Convert.ToBase64String(System.Text.Encoding.UTF8.GetBytes(text));
}
/// <summary>
/// Dynamically cast the object into target type.
/// Ref: http://stackoverflow.com/questions/4925718/c-dynamic-runtime-cast
/// </summary>
/// <param name="source">Object to be casted</param>
/// <param name="dest">Target type</param>
/// <returns>Casted object</returns>
public static dynamic ConvertType(dynamic source, Type dest)
{
return Convert.ChangeType(source, dest);
}
/// <summary>
/// Convert stream to byte array
/// Credit/Ref: http://stackoverflow.com/a/221941/677735
/// </summary>
/// <param name="input">Input stream to be converted</param>
/// <returns>Byte array</returns>
public static byte[] ReadAsBytes(Stream input)
{
byte[] buffer = new byte[16*1024];
using (MemoryStream ms = new MemoryStream())
{
int read;
while ((read = input.Read(buffer, 0, buffer.Length)) > 0)
{
ms.Write(buffer, 0, read);
}
return ms.ToArray();
}
}
/// <summary>
/// URL encode a string
/// Credit/Ref: https://github.com/restsharp/RestSharp/blob/master/RestSharp/Extensions/StringExtensions.cs#L50
/// </summary>
/// <param name="input">String to be URL encoded</param>
/// <returns>Byte array</returns>
public static string UrlEncode(string input)
{
const int maxLength = 32766;
if (input == null)
{
throw new ArgumentNullException("input");
}
if (input.Length <= maxLength)
{
return Uri.EscapeDataString(input);
}
StringBuilder sb = new StringBuilder(input.Length * 2);
int index = 0;
while (index < input.Length)
{
int length = Math.Min(input.Length - index, maxLength);
string subString = input.Substring(index, length);
sb.Append(Uri.EscapeDataString(subString));
index += subString.Length;
}
return sb.ToString();
}
/// <summary>
/// Sanitize filename by removing the path
/// </summary>
/// <param name="filename">Filename</param>
/// <returns>Filename</returns>
public static string SanitizeFilename(string filename)
{
Match match = Regex.Match(filename, @".*[/\\](.*)$");
if (match.Success)
{
return match.Groups[1].Value;
}
else
{
return filename;
}
}
}
}
| |
/*
* Copyright (c) 2015, InWorldz Halcyon Developers
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* * Redistributions of source code must retain the above copyright notice, this
* list of conditions and the following disclaimer.
*
* * Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
*
* * Neither the name of halcyon nor the names of its
* contributors may be used to endorse or promote products derived from
* this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
* CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
* OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using OpenSim.Framework;
using OpenSim.Region.Physics.Manager;
using System.Threading;
using System.IO;
using log4net;
using System.Reflection;
using OpenSim.Region.Physics.ConvexDecompositionDotNet;
namespace InWorldz.PhysxPhysics.Meshing
{
/// <summary>
/// Manages the meshing of physics objects. Actual meshing is done in a separate
/// thread to not stop/hinder the main loop or the physical simulation
/// </summary>
internal partial class MeshingStage : IDisposable
{
private static readonly ILog m_log
= LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
public delegate void ShapeNeedsFreeingDelegate(PhysicsShape shape);
public delegate void TerrainMeshingCompleteDelegate(Tuple<PhysX.TriangleMesh, MemoryStream> terrainData);
/// <summary>
/// This event should be hooked to enable the proper freeing of physx geometries in
/// the simulation thread to prevent threading problems
/// </summary>
public event ShapeNeedsFreeingDelegate OnShapeNeedsFreeing;
/// <summary>
/// Ripped from ODE. Don't know why "32" is the magic number here
/// </summary>
public const float SCULPT_MESH_LOD = (float)LevelOfDetail.High;
public delegate void MeshingCompleteDelegate(PhysicsShape shape);
        /// <summary>
        /// A unit of work consumed by the meshing worker thread's queue.
        /// </summary>
        internal interface IMeshingQueueItem
        {
            // Runs this work item; called on the meshing thread with the owning stage.
            void Execute(MeshingStage meshingStage);
        }
private PhysX.Scene _scene;
private OpenSim.Region.Physics.Manager.IMesher _mesher;
private TerrainMesher _terrainMesher;
private Queue<IMeshingQueueItem> _meshWaitingQueue = new Queue<IMeshingQueueItem>();
private Thread _runThread;
private ShapeCache _singleConvexHullCache = new ShapeCache();
private ShapeCache _acdHullCache = new ShapeCache();
private ShapeCache _trimeshCache = new ShapeCache();
private volatile bool _stop = false;
private PhysX.Cooking _cooking;
public static HacdHullCache HullCache = new HacdHullCache();
        /// <summary>
        /// Creates the meshing stage: sets up the PhysX cooking interface, wires the
        /// shape caches' free events through to OnShapeNeedsFreeing, and starts the
        /// dedicated meshing worker thread.
        /// </summary>
        /// <param name="scene">PhysX scene used to create cooked meshes/hulls.</param>
        /// <param name="mesher">Mesher that converts prim shapes into meshes.</param>
        /// <param name="terrainMesher">Mesher used for terrain heightfields.</param>
        public MeshingStage(PhysX.Scene scene, OpenSim.Region.Physics.Manager.IMesher mesher, TerrainMesher terrainMesher)
        {
            _scene = scene;
            _mesher = mesher;
            _terrainMesher = terrainMesher;
            PhysX.CookingParams cookParms = new PhysX.CookingParams
            {
                SkinWidth = 0.025f,
                SuppressTriangleMeshRemapTable = true,
                TargetPlatform = PhysX.Platform.PC
            };
            _cooking = _scene.Physics.CreateCooking(cookParms);
            // Forward cache eviction notifications so geometry can be freed on the
            // simulation thread (see OnShapeNeedsFreeing doc above).
            _singleConvexHullCache.OnShapeNeedsFreeing += new ShapeCache.ShapeNeedsFreeingDelegate(_shapeCache_OnShapeNeedsFreeing);
            _acdHullCache.OnShapeNeedsFreeing += new ShapeCache.ShapeNeedsFreeingDelegate(_shapeCache_OnShapeNeedsFreeing);
            _trimeshCache.OnShapeNeedsFreeing += new ShapeCache.ShapeNeedsFreeingDelegate(_shapeCache_OnShapeNeedsFreeing);
            // Worker thread runs until Stop() sets _stop and pulses the queue.
            _runThread = new Thread(new ThreadStart(this.QueueProcessorLoop));
            _runThread.Name = "MeshingStage";
            _runThread.Start();
        }
void _shapeCache_OnShapeNeedsFreeing(PhysicsShape shape)
{
if (OnShapeNeedsFreeing != null)
{
OnShapeNeedsFreeing(shape);
}
}
private void QueueProcessorLoop()
{
while (!_stop)
{
ProcessNextQueueItem();
}
}
internal void QueueForMeshing(string primName, OpenSim.Framework.PrimitiveBaseShape pbs, OpenMetaverse.Vector3 size, float lod,
bool isDynamic, byte[] serializedShapes, bool fromCrossing, MeshingCompleteDelegate completedDelegate)
{
lock (_meshWaitingQueue)
{
_meshWaitingQueue.Enqueue(
new MeshingQueueItem
{
PrimName = primName,
Shape = pbs,
Size = size,
LOD = lod,
IsDynamic = isDynamic,
SerializedShapes = serializedShapes,
CompletedDelegate = completedDelegate,
FromCrossing = fromCrossing
});
Monitor.Pulse(_meshWaitingQueue);
}
}
internal void UnrefShape(PhysicsShape shape, bool dynamic)
{
lock (_meshWaitingQueue)
{
_meshWaitingQueue.Enqueue(new UnrefShapeItem
{
Shape = shape,
Dynamic = dynamic
});
Monitor.Pulse(_meshWaitingQueue);
}
}
internal void MeshHeightfield(float[] heightField, TerrainMeshingCompleteDelegate callback)
{
lock (_meshWaitingQueue)
{
_meshWaitingQueue.Enqueue(new MeshHeightfieldItem
{
Terrain = heightField,
CompleteCallback = callback
});
Monitor.Pulse(_meshWaitingQueue);
}
}
        /// <summary>
        /// Blocks until a work item is available (or shutdown is requested), then
        /// dequeues and executes a single item. Runs on the meshing thread.
        /// </summary>
        private void ProcessNextQueueItem()
        {
            IMeshingQueueItem item;
            lock (_meshWaitingQueue)
            {
                // Sleep until Stop() or a producer pulses the queue; the loop guards
                // against spurious wakeups while the queue is still empty.
                while (!_stop && _meshWaitingQueue.Count == 0)
                {
                    Monitor.Wait(_meshWaitingQueue);
                }
                // Shutdown requested: abandon any remaining queued work.
                if (_stop) return;
                if (_meshWaitingQueue.Count > 0)
                {
                    item = _meshWaitingQueue.Dequeue();
                }
                else
                {
                    return;
                }
            }
            // Execute outside the lock so producers are not blocked during meshing.
            item.Execute(this);
        }
private Tuple<PhysX.Geometry, ShapeType> GenerateBasicPhysXShape(PrimitiveBaseShape primitiveBaseShape)
{
if (primitiveBaseShape.ProfileShape == ProfileShape.HalfCircle)
{
//this is a sphere
PhysX.Geometry geom = this.GeneratePhysXSphereShape(primitiveBaseShape);
return new Tuple<PhysX.Geometry, ShapeType>(geom, ShapeType.PrimitiveSphere);
}
else
{
//this is a box
PhysX.Geometry geom = this.GeneratePhysXBoxShape(primitiveBaseShape);
return new Tuple<PhysX.Geometry, ShapeType>(geom, ShapeType.PrimitiveBox);
}
}
private PhysX.Geometry GeneratePhysXBoxShape(PrimitiveBaseShape primitiveBaseShape)
{
//box geometry size is specified in half extents
return new PhysX.BoxGeometry(primitiveBaseShape.Scale.X / 2.0f, primitiveBaseShape.Scale.Y / 2.0f, primitiveBaseShape.Scale.Z / 2.0f);
}
private PhysX.Geometry GeneratePhysXSphereShape(PrimitiveBaseShape primitiveBaseShape)
{
return new PhysX.SphereGeometry(primitiveBaseShape.Scale.X / 2.0f);
}
private PhysX.TriangleMeshGeometry GeneratePhysXTrimeshShape(string primName, PrimitiveBaseShape shape, OpenMetaverse.Vector3 size, float LOD, bool isDynamic)
{
MeshingResult result = _mesher.CreateMesh(primName, shape, size, LOD, ShapeType.TriMesh, true);
if (result == null) return null;
IMesh mesh = result.TriMesh;
if (mesh == null) return null;
int[] indexes = mesh.getIndexListAsInt();
PhysX.Math.Vector3[] verts = PhysUtil.OmvVectorArrayToPhysx(mesh.getVertexListAsArray());
mesh.ReleaseSourceMeshData();
PhysX.TriangleMeshDesc desc = new PhysX.TriangleMeshDesc
{
Points = verts,
Triangles = indexes,
};
if (!desc.IsValid())
{
m_log.Warn("[InWorldz.PhysxPhysics] Unable to create trimesh for shape. Invalid description.");
return null;
}
using (MemoryStream ms = new MemoryStream())
{
try
{
if (!_cooking.CookTriangleMesh(desc, ms))
{
m_log.Warn("[InWorldz.PhysxPhysics] Unable to create trimesh for shape.");
return null;
}
}
catch (Exception e)
{
m_log.Warn("[InWorldz.PhysxPhysics] Unable to create trimesh for shape: {0}", e);
return null;
}
ms.Position = 0;
try
{
PhysX.TriangleMesh triMesh = _scene.Physics.CreateTriangleMesh(ms);
//m_log.DebugFormat("Trimesh Created: {0} {1}", triMesh.GetHashCode(), primName);
PhysX.TriangleMeshGeometry triGeom = new PhysX.TriangleMeshGeometry(triMesh);
return triGeom;
}
catch (Exception e)
{
m_log.WarnFormat("[InWorldz.PhysxPhysics] Unable to create trimesh for shape: {0}", e);
return null;
}
}
}
        /// <summary>
        /// Builds a set of convex hull geometries for the given prim via approximate
        /// convex decomposition (ACD) of its mesh, cooking each hull with PhysX.
        /// On any decomposition/cooking failure, falls back to a single basic convex
        /// hull of the whole mesh.
        /// </summary>
        /// <param name="meshHash">Caller's mesh hash; not used here — see the note
        /// below about Hacd needing a hash computed from the unscaled mesh.</param>
        /// <returns>The list of cooked hull geometries, or null on failure.</returns>
        private List<PhysX.ConvexMeshGeometry> GenerateComplexPhysXShape(ulong meshHash, string primName, PrimitiveBaseShape shape, OpenMetaverse.Vector3 size,
            float LOD, bool isDynamic)
        {
            //create the mesh and do not prescale it. the ACD algorithm can then cache the output hulls and
            //scale as appropriate
            MeshingResult result = _mesher.CreateMesh(primName, shape, size, LOD, ShapeType.DecomposedConvexHulls, false);
            if (result == null) return null;
            HacdConvexHull[] hulls = null;
            if (result.ResultType == ShapeType.TriMesh)
            {
                IMesh mesh = result.TriMesh;
                if (mesh == null) return null;
                //Debugging.VrmlGenerator.SaveToVrmlFile("lastMesh.wrl", mesh.getVertexListAsArray(), mesh.getTriangleList());
                // Choose a decomposition algorithm appropriate for this shape.
                switch (ShapeDeterminer.FindBestAcdAlgorithm(shape))
                {
                    case ShapeDeterminer.AcdAlgorithm.HACD:
                        hulls = DecomposeWithHACD(shape, LOD, mesh);
                        break;
                    case ShapeDeterminer.AcdAlgorithm.RATCLIFF:
                        hulls = DecomposeWithRatcliff(shape, LOD, mesh);
                        break;
                    default:
                        throw new PhysxSdkException("GenerateComplexPhysXShape(): Specified ACD algorithm does not exist");
                }
                if (hulls == null)
                {
                    return null;
                }
            }
            else if (result.ResultType == ShapeType.DecomposedConvexHulls)
            {
                // The mesher already produced hull vertex sets; convert each vertex
                // to a PhysX vector.
                hulls = new HacdConvexHull[result.ConvexVerts.Count];
                for (int i = 0; i < result.ConvexVerts.Count; i++)
                {
                    hulls[i] = new HacdConvexHull { Vertices = new PhysX.Math.Vector3[result.ConvexVerts[i].Count] };
                    for (int j = 0; j < result.ConvexVerts[i].Count; j++)
                    {
                        var vert = result.ConvexVerts[i][j];
                        hulls[i].Vertices[j] = new PhysX.Math.Vector3(vert.X, vert.Y, vert.Z);
                    }
                }
            }
            else
            {
                return null;
            }
            // Hulls were generated unscaled (see above); apply the prim's size now.
            HacdConvexHull.Scale(size, hulls);
            List<PhysX.ConvexMeshGeometry> ret = new List<PhysX.ConvexMeshGeometry>();
            try
            {
                foreach (HacdConvexHull hull in hulls)
                {
                    PhysX.ConvexMeshDesc convexMeshDesc = new PhysX.ConvexMeshDesc()
                    {
                        Flags = PhysX.ConvexFlag.InflateConvex
                    };
                    // Without precomputed indices, ask PhysX to compute the hull itself.
                    if (hull.Indicies == null) convexMeshDesc.Flags |= PhysX.ConvexFlag.ComputeConvex;
                    convexMeshDesc.SetPositions(hull.Vertices);
                    if (hull.Indicies != null) convexMeshDesc.SetTriangles(hull.Indicies);
                    if (!convexMeshDesc.IsValid())
                    {
                        throw new PhysxSdkException("GenerateComplexPhysXShape: Convex mesh description is invalid");
                    }
                    using (MemoryStream ms = new MemoryStream())
                    {
                        if (!_cooking.CookConvexMesh(convexMeshDesc, ms))
                        {
                            throw new PhysxSdkException("GenerateComplexPhysXShape: CookConvexMesh() failed");
                        }
                        ms.Position = 0;
                        PhysX.ConvexMesh convexMesh = _scene.Physics.CreateConvexMesh(ms);
                        PhysX.ConvexMeshGeometry convexShapeGeom = new PhysX.ConvexMeshGeometry(convexMesh);
                        ret.Add(convexShapeGeom);
                    }
                }
            }
            catch (Exception e)
            {
                // Decomposition or cooking failed: retry with a single convex hull
                // of the whole mesh before giving up entirely.
                m_log.WarnFormat("[InWorldz.PhysxPhysics] Unable to create convex hullset for shape: {0}", e);
                result = _mesher.CreateMesh(primName, shape, size, LOD, ShapeType.SingleConvex, true);
                if (result == null)
                {
                    m_log.WarnFormat("[InWorldz.PhysxPhysics] Fallback hull generation failed, giving up", e);
                    return null;
                }
                //direct convex available?
                if (result.ResultType == ShapeType.SingleConvex)
                {
                    if (!TryGenerateFallbackHullFromHullData(ret, e, result.SingleConvex))
                    {
                        return null;
                    }
                }
                else if (result.ResultType == ShapeType.TriMesh)
                {
                    IMesh mesh = result.TriMesh;
                    if (mesh == null)
                    {
                        m_log.WarnFormat("[InWorldz.PhysxPhysics] Fallback hull generation failed, giving up", e);
                        return null;
                    }
                    if (!TryGenerateFallbackHullFromTrimesh(ret, e, mesh))
                    {
                        return null;
                    }
                }
            }
            return ret;
        }
private bool TryGenerateFallbackHullFromHullData(List<PhysX.ConvexMeshGeometry> ret, Exception e, List<OpenMetaverse.Vector3> vertList)
{
PhysX.Math.Vector3[] verts = new PhysX.Math.Vector3[vertList.Count];
for (int i = 0; i < vertList.Count; i++)
{
verts[i] = PhysUtil.OmvVectorToPhysx(vertList[i]);
}
// fall back to basic convex hull
PhysX.ConvexMeshGeometry fallbackHull = this.GenerateBasicConvexHull(null, verts);
if (fallbackHull != null)
{
ret.Add(fallbackHull);
}
else
{
m_log.WarnFormat("[InWorldz.PhysxPhysics] Fallback hull generation failed, giving up", e);
return false;
}
return true;
}
private bool TryGenerateFallbackHullFromTrimesh(List<PhysX.ConvexMeshGeometry> ret, Exception e, IMesh mesh)
{
int[] indexes = mesh.getIndexListAsInt();
float[] verts = mesh.getVertexListAsFloat();
// fall back to basic convex hull
PhysX.ConvexMeshGeometry fallbackHull = this.GenerateBasicConvexHull(indexes, verts);
if (fallbackHull != null)
{
ret.Add(fallbackHull);
}
else
{
m_log.WarnFormat("[InWorldz.PhysxPhysics] Fallback hull generation failed, giving up", e);
return false;
}
return true;
}
private HacdConvexHull[] DecomposeWithRatcliff(PrimitiveBaseShape shape, float LOD, IMesh mesh)
{
List<int> indexes = mesh.getIndexListAsIntList();
List<float3> verts = mesh.getVertexListAsFloat3();
RatcliffACD rcAcd = new RatcliffACD();
HacdConvexHull[] hulls = rcAcd.DecomposeToConvexHulls(shape.GetMeshKey(OpenMetaverse.Vector3.One, LOD), true, verts, indexes);
return hulls;
}
private static HacdConvexHull[] DecomposeWithHACD(PrimitiveBaseShape shape, float LOD, IMesh mesh)
{
int[] indexes = mesh.getIndexListAsInt();
float[] verts = mesh.getVertexListAsFloat();
mesh.ReleaseSourceMeshData();
HacdPreset preset;
if (shape.SculptEntry)
{
preset = Hacd.SculptDefaultPreset;
}
else
{
preset = Hacd.PrimDefaultPreset;
}
//we cant use the hash we have here, as Hacd needs the mesh hash prescaled
HacdConvexHull[] hulls = Hacd.DecomposeToConvexHulls(shape.GetMeshKey(OpenMetaverse.Vector3.One, LOD), shape.SculptEntry == false, preset, verts, indexes);
return hulls;
}
private PhysX.ConvexMeshGeometry GenerateBasicConvexHull(string primName, PrimitiveBaseShape shape, OpenMetaverse.Vector3 size, float LOD, bool isDynamic)
{
MeshingResult result = _mesher.CreateMesh(primName, shape, size, LOD, ShapeType.SingleConvex, true);
if (result == null) return null;
IMesh mesh = result.TriMesh;
if (mesh == null) return null;
int[] indexes = mesh.getIndexListAsInt();
float[] verts = mesh.getVertexListAsFloat();
if (indexes.Length == 0 || verts.Length == 0)
{
return null;
}
return GenerateBasicConvexHull(indexes, verts);
}
private PhysX.ConvexMeshGeometry GenerateBasicConvexHull(int[] indexes, float[] verts)
{
try
{
PhysX.ConvexMeshDesc convexMeshDesc = new PhysX.ConvexMeshDesc()
{
Flags = PhysX.ConvexFlag.InflateConvex | PhysX.ConvexFlag.ComputeConvex
};
convexMeshDesc.SetPositions(PhysUtil.FloatArrayToVectorArray(verts));
if (indexes != null) convexMeshDesc.SetTriangles(indexes);
using (MemoryStream ms = new MemoryStream())
{
if (!_cooking.CookConvexMesh(convexMeshDesc, ms))
{
throw new PhysxSdkException("GenerateBasicConvexHull: CookConvexMesh() failed");
}
ms.Position = 0;
PhysX.ConvexMesh convexMesh = _scene.Physics.CreateConvexMesh(ms);
PhysX.ConvexMeshGeometry convexShapeGeom = new PhysX.ConvexMeshGeometry(convexMesh);
return convexShapeGeom;
}
}
catch (Exception e)
{
m_log.WarnFormat("[InWorldz.PhysxPhysics] Unable to fallback to convex hull for shape: {0}", e);
}
return null;
}
        /// <summary>
        /// Cooks a single convex hull from PhysX vertices, optionally with triangle
        /// indices. Used as the last-resort fallback when ACD fails.
        /// </summary>
        /// <param name="indexes">Triangle indices, or null to skip SetTriangles.</param>
        /// <param name="verts">Hull vertices.</param>
        /// <returns>The cooked hull geometry, or null on failure (failure is logged).</returns>
        private PhysX.ConvexMeshGeometry GenerateBasicConvexHull(int[] indexes, PhysX.Math.Vector3[] verts)
        {
            try
            {
                // ComputeConvex lets PhysX derive the hull from the point cloud;
                // InflateConvex pads it slightly for more robust contacts.
                PhysX.ConvexMeshDesc convexMeshDesc = new PhysX.ConvexMeshDesc()
                {
                    Flags = PhysX.ConvexFlag.InflateConvex | PhysX.ConvexFlag.ComputeConvex
                };
                convexMeshDesc.SetPositions(verts);
                if (indexes != null) convexMeshDesc.SetTriangles(indexes);
                using (MemoryStream ms = new MemoryStream())
                {
                    if (!_cooking.CookConvexMesh(convexMeshDesc, ms))
                    {
                        throw new PhysxSdkException("GenerateBasicConvexHull: CookConvexMesh() failed");
                    }
                    // Rewind the cooked data before handing it to the SDK.
                    ms.Position = 0;
                    PhysX.ConvexMesh convexMesh = _scene.Physics.CreateConvexMesh(ms);
                    PhysX.ConvexMeshGeometry convexShapeGeom = new PhysX.ConvexMeshGeometry(convexMesh);
                    return convexShapeGeom;
                }
            }
            catch (Exception e)
            {
                m_log.WarnFormat("[InWorldz.PhysxPhysics] Unable to fallback to convex hull for shape: {0}", e);
            }
            return null;
        }
        /// <summary>
        /// Signals the meshing worker thread to shut down and blocks until it exits.
        /// </summary>
        internal void Stop()
        {
            // Set the flag first so the worker's wait loop observes it on wakeup.
            _stop = true;
            lock (_meshWaitingQueue)
            {
                // Wake the worker if it is blocked in Monitor.Wait on the queue.
                Monitor.Pulse(_meshWaitingQueue);
            }
            _runThread.Join();
        }
        /// <summary>
        /// Releases the shape caches, the PhysX cooking interface, and the terrain
        /// mesher. NOTE(review): callers appear expected to invoke Stop() first so
        /// the worker thread no longer touches these resources — confirm.
        /// </summary>
        public void Dispose()
        {
            _singleConvexHullCache.Dispose();
            _acdHullCache.Dispose();
            _trimeshCache.Dispose();
            _cooking.Dispose();
            _terrainMesher.Dispose();
        }
        /// <summary>
        /// Switches all shape caches into direct-delete mode, so evicted shapes are
        /// freed immediately instead of being routed through the free event.
        /// </summary>
        internal void InformCachesToPerformDirectDeletes()
        {
            _singleConvexHullCache.BeginPerformingDirectDeletes();
            _acdHullCache.BeginPerformingDirectDeletes();
            _trimeshCache.BeginPerformingDirectDeletes();
        }
private ShapeCache FindCorrespondingCache(ShapeType shapeType)
{
switch (shapeType)
{
case ShapeType.SingleConvex:
return _singleConvexHullCache;
case ShapeType.DecomposedConvexHulls:
return _acdHullCache;
case ShapeType.TriMesh:
return _trimeshCache;
}
return null;
}
internal bool TryGetCachedShape(ulong meshHash, OpenSim.Framework.PrimitiveBaseShape shape, bool isDynamic, out PhysicsShape phyShape)
{
ShapeCache cache = FindCorrespondingCache(ShapeDeterminer.FindBestShape(shape, isDynamic));
if (cache != null)
{
return cache.TryGetShape(meshHash, out phyShape);
}
phyShape = null;
return false;
}
internal void CacheShape(ulong meshHash, PhysicsShape phyShape, ShapeType bestFitShape)
{
ShapeCache cache = FindCorrespondingCache(bestFitShape);
cache.AddShape(meshHash, phyShape);
}
internal void UncacheShape(ulong meshHash, PhysicsShape shape, ShapeType shapeType)
{
ShapeCache cache = FindCorrespondingCache(shapeType);
cache.RemoveShape(meshHash, shape);
}
}
}
| |
using UnityEngine;
using System.Collections.Generic;
using Pathfinding;
// Empty namespace declaration to avoid errors in the free version
// Which does not have any classes in the RVO namespace
namespace Pathfinding.RVO {}
namespace Pathfinding {
using Pathfinding;
using Pathfinding.Util;
using Pathfinding.Serialization.JsonFx;
    [System.Serializable]
    /** Stores editor colors.
     * The underscore-prefixed instance fields are the serialized, inspector-editable
     * values; OnEnable copies them into the static fields that drawing code reads. */
    public class AstarColor {
        public Color _NodeConnection;
        public Color _UnwalkableNode;
        public Color _BoundsHandles;
        public Color _ConnectionLowLerp;
        public Color _ConnectionHighLerp;
        public Color _MeshEdgeColor;
        public Color _MeshColor;
        /** Holds user set area colors.
         * Use GetAreaColor to get an area color */
        public Color[] _AreaColors;
        // Static color values actually consumed at draw time; overwritten from the
        // instance fields by OnEnable.
        public static Color NodeConnection = new Color (1,1,1,0.9F);
        public static Color UnwalkableNode = new Color (1,0,0,0.5F);
        public static Color BoundsHandles = new Color (0.29F,0.454F,0.741F,0.9F);
        public static Color ConnectionLowLerp = new Color (0,1,0,0.5F);
        public static Color ConnectionHighLerp = new Color (1,0,0,0.5F);
        public static Color MeshEdgeColor = new Color (0,0,0,0.5F);
        public static Color MeshColor = new Color (0,0,0,0.5F);
        /** Holds user set area colors.
         * Use GetAreaColor to get an area color */
        private static Color[] AreaColors;
        /** Returns an color for an area, uses both user set ones and calculated.
         * If the user has set a color for the area, it is used, but otherwise the color is calculated using Mathfx.IntToColor
         * \see #AreaColors */
        public static Color GetAreaColor (uint area) {
            // Fall back to a procedurally generated color when no user color exists
            // for this area index.
            if (AreaColors == null || area >= AreaColors.Length) {
                return AstarMath.IntToColor ((int)area,1F);
            }
            return AreaColors[(int)area];
        }
        /** Pushes all local variables out to static ones */
        public void OnEnable () {
            NodeConnection = _NodeConnection;
            UnwalkableNode = _UnwalkableNode;
            BoundsHandles = _BoundsHandles;
            ConnectionLowLerp = _ConnectionLowLerp;
            ConnectionHighLerp = _ConnectionHighLerp;
            MeshEdgeColor = _MeshEdgeColor;
            MeshColor = _MeshColor;
            AreaColors = _AreaColors;
        }
        /** Initializes the instance fields to their default colors.
         * NOTE(review): the _MeshColor default here differs from the static
         * MeshColor default above — presumably intentional; confirm. */
        public AstarColor () {
            _NodeConnection = new Color (1,1,1,0.9F);
            _UnwalkableNode = new Color (1,0,0,0.5F);
            _BoundsHandles = new Color (0.29F,0.454F,0.741F,0.9F);
            _ConnectionLowLerp = new Color (0,1,0,0.5F);
            _ConnectionHighLerp = new Color (1,0,0,0.5F);
            _MeshEdgeColor = new Color (0,0,0,0.5F);
            _MeshColor = new Color (0.125F, 0.686F, 0, 0.19F);
        }
    }
/** Returned by graph ray- or linecasts containing info about the hit. This will only be set up if something was hit. */
public struct GraphHitInfo {
/** Start of the line/ray */
public Vector3 origin;
/** Hit point */
public Vector3 point;
/** Node which contained the edge which was hit */
public GraphNode node;
/** Where the tangent starts. tangentOrigin and tangent together actually describes the edge which was hit */
public Vector3 tangentOrigin;
/** Tangent of the edge which was hit */
public Vector3 tangent;
public float distance {
get {
return (point-origin).magnitude;
}
}
public GraphHitInfo (Vector3 point) {
tangentOrigin = Vector3.zero;
origin = Vector3.zero;
this.point = point;
node = null;
tangent = Vector3.zero;
//this.distance = distance;
}
}
    /** Nearest node constraint. Constrains which nodes will be returned by the GetNearest function */
    public class NNConstraint {
        /** Graphs treated as valid to search on.
         * This is a bitmask meaning that bit 0 specifies whether or not the first graph in the graphs list should be able to be included in the search,
         * bit 1 specifies whether or not the second graph should be included and so on.
         * \code
         * //Enables the first and third graphs to be included, but not the rest
         * myNNConstraint.graphMask = (1 << 0) | (1 << 2);
         * \endcode
         * \note This does only affect which nodes are returned from a GetNearest call, if an invalid graph is linked to from a valid graph, it might be searched anyway.
         *
         * \see AstarPath.GetNearest */
        public int graphMask = -1;
        /** Only treat nodes in the area #area as suitable. Does not affect anything if #area is less than 0 (zero) */
        public bool constrainArea;
        /** Area ID to constrain to. Will not affect anything if less than 0 (zero) or if #constrainArea is false */
        public int area = -1;
        /** Only treat nodes with the walkable flag set to the same as #walkable as suitable */
        public bool constrainWalkability = true;
        /** What must the walkable flag on a node be for it to be suitable. Does not affect anything if #constrainWalkability if false */
        public bool walkable = true;
        /** if available, do an XZ check instead of checking on all axes. The RecastGraph supports this */
        public bool distanceXZ;
        /** Sets if tags should be constrained */
        public bool constrainTags = true;
        /** Nodes which have any of these tags set are suitable. This is a bitmask, i.e bit 0 indicates that tag 0 is good, bit 3 indicates tag 3 is good etc. */
        public int tags = -1;
        /** Constrain distance to node.
         * Uses distance from AstarPath.maxNearestNodeDistance.
         * If this is false, it will completely ignore the distance limit.
         * \note This value is not used in this class, it is used by the AstarPath.GetNearest function.
         */
        public bool constrainDistance = true;
        /** Returns whether or not the graph conforms to this NNConstraint's rules.
         * Note that only the first 31 graphs are considered using this function.
         * If the graphMask has bit 31 set (i.e the last graph possible to fit in the mask), all graphs
         * above index 31 will also be considered suitable.
         */
        public virtual bool SuitableGraph (int graphIndex, NavGraph graph) {
            // Tests the bit in graphMask corresponding to this graph's index.
            return ((graphMask >> graphIndex) & 1) != 0;
        }
        /** Returns whether or not the node conforms to this NNConstraint's rules */
        public virtual bool Suitable (GraphNode node) {
            // Each enabled constraint can independently reject the node.
            if (constrainWalkability && node.Walkable != walkable) return false;
            if (constrainArea && area >= 0 && node.Area != area) return false;
            // Accept the node if its tag's bit is set in the tags bitmask.
            if (constrainTags && ((tags >> (int)node.Tag) & 0x1) == 0) return false;
            return true;
        }
        /** The default NNConstraint.
         * Equivalent to new NNConstraint ().
         * This NNConstraint has settings which works for most, it only finds walkable nodes
         * and it constrains distance set by A* Inspector -> Settings -> Max Nearest Node Distance */
        public static NNConstraint Default {
            get {
                return new NNConstraint ();
            }
        }
        /** Returns a constraint which will not filter the results */
        public static NNConstraint None {
            get {
                var n = new NNConstraint ();
                n.constrainWalkability = false;
                n.constrainArea = false;
                n.constrainTags = false;
                n.constrainDistance = false;
                n.graphMask = -1;
                return n;
            }
        }
        /** Default constructor. Equals to the property #Default */
        public NNConstraint () {
        }
    }
/** A special NNConstraint which can use different logic for the start node and end node in a path.
* A PathNNConstraint can be assigned to the Path.nnConstraint field, the path will first search for the start node, then it will call #SetStart and proceed with searching for the end node (nodes in the case of a MultiTargetPath).\n
* The default PathNNConstraint will constrain the end point to lie inside the same area as the start point.
*/
public class PathNNConstraint : NNConstraint {
public static new PathNNConstraint Default {
get {
var n = new PathNNConstraint ();
n.constrainArea = true;
return n;
}
}
/** Called after the start node has been found. This is used to get different search logic for the start and end nodes in a path */
public virtual void SetStart (GraphNode node) {
if (node != null) {
area = (int)node.Area;
} else {
constrainArea = false;
}
}
}
    /** Result of a nearest-node query, holding the closest node found and, optionally,
     * the closest node that also satisfied the query's NNConstraint. */
    public struct NNInfo {
        /** Closest node found.
         * This node is not necessarily accepted by any NNConstraint passed.
         * \see constrainedNode
         */
        public GraphNode node;
        /** Optional to be filled in.
         * If the search will be able to find the constrained node without any extra effort it can fill it in. */
        public GraphNode constrainedNode;
        /** The position clamped to the closest point on the #node.
         */
        public Vector3 clampedPosition;
        /** Clamped position for the optional constrainedNode */
        public Vector3 constClampedPosition;
        /** Creates an NNInfo for \a node; clamped positions are initialized from the node via UpdateInfo. */
        public NNInfo (GraphNode node) {
            this.node = node;
            constrainedNode = null;
            clampedPosition = Vector3.zero;
            constClampedPosition = Vector3.zero;
            UpdateInfo ();
        }
        /** Sets the constrained node */
        public void SetConstrained (GraphNode constrainedNode, Vector3 clampedPosition) {
            this.constrainedNode = constrainedNode;
            constClampedPosition = clampedPosition;
        }
        /** Updates #clampedPosition and #constClampedPosition from node positions */
        public void UpdateInfo () {
            // Null nodes map to Vector3.zero rather than throwing.
            clampedPosition = node != null ? (Vector3)node.position : Vector3.zero;
            constClampedPosition = constrainedNode != null ? (Vector3)constrainedNode.position : Vector3.zero;
        }
        // Conversion helpers: extract the clamped position or the node directly,
        // or wrap a node in a new NNInfo.
        public static explicit operator Vector3 (NNInfo ob) {
            return ob.clampedPosition;
        }
        public static explicit operator GraphNode (NNInfo ob) {
            return ob.node;
        }
        public static explicit operator NNInfo (GraphNode ob) {
            return new NNInfo (ob);
        }
    }
/** Progress info for e.g a progressbar.
 * Used by the scan functions in the project
 * \see AstarPath.ScanLoop
 */
public struct Progress {
    /** Fraction of the work completed, formatted with one decimal in #ToString */
    public readonly float progress;
    /** Human readable description of the current stage */
    public readonly string description;

    public Progress (float p, string d) {
        progress = p;
        description = d;
    }

    public override string ToString () {
        return string.Format ("{0:0.0} {1}", progress, description);
    }
}
/** Graphs which can be updated during runtime */
public interface IUpdatableGraph {

    /** Updates an area using the specified GraphUpdateObject.
     *
     * Notes to implementors.
     * This function should (in order):
     * -# Call o.WillUpdateNode on the GUO for every node it will update, it is important that this is called BEFORE any changes are made to the nodes.
     * -# Update walkability using special settings such as the usePhysics flag used with the GridGraph.
     * -# Call Apply on the GUO for every node which should be updated with the GUO.
     * -# Update eventual connectivity info if appropriate (GridGraphs updates connectivity, but most other graphs don't since then the connectivity cannot be recovered later).
     */
    void UpdateArea (GraphUpdateObject o);

    /** Called before #UpdateArea, giving the graph a chance to prepare for the update.
     * NOTE(review): exact contract inferred from the name; confirm against the graph implementations. */
    void UpdateAreaInit (GraphUpdateObject o);

    /** Returns in which thread the given update should be performed (see GraphUpdateThreading). */
    GraphUpdateThreading CanUpdateAsync (GraphUpdateObject o);
}
[System.Serializable]
/** Holds a tagmask.
 * This is used to store which tags to change and what to set them to in a Pathfinding.GraphUpdateObject.
 * All variables are bitmasks.\n
 * I wanted to make it a struct, but due to technical limitations when working with Unity's GenericMenu, I couldn't.
 * So be wary of this when passing it as it will be passed by reference, not by value as e.g LayerMask.
 *
 * \deprecated This class is being phased out
 */
public class TagMask {
    /** Bitmask of which tags to change */
    public int tagsChange;
    /** Bitmask of the values to set the changed tags to */
    public int tagsSet;

    public TagMask () {}

    public TagMask (int change, int set) {
        this.tagsChange = change;
        this.tagsSet = set;
    }

    /** Both masks rendered in binary, separated by a newline */
    public override string ToString () {
        return System.Convert.ToString (tagsChange, 2) + "\n" + System.Convert.ToString (tagsSet, 2);
    }
}
/** Represents a collection of settings used to update nodes in a specific region of a graph.
 * \see AstarPath.UpdateGraphs
 * \see \ref graph-updates
 */
public class GraphUpdateObject {

    /** The bounds to update nodes within.
     * Defined in world space.
     */
    public Bounds bounds;

    /** Controls if a flood fill will be carried out after this GUO has been applied.
     * Disabling this can be used to gain a performance boost, but use with care.
     * If you are sure that a GUO will not modify walkability or connections. You can set this to false.
     * For example when only updating penalty values it can save processing power when setting this to false. Especially on large graphs.
     * \note If you set this to false, even though it does change e.g walkability, it can lead to paths returning that they failed even though there is a path,
     * or they may try to search the whole graph for a path even though there is none, and will in the process use vast amounts of processing power.
     *
     * If using the basic GraphUpdateObject (not a derived class), a quick way to check if it is going to need a flood fill is to check if #modifyWalkability is true or #updatePhysics is true.
     *
     */
    public bool requiresFloodFill = true;

    /** Use physics checks to update nodes.
     * When updating a grid graph and this is true, the nodes' position and walkability will be updated using physics checks
     * with settings from "Collision Testing" and "Height Testing".
     *
     * When updating a PointGraph, setting this to true will make it re-evaluate all connections in the graph which passes through the #bounds.
     * This has no effect when updating GridGraphs if #modifyWalkability is turned on.
     *
     * On RecastGraphs, having this enabled will trigger a complete recalculation of all tiles intersecting the bounds.
     * This is quite slow (but powerful). If you only want to update e.g penalty on existing nodes, leave it disabled.
     */
    public bool updatePhysics = true;

    /** When #updatePhysics is true, GridGraphs will normally reset penalties, with this option you can override it.
     * Good to use when you want to keep old penalties even when you update the graph.
     *
     * The images below shows two overlapping graph update objects, the right one happened to be applied before the left one. They both have updatePhysics = true and are
     * set to increase the penalty of the nodes by some amount.
     *
     * The first image shows the result when resetPenaltyOnPhysics is false. Both penalties are added correctly.
     * \shadowimage{resetPenaltyOnPhysics_False.png}
     *
     * This second image shows when resetPenaltyOnPhysics is set to true. The first GUO is applied correctly, but then the second one (the left one) is applied
     * and during its updating, it resets the penalties first and then adds penalty to the nodes. The result is that the penalties from both GUOs are not added together.
     * The green patch in at the border is there because physics recalculation (recalculation of the position of the node, checking for obstacles etc.) affects a slightly larger
     * area than the original GUO bounds because of the Grid Graph -> Collision Testing -> Diameter setting (it is enlarged by that value). So some extra nodes have their penalties reset.
     *
     * \shadowimage{resetPenaltyOnPhysics_True.png}
     */
    public bool resetPenaltyOnPhysics = true;

    /** Update Erosion for GridGraphs.
     * When enabled, erosion will be recalculated for grid graphs
     * after the GUO has been applied.
     *
     * In the below image you can see the different effects you can get with the different values.\n
     * The first image shows the graph when no GUO has been applied. The blue box is not identified as an obstacle by the graph, the reason
     * there are unwalkable nodes around it is because there is a height difference (nodes are placed on top of the box) so erosion will be applied (an erosion value of 2 is used in this graph).
     * The orange box is identified as an obstacle, so the area of unwalkable nodes around it is a bit larger since both erosion and collision has made
     * nodes unwalkable.\n
     * The GUO used simply sets walkability to true, i.e making all nodes walkable.
     *
     * \shadowimage{updateErosion.png}
     *
     * When updateErosion=True, the reason the blue box still has unwalkable nodes around it is because there is still a height difference
     * so erosion will still be applied. The orange box on the other hand has no height difference and all nodes are set to walkable.\n
     * \n
     * When updateErosion=False, all nodes walkability are simply set to be walkable in this example.
     *
     * \see Pathfinding.GridGraph
     */
    public bool updateErosion = true;

    /** NNConstraint to use.
     * The Pathfinding.NNConstraint.SuitableGraph function will be called on the NNConstraint to enable filtering of which graphs to update.\n
     * \note As the Pathfinding.NNConstraint.SuitableGraph function is A* Pathfinding Project Pro only, this variable doesn't really affect anything in the free version.
     *
     *
     * \astarpro */
    public NNConstraint nnConstraint = NNConstraint.None;

    /** Penalty to add to the nodes.
     * A penalty of 1000 is equivalent to the cost of moving 1 world unit.
     */
    public int addPenalty;

    /** If true, all nodes' \a walkable variable will be set to #setWalkability */
    public bool modifyWalkability;

    /** If #modifyWalkability is true, the nodes' \a walkable variable will be set to this value */
    public bool setWalkability;

    /** If true, all nodes' \a tag will be set to #setTag */
    public bool modifyTag;

    /** If #modifyTag is true, all nodes' \a tag will be set to this value */
    public int setTag;

    /** Track which nodes are changed and save backup data.
     * Used internally to revert changes if needed.
     */
    public bool trackChangedNodes;

    /** Nodes which were updated by this GraphUpdateObject.
     * Will only be filled if #trackChangedNodes is true.
     * \note It might take a few frames for graph update objects to be applied.
     * If you need this info directly, use AstarPath.FlushGraphUpdates.
     */
    public List<GraphNode> changedNodes;

    /** Backup of penalty and flags (plus grid flags for grid nodes) for every node in #changedNodes.
     * Entries are interleaved per node in the order they were saved by #WillUpdateNode. */
    private List<uint> backupData;

    /** Backup of node positions, parallel to #changedNodes (one entry per node). */
    private List<Int3> backupPositionData;

    /** A shape can be specified if a bounds object does not give enough precision */
    public GraphUpdateShape shape;

    /** Should be called on every node which is updated with this GUO before it is updated.
     * \param node The node to save fields for. If null, nothing will be done
     * \see #trackChangedNodes
     */
    public virtual void WillUpdateNode (GraphNode node) {
        if (trackChangedNodes && node != null) {
            // Lazily claim the backup lists from the pools on the first tracked node
            if (changedNodes == null) { changedNodes = ListPool<GraphNode>.Claim(); backupData = ListPool<uint>.Claim(); backupPositionData = ListPool<Int3>.Claim(); }
            changedNodes.Add (node);
            backupPositionData.Add (node.position);
            // Penalty and flags are saved for every node; grid flags only for GridNodes,
            // so backupData entries per node vary in count (see RevertFromBackup)
            backupData.Add (node.Penalty);
            backupData.Add (node.Flags);
            var gg = node as GridNode;
            if ( gg != null ) backupData.Add (gg.InternalGridFlags);
        }
    }

    /** Reverts penalties and flags (which includes walkability) on every node which was updated using this GUO.
     * Data for reversion is only saved if #trackChangedNodes is true */
    public virtual void RevertFromBackup () {
        if (trackChangedNodes) {
            if (changedNodes == null) return;
            // backupData is interleaved: penalty, flags[, grid flags] per node,
            // so a separate counter is advanced alongside the node index
            int counter = 0;
            for (int i=0;i<changedNodes.Count;i++) {
                changedNodes[i].Penalty = backupData[counter];
                counter++;
                changedNodes[i].Flags = backupData[counter];
                counter++;
                var gg = changedNodes[i] as GridNode;
                if ( gg != null ) {
                    gg.InternalGridFlags = (ushort)backupData[counter];
                    counter++;
                }
                changedNodes[i].position = backupPositionData[i];
            }
            // Return the backup lists to their pools; the GUO must not be reverted twice
            ListPool<GraphNode>.Release (changedNodes);
            ListPool<uint>.Release(backupData);
            ListPool<Int3>.Release(backupPositionData);
        } else {
            throw new System.InvalidOperationException ("Changed nodes have not been tracked, cannot revert from backup");
        }
    }

    /** Updates the specified node using this GUO's settings */
    public virtual void Apply (GraphNode node) {
        if (shape == null || shape.Contains (node)) {

            //Update penalty and walkability
            node.Penalty = (uint)(node.Penalty+addPenalty);
            if (modifyWalkability) {
                node.Walkable = setWalkability;
            }

            //Update tags
            if (modifyTag) node.Tag = (uint)setTag;
        }
    }

    /** Creates a new GUO with no bounds set */
    public GraphUpdateObject () {
    }

    /** Creates a new GUO with the specified bounds */
    public GraphUpdateObject (Bounds b) {
        bounds = b;
    }
}
/** Graphs which support linecasts (checking the straight line between two points against the graph).
 * NOTE(review): return-value semantics inferred from implementations elsewhere; confirm. */
public interface IRaycastableGraph {
    /** Checks the line between \a start and \a end. */
    bool Linecast (Vector3 start, Vector3 end);
    /** Checks the line between \a start and \a end. \a hint may be used as a starting node for the search. */
    bool Linecast (Vector3 start, Vector3 end, GraphNode hint);
    /** Checks the line between \a start and \a end. \a hit receives information about what was hit. */
    bool Linecast (Vector3 start, Vector3 end, GraphNode hint, out GraphHitInfo hit);
    /** Checks the line between \a start and \a end. \a trace receives the nodes the line passed through (presumably; confirm against implementations). */
    bool Linecast (Vector3 start, Vector3 end, GraphNode hint, out GraphHitInfo hit, List<GraphNode> trace);
}
/** Holds info about one pathfinding thread.
 * Mainly used to send information about how the thread should execute when starting it
 */
public struct PathThreadInfo {
    /** Index of this thread among the pathfinding threads */
    public readonly int threadIndex;
    /** The AstarPath instance the thread belongs to */
    public readonly AstarPath astar;
    /** Calculation data for this thread (see PathHandler) */
    public readonly PathHandler runData;

    public PathThreadInfo (int index, AstarPath astar, PathHandler runData) {
        this.threadIndex = index;
        this.astar = astar;
        this.runData = runData;
    }
}
/** Integer Rectangle.
 * Works almost like UnityEngine.Rect but with integer coordinates.
 * Bounds are inclusive: a rect with xmin == xmax has a Width of 1.
 */
public struct IntRect {
    public int xmin, ymin, xmax, ymax;

    public IntRect (int xmin, int ymin, int xmax, int ymax) {
        this.xmin = xmin;
        this.xmax = xmax;
        this.ymin = ymin;
        this.ymax = ymax;
    }

    /** Returns true if the point is inside this rect (bounds inclusive) */
    public bool Contains (int x, int y) {
        return !(x < xmin || y < ymin || x > xmax || y > ymax);
    }

    /** Width of the rect. Bounds are inclusive, so a rect with xmin == xmax has width 1 */
    public int Width {
        get {
            return xmax-xmin+1;
        }
    }

    /** Height of the rect. Bounds are inclusive, so a rect with ymin == ymax has height 1 */
    public int Height {
        get {
            return ymax-ymin+1;
        }
    }

    /** Returns if this rectangle is valid.
     * An invalid rect could have e.g xmin > xmax.
     * Rectangles with a zero area are invalid.
     */
    public bool IsValid () {
        return xmin <= xmax && ymin <= ymax;
    }

    public static bool operator == (IntRect a, IntRect b) {
        return a.xmin == b.xmin && a.xmax == b.xmax && a.ymin == b.ymin && a.ymax == b.ymax;
    }

    public static bool operator != (IntRect a, IntRect b) {
        return a.xmin != b.xmin || a.xmax != b.xmax || a.ymin != b.ymin || a.ymax != b.ymax;
    }

    /** Returns true if \a _b is an IntRect with the same bounds.
     * Fix: the previous implementation cast unconditionally, throwing a NullReferenceException for
     * null and an InvalidCastException for other types, which violates the System.Object.Equals
     * contract (it must return false in those cases, never throw).
     */
    public override bool Equals (System.Object _b) {
        if (!(_b is IntRect)) return false;
        var b = (IntRect)_b;
        return xmin == b.xmin && xmax == b.xmax && ymin == b.ymin && ymax == b.ymax;
    }

    /** Hash combining all four bounds, consistent with #Equals */
    public override int GetHashCode () {
        return xmin*131071 ^ xmax*3571 ^ ymin*3109 ^ ymax*7;
    }

    /** Returns the intersection rect between the two rects.
     * The intersection rect is the area which is inside both rects.
     * If the rects do not have an intersection, an invalid rect is returned.
     * \see IsValid
     */
    public static IntRect Intersection (IntRect a, IntRect b) {
        var r = new IntRect(
            System.Math.Max(a.xmin,b.xmin),
            System.Math.Max(a.ymin,b.ymin),
            System.Math.Min(a.xmax,b.xmax),
            System.Math.Min(a.ymax,b.ymax)
            );
        return r;
    }

    /** Returns if the two rectangles intersect each other
     */
    public static bool Intersects (IntRect a, IntRect b) {
        return !(a.xmin > b.xmax || a.ymin > b.ymax || a.xmax < b.xmin || a.ymax < b.ymin);
    }

    /** Returns a new rect which contains both input rects.
     * This rectangle may contain areas outside both input rects as well in some cases.
     */
    public static IntRect Union (IntRect a, IntRect b) {
        var r = new IntRect(
            System.Math.Min(a.xmin,b.xmin),
            System.Math.Min(a.ymin,b.ymin),
            System.Math.Max(a.xmax,b.xmax),
            System.Math.Max(a.ymax,b.ymax)
            );
        return r;
    }

    /** Returns a new IntRect which is expanded to contain the point */
    public IntRect ExpandToContain (int x, int y) {
        var r = new IntRect(
            System.Math.Min(xmin,x),
            System.Math.Min(ymin,y),
            System.Math.Max(xmax,x),
            System.Math.Max(ymax,y)
            );
        return r;
    }

    /** Returns a new rect which is expanded by \a range in all directions.
     * \param range How far to expand. Negative values are permitted.
     */
    public IntRect Expand (int range) {
        return new IntRect(xmin-range,
            ymin-range,
            xmax+range,
            ymax+range
            );
    }

    /** Matrices for rotation.
     * Each group of 4 elements is a 2x2 matrix.
     * The XZ position is multiplied by this.
     * So
     * \code
     * //A rotation by 90 degrees clockwise, second matrix in the array
     * (5,2) * ((0, 1), (-1, 0)) = (2,-5)
     * \endcode
     */
    private static readonly int[] Rotations = {
        1, 0, //Identity matrix
        0, 1,

        0, 1,
        -1, 0,

        -1, 0,
        0,-1,

        0,-1,
        1, 0
    };

    /** Returns a new rect rotated around the origin 90*r degrees.
     * Ensures that a valid rect is returned.
     */
    public IntRect Rotate ( int r ) {
        int mx1 = Rotations[r*4+0];
        int mx2 = Rotations[r*4+1];
        int my1 = Rotations[r*4+2];
        int my2 = Rotations[r*4+3];

        // Rotate both corners, then re-normalize min/max so the rect stays valid
        int p1x = mx1*xmin + mx2*ymin;
        int p1y = my1*xmin + my2*ymin;

        int p2x = mx1*xmax + mx2*ymax;
        int p2y = my1*xmax + my2*ymax;

        return new IntRect (
            System.Math.Min ( p1x, p2x ),
            System.Math.Min ( p1y, p2y ),
            System.Math.Max ( p1x, p2x ),
            System.Math.Max ( p1y, p2y )
            );
    }

    /** Returns a new rect which is offset by the specified amount.
     */
    public IntRect Offset ( Int2 offset ) {
        return new IntRect ( xmin+offset.x, ymin + offset.y, xmax + offset.x, ymax + offset.y );
    }

    /** Returns a new rect which is offset by the specified amount.
     */
    public IntRect Offset ( int x, int y ) {
        return new IntRect ( xmin+x, ymin + y, xmax + x, ymax + y );
    }

    public override string ToString () {
        return "[x: "+xmin+"..."+xmax+", y: " + ymin +"..."+ymax+"]";
    }

    /** Draws some debug lines representing the rect */
    public void DebugDraw (Matrix4x4 matrix, Color col) {
        Vector3 p1 = matrix.MultiplyPoint3x4 (new Vector3(xmin,0,ymin));
        Vector3 p2 = matrix.MultiplyPoint3x4 (new Vector3(xmin,0,ymax));
        Vector3 p3 = matrix.MultiplyPoint3x4 (new Vector3(xmax,0,ymax));
        Vector3 p4 = matrix.MultiplyPoint3x4 (new Vector3(xmax,0,ymin));

        Debug.DrawLine (p1,p2,col);
        Debug.DrawLine (p2,p3,col);
        Debug.DrawLine (p3,p4,col);
        Debug.DrawLine (p4,p1,col);
    }
}
}
#region Delegates
/* Delegate with one Path object as parameter.
* This is used for callbacks when a path has finished calculation.\n
* Example function:
* \code
public void Start () {
//Assumes a Seeker component is attached to the GameObject
Seeker seeker = GetComponent<Seeker>();
//seeker.pathCallback is a OnPathDelegate, we add the function OnPathComplete to it so it will be called whenever a path has finished calculating on that seeker
seeker.pathCallback += OnPathComplete;
}
public void OnPathComplete (Path p) {
Debug.Log ("This is called when a path is completed on the seeker attached to this GameObject");
}\endcode
*/
/** Callback for when a path has finished calculation. */
public delegate void OnPathDelegate (Path p);

/** Returns the next target point(s) for a path given the current position.
 * NOTE(review): usage inferred from the signature; confirm against callers. */
public delegate Vector3[] GetNextTargetDelegate (Path p, Vector3 currentPosition);

/** Delegate taking a single graph node. */
public delegate void NodeDelegate (GraphNode node);

/** Delegate taking a single graph. */
public delegate void OnGraphDelegate (NavGraph graph);

/** Delegate taking the AstarPath script instance, used for scan callbacks. */
public delegate void OnScanDelegate (AstarPath script);

/** Callback reporting scan progress (see Progress). */
public delegate void OnScanStatus (Progress progress);
#endregion
#region Enums
/** In which thread a graph update should be performed.
 * Returned by IUpdatableGraph.CanUpdateAsync so each graph type can choose how its updates are threaded.
 * NOTE(review): member semantics inferred from the names; confirm against the update dispatch code. */
public enum GraphUpdateThreading {
    UnityThread,          /**< Run the update in the Unity (main) thread */
    SeparateThread,       /**< Run the update in a separate thread */
    SeparateAndUnityInit  /**< Run in a separate thread, but initialize in the Unity thread first */
}
/** How path results are logged by the system.
 * Controls how much information about each calculated path is printed.
 */
public enum PathLog {
    None,       /**< Does not log anything */
    Normal,     /**< Logs basic info about the paths */
    Heavy,      /**< Includes additional info */
    InGame,     /**< Same as heavy, but displays the info in-game using GUI */
    OnlyErrors  /**< Same as normal, but logs only paths which returned an error */
}
/** Heuristic to use. Heuristic is the estimated cost from the current node to the target */
public enum Heuristic {
    Manhattan,          /**< Manhattan distance (sum of per-axis differences) */
    DiagonalManhattan,  /**< Diagonal distance (Manhattan variant allowing diagonal movement) */
    Euclidean,          /**< Straight-line distance */
    None                /**< No heuristic; the search degenerates to Dijkstra's algorithm */
}
/** What data to draw the graph debugging with */
public enum GraphDebugMode {
    Areas,        /**< Color nodes by their area value */
    G,            /**< A* G score (cost from the start node) */
    H,            /**< A* H score (heuristic estimate to the target) */
    F,            /**< A* F score (G + H) */
    Penalty,      /**< Node penalty values */
    Connections,  /**< Node connections */
    Tags          /**< Node tag values */
}
/** Type of connection for a user placed link */
public enum ConnectionType {
    Connection,  /**< Adds a connection between two nodes */
    ModifyNode   /**< Modifies an existing node instead (inferred from the name; confirm against link code) */
}
/** Number of pathfinding threads to use.
 * Negative values select the count automatically (Low/High load variants); None (0) means no separate threads.
 * NOTE(review): the exact behavior of the automatic modes is inferred from the names; confirm against AstarPath. */
public enum ThreadCount {
    AutomaticLowLoad = -1,
    AutomaticHighLoad = -2,
    None = 0,
    One = 1,
    Two,
    Three,
    Four,
    Five,
    Six,
    Seven,
    Eight
}
/** States a path passes through while being processed.
 * Member names suggest the pipeline order Created -> PathQueue -> Processing -> ReturnQueue -> Returned
 * (NOTE(review): ordering inferred from names and values; confirm against the path processing code). */
public enum PathState {
    Created = 0,
    PathQueue = 1,
    Processing = 2,
    ReturnQueue = 3,
    Returned = 4
}
/** Completion state of a path calculation. */
public enum PathCompleteState {
    NotCalculated = 0,  /**< The path has not been calculated yet */
    Error = 1,          /**< An error occurred during calculation */
    Complete = 2,       /**< The path was calculated successfully */
    Partial = 3         /**< Only a partial path was found (inferred from the name; confirm) */
}
#endregion
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Diagnostics.Contracts;
namespace System.Collections.Immutable
{
/// <content>
/// Contains the inner <see cref="ImmutableSortedSet{T}.Builder"/> class.
/// </content>
[SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix", Justification = "Ignored")]
public sealed partial class ImmutableSortedSet<T>
{
/// <summary>
/// A sorted set that mutates with little or no memory allocations,
/// can produce and/or build on immutable sorted set instances very efficiently.
/// </summary>
/// <remarks>
/// <para>
/// While <see cref="ImmutableSortedSet{T}.Union"/> and other bulk change methods
/// already provide fast bulk change operations on the collection, this class allows
/// multiple combinations of changes to be made to a set with equal efficiency.
/// </para>
/// <para>
/// Instance members of this class are <em>not</em> thread-safe.
/// </para>
/// </remarks>
[SuppressMessage("Microsoft.Naming", "CA1710:IdentifiersShouldHaveCorrectSuffix", Justification = "Ignored")]
[SuppressMessage("Microsoft.Design", "CA1034:NestedTypesShouldNotBeVisible", Justification = "Ignored")]
[DebuggerDisplay("Count = {Count}")]
[DebuggerTypeProxy(typeof(ImmutableSortedSetBuilderDebuggerProxy<>))]
public sealed class Builder : ISortKeyCollection<T>, IReadOnlyCollection<T>, ISet<T>, ICollection
{
/// <summary>
/// The root of the binary tree that stores the collection. Contents are typically not entirely frozen.
/// </summary>
private ImmutableSortedSet<T>.Node _root = ImmutableSortedSet<T>.Node.EmptyNode;
/// <summary>
/// The comparer to use for sorting the set.
/// </summary>
private IComparer<T> _comparer = Comparer<T>.Default;
/// <summary>
/// Caches an immutable instance that represents the current state of the collection.
/// </summary>
/// <value>Null if no immutable view has been created for the current version.</value>
private ImmutableSortedSet<T> _immutable;
/// <summary>
/// A number that increments every time the builder changes its contents.
/// </summary>
private int _version;
/// <summary>
/// The object callers may use to synchronize access to this collection.
/// </summary>
private object _syncRoot;
/// <summary>
/// Initializes a new instance of the <see cref="Builder"/> class.
/// </summary>
/// <param name="set">A set to act as the basis for a new set.</param>
internal Builder(ImmutableSortedSet<T> set)
{
Requires.NotNull(set, nameof(set));
_root = set._root;
_comparer = set.KeyComparer;
_immutable = set;
}
#region ISet<T> Properties
/// <summary>
/// Gets the number of elements in this set.
/// </summary>
public int Count
{
get { return this.Root.Count; }
}
/// <summary>
/// Gets a value indicating whether this instance is read-only.
/// </summary>
/// <value>Always <c>false</c>.</value>
bool ICollection<T>.IsReadOnly
{
get { return false; }
}
#endregion
/// <summary>
/// Gets the element of the set at the given index.
/// </summary>
/// <param name="index">The 0-based index of the element in the set to return.</param>
/// <returns>The element at the given position.</returns>
/// <remarks>
/// No index setter is offered because the element being replaced may not sort
/// to the same position in the sorted collection as the replacing element.
/// </remarks>
public T this[int index]
{
get { return _root[index]; }
}
/// <summary>
/// Gets the maximum value in the collection, as defined by the comparer.
/// </summary>
/// <value>The maximum value in the set.</value>
public T Max
{
get { return _root.Max; }
}
/// <summary>
/// Gets the minimum value in the collection, as defined by the comparer.
/// </summary>
/// <value>The minimum value in the set.</value>
public T Min
{
get { return _root.Min; }
}
/// <summary>
/// Gets or sets the <see cref="IComparer{T}"/> object that is used to determine equality for the values in the <see cref="ImmutableSortedSet{T}"/>.
/// </summary>
/// <value>The comparer that is used to determine equality for the values in the set.</value>
/// <remarks>
/// When changing the comparer in such a way as would introduce collisions, the conflicting elements are dropped,
/// leaving only one of each matching pair in the collection.
/// </remarks>
public IComparer<T> KeyComparer
{
get
{
return _comparer;
}
set
{
Requires.NotNull(value, nameof(value));
if (value != _comparer)
{
var newRoot = Node.EmptyNode;
foreach (T item in this)
{
bool mutated;
newRoot = newRoot.Add(item, value, out mutated);
}
_immutable = null;
_comparer = value;
this.Root = newRoot;
}
}
}
/// <summary>
/// Gets the current version of the contents of this builder.
/// </summary>
internal int Version
{
get { return _version; }
}
/// <summary>
/// Gets or sets the root node that represents the data in this collection.
/// </summary>
private Node Root
{
get
{
return _root;
}
set
{
// We *always* increment the version number because some mutations
// may not create a new value of root, although the existing root
// instance may have mutated.
_version++;
if (_root != value)
{
_root = value;
// Clear any cached value for the immutable view since it is now invalidated.
_immutable = null;
}
}
}
#region ISet<T> Methods
/// <summary>
/// Adds an element to the current set and returns a value to indicate if the
/// element was successfully added.
/// </summary>
/// <param name="item">The element to add to the set.</param>
/// <returns>true if the element is added to the set; false if the element is already in the set.</returns>
public bool Add(T item)
{
bool mutated;
this.Root = this.Root.Add(item, _comparer, out mutated);
return mutated;
}
/// <summary>
/// Removes all elements in the specified collection from the current set.
/// </summary>
/// <param name="other">The collection of items to remove from the set.</param>
public void ExceptWith(IEnumerable<T> other)
{
Requires.NotNull(other, nameof(other));
foreach (T item in other)
{
bool mutated;
this.Root = this.Root.Remove(item, _comparer, out mutated);
}
}
/// <summary>
/// Modifies the current set so that it contains only elements that are also in a specified collection.
/// </summary>
/// <param name="other">The collection to compare to the current set.</param>
public void IntersectWith(IEnumerable<T> other)
{
Requires.NotNull(other, nameof(other));
var result = ImmutableSortedSet<T>.Node.EmptyNode;
foreach (T item in other)
{
if (this.Contains(item))
{
bool mutated;
result = result.Add(item, _comparer, out mutated);
}
}
this.Root = result;
}
/// <summary>
/// Determines whether the current set is a proper (strict) subset of a specified collection.
/// </summary>
/// <param name="other">The collection to compare to the current set.</param>
/// <returns>true if the current set is a correct subset of other; otherwise, false.</returns>
public bool IsProperSubsetOf(IEnumerable<T> other)
{
return this.ToImmutable().IsProperSubsetOf(other);
}
/// <summary>
/// Determines whether the current set is a proper (strict) superset of a specified collection.
/// </summary>
/// <param name="other">The collection to compare to the current set.</param>
/// <returns>true if the current set is a superset of other; otherwise, false.</returns>
public bool IsProperSupersetOf(IEnumerable<T> other)
{
return this.ToImmutable().IsProperSupersetOf(other);
}
/// <summary>
/// Determines whether the current set is a subset of a specified collection.
/// </summary>
/// <param name="other">The collection to compare to the current set.</param>
/// <returns>true if the current set is a subset of other; otherwise, false.</returns>
public bool IsSubsetOf(IEnumerable<T> other)
{
return this.ToImmutable().IsSubsetOf(other);
}
/// <summary>
/// Determines whether the current set is a superset of a specified collection.
/// </summary>
/// <param name="other">The collection to compare to the current set.</param>
/// <returns>true if the current set is a superset of other; otherwise, false.</returns>
public bool IsSupersetOf(IEnumerable<T> other)
{
return this.ToImmutable().IsSupersetOf(other);
}
/// <summary>
/// Determines whether the current set overlaps with the specified collection.
/// </summary>
/// <param name="other">The collection to compare to the current set.</param>
/// <returns>true if the current set and other share at least one common element; otherwise, false.</returns>
public bool Overlaps(IEnumerable<T> other)
{
return this.ToImmutable().Overlaps(other);
}
/// <summary>
/// Determines whether the current set and the specified collection contain the same elements.
/// </summary>
/// <param name="other">The collection to compare to the current set.</param>
/// <returns>true if the current set is equal to other; otherwise, false.</returns>
public bool SetEquals(IEnumerable<T> other)
{
return this.ToImmutable().SetEquals(other);
}
/// <summary>
/// Modifies the current set so that it contains only elements that are present either in the current set or in the specified collection, but not both.
/// </summary>
/// <param name="other">The collection to compare to the current set.</param>
public void SymmetricExceptWith(IEnumerable<T> other)
{
this.Root = this.ToImmutable().SymmetricExcept(other)._root;
}
/// <summary>
/// Modifies the current set so that it contains all elements that are present in both the current set and in the specified collection.
/// </summary>
/// <param name="other">The collection to compare to the current set.</param>
public void UnionWith(IEnumerable<T> other)
{
Requires.NotNull(other, nameof(other));
foreach (T item in other)
{
bool mutated;
this.Root = this.Root.Add(item, _comparer, out mutated);
}
}
/// <summary>
/// Adds an element to the current set and returns a value to indicate if the
/// element was successfully added.
/// </summary>
/// <param name="item">The element to add to the set.</param>
void ICollection<T>.Add(T item)
{
this.Add(item);
}
/// <summary>
/// Removes all elements from this set.
/// </summary>
public void Clear()
{
this.Root = ImmutableSortedSet<T>.Node.EmptyNode;
}
/// <summary>
/// Determines whether the set contains a specific value.
/// </summary>
/// <param name="item">The object to locate in the set.</param>
/// <returns>true if item is found in the set; false otherwise.</returns>
public bool Contains(T item)
{
return this.Root.Contains(item, _comparer);
}
/// <summary>
/// See <see cref="ICollection{T}"/>
/// </summary>
void ICollection<T>.CopyTo(T[] array, int arrayIndex)
{
_root.CopyTo(array, arrayIndex);
}
/// <summary>
/// Removes the first occurrence of a specific object from the set.
/// </summary>
/// <param name="item">The object to remove from the set.</param>
/// <returns><c>true</c> if the item was removed from the set; <c>false</c> if the item was not found in the set.</returns>
public bool Remove(T item)
{
bool mutated;
this.Root = this.Root.Remove(item, _comparer, out mutated);
return mutated;
}
/// <summary>
/// Returns an enumerator that iterates through the collection.
/// </summary>
/// <returns>A enumerator that can be used to iterate through the collection.</returns>
public ImmutableSortedSet<T>.Enumerator GetEnumerator()
{
return this.Root.GetEnumerator(this);
}
/// <summary>
/// Returns an enumerator that iterates through the collection.
/// </summary>
/// <returns>A enumerator that can be used to iterate through the collection.</returns>
IEnumerator<T> IEnumerable<T>.GetEnumerator()
{
return this.Root.GetEnumerator();
}
/// <summary>
/// Returns an enumerator that iterates through the collection.
/// </summary>
/// <returns>A enumerator that can be used to iterate through the collection.</returns>
IEnumerator IEnumerable.GetEnumerator()
{
return this.GetEnumerator();
}
#endregion
/// <summary>
/// Returns an <see cref="IEnumerable{T}"/> that iterates over this
/// collection in reverse order.
/// </summary>
/// <returns>
/// An enumerator that iterates over the <see cref="ImmutableSortedSet{T}.Builder"/>
/// in reverse order.
/// </returns>
[Pure]
public IEnumerable<T> Reverse()
{
return new ReverseEnumerable(_root);
}
/// <summary>
/// Creates an immutable sorted set based on the contents of this instance.
/// </summary>
/// <returns>An immutable set.</returns>
/// <remarks>
/// This method is an O(n) operation, and approaches O(1) time as the number of
/// actual mutations to the set since the last call to this method approaches 0.
/// </remarks>
public ImmutableSortedSet<T> ToImmutable()
{
// Creating an instance of ImmutableSortedSet<T> with our root node automatically freezes our tree,
// ensuring that the returned instance is immutable. Any further mutations made to this builder
// will clone (and unfreeze) the spine of modified nodes until the next time this method is invoked.
if (_immutable == null)
{
_immutable = ImmutableSortedSet<T>.Wrap(this.Root, _comparer);
}
return _immutable;
}
#region ICollection members
/// <summary>
/// Copies the elements of the <see cref="ICollection"/> to an <see cref="Array"/>, starting at a particular <see cref="Array"/> index.
/// </summary>
/// <param name="array">The one-dimensional <see cref="Array"/> that is the destination of the elements copied from <see cref="ICollection"/>. The <see cref="Array"/> must have zero-based indexing.</param>
/// <param name="arrayIndex">The zero-based index in <paramref name="array"/> at which copying begins.</param>
void ICollection.CopyTo(Array array, int arrayIndex)
{
this.Root.CopyTo(array, arrayIndex);
}
/// <summary>
/// Gets a value indicating whether access to the <see cref="ICollection"/> is synchronized (thread safe).
/// </summary>
/// <returns>true if access to the <see cref="ICollection"/> is synchronized (thread safe); otherwise, false.</returns>
[DebuggerBrowsable(DebuggerBrowsableState.Never)]
bool ICollection.IsSynchronized
{
get { return false; }
}
/// <summary>
/// Gets an object that can be used to synchronize access to the <see cref="ICollection"/>.
/// </summary>
/// <returns>An object that can be used to synchronize access to the <see cref="ICollection"/>.</returns>
[DebuggerBrowsable(DebuggerBrowsableState.Never)]
object ICollection.SyncRoot
{
    get
    {
        if (_syncRoot == null)
        {
            // Lazily create the sync object. CompareExchange guarantees that exactly one
            // instance is published even when several threads race on first access.
            Threading.Interlocked.CompareExchange<Object>(ref _syncRoot, new Object(), null);
        }
        return _syncRoot;
    }
}
#endregion
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Threading;
using Umbraco.Core.Models;
using Umbraco.Core.Persistence.UnitOfWork;
namespace Umbraco.Core.Services
{
/// <summary>
/// Provides a cached two-way mapping between integer node ids and Guid unique ids
/// (together with their <see cref="UmbracoObjectTypes"/>), falling back to the
/// umbracoNode database table on a cache miss.
/// </summary>
public class IdkMap
{
    // Creates the units of work used for the database fallback lookups.
    private readonly IDatabaseUnitOfWorkProvider _uowProvider;

    // Guards _id2Key and _key2Id.
    private readonly ReaderWriterLockSlim _locker = new ReaderWriterLockSlim();

    // id -> (key, object type) and key -> (id, object type) caches; kept in sync under _locker.
    private readonly Dictionary<int, TypedId<Guid>> _id2Key = new Dictionary<int, TypedId<Guid>>();
    private readonly Dictionary<Guid, TypedId<int>> _key2Id = new Dictionary<Guid, TypedId<int>>();

    /// <summary>
    /// Initializes a new instance of the <see cref="IdkMap"/> class.
    /// </summary>
    /// <param name="uowProvider">The provider used to create database units of work.</param>
    public IdkMap(IDatabaseUnitOfWorkProvider uowProvider)
    {
        _uowProvider = uowProvider;
    }

    // note - no need for uow, scope would be enough, but a pain to wire
    // note - for pure read-only we might want to *not* enforce a transaction?

    /// <summary>
    /// Gets the integer id corresponding to a Guid unique id, for a given object type.
    /// </summary>
    /// <param name="key">The unique Guid of the node.</param>
    /// <param name="umbracoObjectType">The expected object type, or Unknown to match any type.</param>
    /// <returns>A successful attempt containing the id, or a failed attempt if no match exists.</returns>
    public Attempt<int> GetIdForKey(Guid key, UmbracoObjectTypes umbracoObjectType)
    {
        TypedId<int> id;
        try
        {
            _locker.EnterReadLock();
            // A cache hit only counts when the cached object type matches the requested one.
            if (_key2Id.TryGetValue(key, out id) && id.UmbracoObjectType == umbracoObjectType) return Attempt.Succeed(id.Id);
        }
        finally
        {
            // EnterReadLock runs inside the try, so the lock may not be held if it threw.
            if (_locker.IsReadLockHeld)
                _locker.ExitReadLock();
        }
        int? val;
        using (var uow = _uowProvider.GetUnitOfWork())
        {
            //if it's unknown don't include the nodeObjectType in the query
            if (umbracoObjectType == UmbracoObjectTypes.Unknown)
            {
                val = uow.Database.ExecuteScalar<int?>("SELECT id FROM umbracoNode WHERE uniqueId=@id", new { id = key});
            }
            else
            {
                // Also match id reservations so reserved ids resolve before the node is fully created.
                val = uow.Database.ExecuteScalar<int?>("SELECT id FROM umbracoNode WHERE uniqueId=@id AND (nodeObjectType=@type OR nodeObjectType=@reservation)",
                    new { id = key, type = GetNodeObjectTypeGuid(umbracoObjectType), reservation = Constants.ObjectTypes.IdReservationGuid });
            }
            uow.Commit();
        }
        if (val == null) return Attempt<int>.Fail();
        // cache reservations, when something is saved this cache is cleared anyways
        //if (umbracoObjectType == UmbracoObjectTypes.IdReservation)
        //    Attempt.Succeed(val.Value);
        try
        {
            _locker.EnterWriteLock();
            // Populate both directions of the cache.
            _id2Key[val.Value] = new TypedId<Guid>(key, umbracoObjectType);
            _key2Id[key] = new TypedId<int>(val.Value, umbracoObjectType);
        }
        finally
        {
            if (_locker.IsWriteLockHeld)
                _locker.ExitWriteLock();
        }
        return Attempt.Succeed(val.Value);
    }

    /// <summary>
    /// Gets the integer id corresponding to a Udi.
    /// </summary>
    /// <param name="udi">The Udi; must be a <see cref="GuidUdi"/> for the lookup to succeed.</param>
    /// <returns>A successful attempt containing the id, or a failed attempt otherwise.</returns>
    public Attempt<int> GetIdForUdi(Udi udi)
    {
        var guidUdi = udi as GuidUdi;
        if (guidUdi == null)
            return Attempt<int>.Fail();
        var umbracoType = Constants.UdiEntityType.ToUmbracoObjectType(guidUdi.EntityType);
        return GetIdForKey(guidUdi.Guid, umbracoType);
    }

    /// <summary>
    /// Gets the Guid unique id corresponding to an integer id, for a given object type.
    /// Mirrors <see cref="GetIdForKey"/> in the opposite direction.
    /// </summary>
    /// <param name="id">The integer id of the node.</param>
    /// <param name="umbracoObjectType">The expected object type, or Unknown to match any type.</param>
    /// <returns>A successful attempt containing the key, or a failed attempt if no match exists.</returns>
    public Attempt<Guid> GetKeyForId(int id, UmbracoObjectTypes umbracoObjectType)
    {
        TypedId<Guid> key;
        try
        {
            _locker.EnterReadLock();
            if (_id2Key.TryGetValue(id, out key) && key.UmbracoObjectType == umbracoObjectType) return Attempt.Succeed(key.Id);
        }
        finally
        {
            if (_locker.IsReadLockHeld)
                _locker.ExitReadLock();
        }
        Guid? val;
        using (var uow = _uowProvider.GetUnitOfWork())
        {
            //if it's unknown don't include the nodeObjectType in the query
            if (umbracoObjectType == UmbracoObjectTypes.Unknown)
            {
                val = uow.Database.ExecuteScalar<Guid?>("SELECT uniqueId FROM umbracoNode WHERE id=@id", new { id });
            }
            else
            {
                val = uow.Database.ExecuteScalar<Guid?>("SELECT uniqueId FROM umbracoNode WHERE id=@id AND (nodeObjectType=@type OR nodeObjectType=@reservation)",
                    new { id, type = GetNodeObjectTypeGuid(umbracoObjectType), reservation = Constants.ObjectTypes.IdReservationGuid });
            }
            uow.Commit();
        }
        if (val == null) return Attempt<Guid>.Fail();
        // cache reservations, when something is saved this cache is cleared anyways
        //if (umbracoObjectType == UmbracoObjectTypes.IdReservation)
        //    Attempt.Succeed(val.Value);
        try
        {
            _locker.EnterWriteLock();
            _id2Key[id] = new TypedId<Guid>(val.Value, umbracoObjectType);
            _key2Id[val.Value] = new TypedId<int>(id, umbracoObjectType);
        }
        finally
        {
            if (_locker.IsWriteLockHeld)
                _locker.ExitWriteLock();
        }
        return Attempt.Succeed(val.Value);
    }

    // Resolves the nodeObjectType Guid for a known object type; throws for types
    // that have no Guid mapping (GetGuid returned Guid.Empty).
    private static Guid GetNodeObjectTypeGuid(UmbracoObjectTypes umbracoObjectType)
    {
        var guid = umbracoObjectType.GetGuid();
        if (guid == Guid.Empty)
            throw new NotSupportedException("Unsupported object type (" + umbracoObjectType + ").");
        return guid;
    }

    /// <summary>
    /// Clears the entire id/key cache.
    /// </summary>
    public void ClearCache()
    {
        try
        {
            _locker.EnterWriteLock();
            _id2Key.Clear();
            _key2Id.Clear();
        }
        finally
        {
            if (_locker.IsWriteLockHeld)
                _locker.ExitWriteLock();
        }
    }

    /// <summary>
    /// Removes a single integer id (and its matching key entry) from the cache.
    /// </summary>
    /// <param name="id">The integer id to evict.</param>
    public void ClearCache(int id)
    {
        try
        {
            _locker.EnterWriteLock();
            TypedId<Guid> key;
            // Nothing cached for this id: both maps are untouched.
            if (_id2Key.TryGetValue(id, out key) == false) return;
            _id2Key.Remove(id);
            _key2Id.Remove(key.Id);
        }
        finally
        {
            if (_locker.IsWriteLockHeld)
                _locker.ExitWriteLock();
        }
    }

    /// <summary>
    /// Removes a single Guid key (and its matching id entry) from the cache.
    /// </summary>
    /// <param name="key">The Guid key to evict.</param>
    public void ClearCache(Guid key)
    {
        try
        {
            _locker.EnterWriteLock();
            TypedId<int> id;
            // Nothing cached for this key: both maps are untouched.
            if (_key2Id.TryGetValue(key, out id) == false) return;
            _id2Key.Remove(id.Id);
            _key2Id.Remove(key);
        }
        finally
        {
            if (_locker.IsWriteLockHeld)
                _locker.ExitWriteLock();
        }
    }

    // An id (int or Guid) tagged with the object type it was resolved for,
    // so cache hits can be rejected when the caller asks for a different type.
    private struct TypedId<T>
    {
        private readonly T _id;
        private readonly UmbracoObjectTypes _umbracoObjectType;

        // The wrapped id value.
        public T Id
        {
            get { return _id; }
        }

        // The object type the id was resolved against.
        public UmbracoObjectTypes UmbracoObjectType
        {
            get { return _umbracoObjectType; }
        }

        public TypedId(T id, UmbracoObjectTypes umbracoObjectType)
        {
            _umbracoObjectType = umbracoObjectType;
            _id = id;
        }
    }
}
}
| |
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Reactive.Linq;
using System.Threading.Tasks;
using Avalonia.Collections;
using Avalonia.Diagnostics;
using Avalonia.Data.Core;
using Avalonia.UnitTests;
using Xunit;
using Avalonia.Markup.Parsers;
namespace Avalonia.Base.UnitTests.Data.Core
{
/// <summary>
/// Tests for <see cref="ExpressionObserver"/> expressions that end in an indexer:
/// arrays, lists, dictionaries, INotifyCollectionChanged tracking, and writing back
/// through SetValue.
/// </summary>
/// <remarks>
/// NOTE(review): each test ends with GC.KeepAlive(data) to keep the observed object
/// alive for the duration of the test — presumably the observer holds it weakly;
/// confirm against the ExpressionObserver implementation.
/// </remarks>
public class ExpressionObserverTests_Indexer
{
    [Fact]
    public async Task Should_Get_Array_Value()
    {
        var data = new { Foo = new [] { "foo", "bar" } };
        var target = ExpressionObserver.Create(data, x => x.Foo[1]);
        var result = await target.Take(1);
        Assert.Equal("bar", result);
        GC.KeepAlive(data);
    }

    [Fact]
    public async Task Should_Get_MultiDimensional_Array_Value()
    {
        var data = new { Foo = new[,] { { "foo", "bar" }, { "baz", "qux" } } };
        var target = ExpressionObserver.Create(data, o => o.Foo[1, 1]);
        var result = await target.Take(1);
        Assert.Equal("qux", result);
        GC.KeepAlive(data);
    }

    [Fact]
    public async Task Should_Get_Value_For_String_Indexer()
    {
        var data = new { Foo = new Dictionary<string, string> { { "foo", "bar" }, { "baz", "qux" } } };
        var target = ExpressionObserver.Create(data, o => o.Foo["foo"]);
        var result = await target.Take(1);
        Assert.Equal("bar", result);
        GC.KeepAlive(data);
    }

    [Fact]
    public async Task Should_Get_Value_For_Non_String_Indexer()
    {
        var data = new { Foo = new Dictionary<double, string> { { 1.0, "bar" }, { 2.0, "qux" } } };
        var target = ExpressionObserver.Create(data, o => o.Foo[1.0]);
        var result = await target.Take(1);
        Assert.Equal("bar", result);
        GC.KeepAlive(data);
    }

    [Fact]
    public async Task Array_Out_Of_Bounds_Should_Return_UnsetValue()
    {
        // Index 2 does not exist: the observer reports UnsetValue rather than throwing.
        var data = new { Foo = new[] { "foo", "bar" } };
        var target = ExpressionObserver.Create(data, o => o.Foo[2]);
        var result = await target.Take(1);
        Assert.Equal(AvaloniaProperty.UnsetValue, result);
        GC.KeepAlive(data);
    }

    [Fact]
    public async Task List_Out_Of_Bounds_Should_Return_UnsetValue()
    {
        var data = new { Foo = new List<string> { "foo", "bar" } };
        var target = ExpressionObserver.Create(data, o => o.Foo[2]);
        var result = await target.Take(1);
        Assert.Equal(AvaloniaProperty.UnsetValue, result);
        GC.KeepAlive(data);
    }

    [Fact]
    public async Task Should_Get_List_Value()
    {
        var data = new { Foo = new List<string> { "foo", "bar" } };
        var target = ExpressionObserver.Create(data, o => o.Foo[1]);
        var result = await target.Take(1);
        Assert.Equal("bar", result);
        GC.KeepAlive(data);
    }

    [Fact]
    public void Should_Track_INCC_Add()
    {
        // Index 2 is initially out of range (UnsetValue) and becomes "baz" after the Add.
        var data = new { Foo = new AvaloniaList<string> { "foo", "bar" } };
        var target = ExpressionObserver.Create(data, o => o.Foo[2]);
        var result = new List<object>();
        using (var sub = target.Subscribe(x => result.Add(x)))
        {
            data.Foo.Add("baz");
        }
        Assert.Equal(new[] { AvaloniaProperty.UnsetValue, "baz" }, result);
        // Disposing the subscription must unhook the CollectionChanged handler.
        Assert.Null(((INotifyCollectionChangedDebug)data.Foo).GetCollectionChangedSubscribers());
        GC.KeepAlive(data);
    }

    [Fact]
    public void Should_Track_INCC_Remove()
    {
        // After removing index 0, index 0 refers to the remaining element "bar".
        var data = new { Foo = new AvaloniaList<string> { "foo", "bar" } };
        var target = ExpressionObserver.Create(data, o => o.Foo[0]);
        var result = new List<object>();
        using (var sub = target.Subscribe(x => result.Add(x)))
        {
            data.Foo.RemoveAt(0);
        }
        Assert.Equal(new[] { "foo", "bar" }, result);
        Assert.Null(((INotifyCollectionChangedDebug)data.Foo).GetCollectionChangedSubscribers());
        GC.KeepAlive(data);
    }

    [Fact]
    public void Should_Track_INCC_Replace()
    {
        var data = new { Foo = new AvaloniaList<string> { "foo", "bar" } };
        var target = ExpressionObserver.Create(data, o => o.Foo[1]);
        var result = new List<object>();
        using (var sub = target.Subscribe(x => result.Add(x)))
        {
            data.Foo[1] = "baz";
        }
        Assert.Equal(new[] { "bar", "baz" }, result);
        Assert.Null(((INotifyCollectionChangedDebug)data.Foo).GetCollectionChangedSubscribers());
        GC.KeepAlive(data);
    }

    [Fact]
    public void Should_Track_INCC_Move()
    {
        // Using ObservableCollection here because AvaloniaList does not yet have a Move
        // method, but even if it did we need to test with ObservableCollection as well
        // as AvaloniaList as it implements PropertyChanged as an explicit interface event.
        var data = new { Foo = new ObservableCollection<string> { "foo", "bar" } };
        var target = ExpressionObserver.Create(data, o => o.Foo[1]);
        var result = new List<object>();
        var sub = target.Subscribe(x => result.Add(x));
        data.Foo.Move(0, 1);
        Assert.Equal(new[] { "bar", "foo" }, result);
        GC.KeepAlive(sub);
        GC.KeepAlive(data);
    }

    [Fact]
    public void Should_Track_INCC_Reset()
    {
        // Clearing the list raises a Reset; index 1 then resolves to UnsetValue.
        var data = new { Foo = new AvaloniaList<string> { "foo", "bar" } };
        var target = ExpressionObserver.Create(data, o => o.Foo[1]);
        var result = new List<object>();
        var sub = target.Subscribe(x => result.Add(x));
        data.Foo.Clear();
        Assert.Equal(new[] { "bar", AvaloniaProperty.UnsetValue }, result);
        GC.KeepAlive(sub);
        GC.KeepAlive(data);
    }

    [Fact]
    public void Should_Track_NonIntegerIndexer()
    {
        // NonIntegerIndexer raises PropertyChanged with the indexer name on writes.
        var data = new { Foo = new NonIntegerIndexer() };
        data.Foo["foo"] = "bar";
        data.Foo["baz"] = "qux";
        var target = ExpressionObserver.Create(data, o => o.Foo["foo"]);
        var result = new List<object>();
        using (var sub = target.Subscribe(x => result.Add(x)))
        {
            data.Foo["foo"] = "bar2";
        }
        var expected = new[] { "bar", "bar2" };
        Assert.Equal(expected, result);
        // Disposing the subscription must also unhook the PropertyChanged handler.
        Assert.Equal(0, data.Foo.PropertyChangedSubscriptionCount);
        GC.KeepAlive(data);
    }

    [Fact]
    public void Should_SetArrayIndex()
    {
        var data = new { Foo = new[] { "foo", "bar" } };
        var target = ExpressionObserver.Create(data, o => o.Foo[1]);
        using (target.Subscribe(_ => { }))
        {
            Assert.True(target.SetValue("baz"));
        }
        Assert.Equal("baz", data.Foo[1]);
        GC.KeepAlive(data);
    }

    [Fact]
    public void Should_Set_ExistingDictionaryEntry()
    {
        var data = new
        {
            Foo = new Dictionary<string, int>
            {
                {"foo", 1 }
            }
        };
        var target = ExpressionObserver.Create(data, o => o.Foo["foo"]);
        using (target.Subscribe(_ => { }))
        {
            Assert.True(target.SetValue(4));
        }
        Assert.Equal(4, data.Foo["foo"]);
        GC.KeepAlive(data);
    }

    [Fact]
    public void Should_Add_NewDictionaryEntry()
    {
        // Setting through a missing key adds the entry rather than failing.
        var data = new
        {
            Foo = new Dictionary<string, int>
            {
                {"foo", 1 }
            }
        };
        var target = ExpressionObserver.Create(data, o => o.Foo["bar"]);
        using (target.Subscribe(_ => { }))
        {
            Assert.True(target.SetValue(4));
        }
        Assert.Equal(4, data.Foo["bar"]);
        GC.KeepAlive(data);
    }

    [Fact]
    public void Should_Set_NonIntegerIndexer()
    {
        var data = new { Foo = new NonIntegerIndexer() };
        data.Foo["foo"] = "bar";
        data.Foo["baz"] = "qux";
        var target = ExpressionObserver.Create(data, o => o.Foo["foo"]);
        using (target.Subscribe(_ => { }))
        {
            Assert.True(target.SetValue("bar2"));
        }
        Assert.Equal("bar2", data.Foo["foo"]);
        GC.KeepAlive(data);
    }

    [Fact]
    public async Task Indexer_Only_Binding_Works()
    {
        // The expression can consist solely of an indexer on the root object.
        var data = new[] { 1, 2, 3 };
        var target = ExpressionObserver.Create(data, o => o[1]);
        var value = await target.Take(1);
        Assert.Equal(data[1], value);
    }

    // Minimal indexer whose key type is string, raising PropertyChanged with the
    // common indexer name ("Item[]") on every write so observers can re-read.
    private class NonIntegerIndexer : NotifyingBase
    {
        private readonly Dictionary<string, string> _storage = new Dictionary<string, string>();

        public string this[string key]
        {
            get
            {
                return _storage[key];
            }
            set
            {
                _storage[key] = value;
                RaisePropertyChanged(CommonPropertyNames.IndexerName);
            }
        }
    }
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Fixtures.AcceptanceTestsHttp
{
using System;
using System.Linq;
using System.Collections.Generic;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Microsoft.Rest.Serialization;
using Newtonsoft.Json;
using Models;
/// <summary>
/// HttpFailure operations.
/// </summary>
public partial class HttpFailure : IServiceOperations<AutoRestHttpInfrastructureTestService>, IHttpFailure
{
    /// <summary>
    /// Initializes a new instance of the HttpFailure class.
    /// </summary>
    /// <param name='client'>
    /// Reference to the service client.
    /// </param>
    public HttpFailure(AutoRestHttpInfrastructureTestService client)
    {
        if (client == null)
        {
            throw new ArgumentNullException("client");
        }
        this.Client = client;
    }

    /// <summary>
    /// Gets a reference to the AutoRestHttpInfrastructureTestService
    /// </summary>
    public AutoRestHttpInfrastructureTestService Client { get; private set; }

    /// <summary>
    /// Get empty error from server
    /// </summary>
    /// <param name='customHeaders'>
    /// Headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <return>
    /// A response object containing the response body and response headers.
    /// </return>
    public async Task<HttpOperationResponse<bool?>> GetEmptyErrorWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
    {
        // Tracing
        bool _shouldTrace = ServiceClientTracing.IsEnabled;
        string _invocationId = null;
        if (_shouldTrace)
        {
            _invocationId = ServiceClientTracing.NextInvocationId.ToString();
            Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
            tracingParameters.Add("cancellationToken", cancellationToken);
            ServiceClientTracing.Enter(_invocationId, this, "GetEmptyError", tracingParameters);
        }
        // Construct URL
        var _baseUrl = this.Client.BaseUri.AbsoluteUri;
        var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "http/failure/emptybody/error").ToString();
        // Create HTTP transport objects
        HttpRequestMessage _httpRequest = new HttpRequestMessage();
        HttpResponseMessage _httpResponse = null;
        _httpRequest.Method = new HttpMethod("GET");
        _httpRequest.RequestUri = new Uri(_url);
        // Set Headers
        if (customHeaders != null)
        {
            foreach(var _header in customHeaders)
            {
                if (_httpRequest.Headers.Contains(_header.Key))
                {
                    _httpRequest.Headers.Remove(_header.Key);
                }
                _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
            }
        }
        // Serialize Request
        string _requestContent = null;
        // Send Request
        if (_shouldTrace)
        {
            ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
        }
        cancellationToken.ThrowIfCancellationRequested();
        _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
        if (_shouldTrace)
        {
            ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
        }
        HttpStatusCode _statusCode = _httpResponse.StatusCode;
        cancellationToken.ThrowIfCancellationRequested();
        string _responseContent = null;
        if ((int)_statusCode != 200)
        {
            // Non-200: throw an ErrorException, best-effort deserializing an Error body
            // (deserialization failures are deliberately ignored).
            var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
            try
            {
                _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
                Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
                if (_errorBody != null)
                {
                    ex.Body = _errorBody;
                }
            }
            catch (JsonException)
            {
                // Ignore the exception
            }
            ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
            ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
            if (_shouldTrace)
            {
                ServiceClientTracing.Error(_invocationId, ex);
            }
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw ex;
        }
        // Create Result
        var _result = new HttpOperationResponse<bool?>();
        _result.Request = _httpRequest;
        _result.Response = _httpResponse;
        // Deserialize Response
        if ((int)_statusCode == 200)
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            try
            {
                _result.Body = SafeJsonConvert.DeserializeObject<bool?>(_responseContent, this.Client.DeserializationSettings);
            }
            catch (JsonException ex)
            {
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
            }
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Exit(_invocationId, _result);
        }
        return _result;
    }

    /// <summary>
    /// Get empty error from server
    /// </summary>
    /// <param name='customHeaders'>
    /// Headers that will be added to request.
    /// </param>
    /// <param name='cancellationToken'>
    /// The cancellation token.
    /// </param>
    /// <return>
    /// A response object containing the response body and response headers.
    /// </return>
    public async Task<HttpOperationResponse<bool?>> GetNoModelErrorWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
    {
        // Tracing
        bool _shouldTrace = ServiceClientTracing.IsEnabled;
        string _invocationId = null;
        if (_shouldTrace)
        {
            _invocationId = ServiceClientTracing.NextInvocationId.ToString();
            Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
            tracingParameters.Add("cancellationToken", cancellationToken);
            ServiceClientTracing.Enter(_invocationId, this, "GetNoModelError", tracingParameters);
        }
        // Construct URL
        var _baseUrl = this.Client.BaseUri.AbsoluteUri;
        var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "http/failure/nomodel/error").ToString();
        // Create HTTP transport objects
        HttpRequestMessage _httpRequest = new HttpRequestMessage();
        HttpResponseMessage _httpResponse = null;
        _httpRequest.Method = new HttpMethod("GET");
        _httpRequest.RequestUri = new Uri(_url);
        // Set Headers
        if (customHeaders != null)
        {
            foreach(var _header in customHeaders)
            {
                if (_httpRequest.Headers.Contains(_header.Key))
                {
                    _httpRequest.Headers.Remove(_header.Key);
                }
                _httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
            }
        }
        // Serialize Request
        string _requestContent = null;
        // Send Request
        if (_shouldTrace)
        {
            ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
        }
        cancellationToken.ThrowIfCancellationRequested();
        _httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
        if (_shouldTrace)
        {
            ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
        }
        HttpStatusCode _statusCode = _httpResponse.StatusCode;
        cancellationToken.ThrowIfCancellationRequested();
        string _responseContent = null;
        if ((int)_statusCode != 200)
        {
            // Non-200: this operation defines no error model, so throw a plain
            // HttpOperationException carrying the raw response content.
            var ex = new HttpOperationException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
            ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
            if (_shouldTrace)
            {
                ServiceClientTracing.Error(_invocationId, ex);
            }
            _httpRequest.Dispose();
            if (_httpResponse != null)
            {
                _httpResponse.Dispose();
            }
            throw ex;
        }
        // Create Result
        var _result = new HttpOperationResponse<bool?>();
        _result.Request = _httpRequest;
        _result.Response = _httpResponse;
        // Deserialize Response
        if ((int)_statusCode == 200)
        {
            _responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
            try
            {
                _result.Body = SafeJsonConvert.DeserializeObject<bool?>(_responseContent, this.Client.DeserializationSettings);
            }
            catch (JsonException ex)
            {
                _httpRequest.Dispose();
                if (_httpResponse != null)
                {
                    _httpResponse.Dispose();
                }
                throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
            }
        }
        if (_shouldTrace)
        {
            ServiceClientTracing.Exit(_invocationId, _result);
        }
        return _result;
    }
}
}
| |
// -----------------------------------------------------------------------------------------
// <copyright file="CloudStorageAccountTests.cs" company="Microsoft">
// Copyright 2012 Microsoft Corporation
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
// -----------------------------------------------------------------------------------------
using System;
using Microsoft.WindowsAzure.Storage.Auth;
using Microsoft.WindowsAzure.Storage.Blob;
using Microsoft.WindowsAzure.Storage.Queue;
using Microsoft.WindowsAzure.Storage.Table;
#if RTMD
using Microsoft.VisualStudio.TestPlatform.UnitTestFramework;
#else
using Microsoft.VisualStudio.TestTools.UnitTesting;
#endif
namespace Microsoft.WindowsAzure.Storage.Core.Util
{
[TestClass]
public class CloudStorageAccountTests : TestBase
{
private string token = "?sp=abcde&sig=1";
[TestMethod]
/// [Description("Anonymous credentials")]
[TestCategory(ComponentCategory.Auth)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
public void StorageCredentialsAnonymous()
{
    // Anonymous credentials expose no account name and report no key/SAS capability.
    var credentials = new StorageCredentials();
    Assert.IsNull(credentials.AccountName);
    Assert.IsTrue(credentials.IsAnonymous);
    Assert.IsFalse(credentials.IsSAS);
    Assert.IsFalse(credentials.IsSharedKey);

    // TransformUri leaves the URI untouched for anonymous credentials.
    var requestUri = new Uri("http://test/abc?querya=1");
    Assert.AreEqual(requestUri, credentials.TransformUri(requestUri));

    // There is no shared key to rotate, so UpdateKey must throw.
    var keyBytes = new byte[] { 0, 1, 2 };
    var encodedKey = Convert.ToBase64String(keyBytes);
    TestHelper.ExpectedException<InvalidOperationException>(
        () => credentials.UpdateKey(encodedKey, null),
        "Updating shared key on an anonymous credentials instance should fail.");
}
[TestMethod]
/// [Description("Shared key credentials")]
[TestCategory(ComponentCategory.Auth)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
// Verifies capability flags, key export, and both UpdateKey overloads for shared-key credentials.
public void StorageCredentialsSharedKey()
{
    StorageCredentials cred = new StorageCredentials(TestBase.TargetTenantConfig.AccountName, TestBase.TargetTenantConfig.AccountKey);
    // Account name must round-trip exactly (case-sensitive comparison: ignoreCase = false).
    Assert.AreEqual(TestBase.TargetTenantConfig.AccountName, cred.AccountName, false);
    Assert.IsFalse(cred.IsAnonymous);
    Assert.IsFalse(cred.IsSAS);
    Assert.IsTrue(cred.IsSharedKey);
    // Shared-key credentials do not rewrite request URIs.
    Uri testUri = new Uri("http://test/abc?querya=1");
    Assert.AreEqual(testUri, cred.TransformUri(testUri));
    Assert.AreEqual(TestBase.TargetTenantConfig.AccountKey, Convert.ToBase64String(cred.ExportKey()));
    // Rotate the key via the base64-string overload and confirm it round-trips through ExportKey.
    byte[] dummyKey = { 0, 1, 2 };
    string base64EncodedDummyKey = Convert.ToBase64String(dummyKey);
    cred.UpdateKey(base64EncodedDummyKey, null);
    Assert.AreEqual(base64EncodedDummyKey, Convert.ToBase64String(cred.ExportKey()));
#if !RTMD
    // The byte[] overload of UpdateKey is compiled out under RTMD; exercise it elsewhere.
    dummyKey[0] = 3;
    base64EncodedDummyKey = Convert.ToBase64String(dummyKey);
    cred.UpdateKey(dummyKey, null);
    Assert.AreEqual(base64EncodedDummyKey, Convert.ToBase64String(cred.ExportKey()));
#endif
}
[TestMethod]
/// [Description("SAS token credentials")]
[TestCategory(ComponentCategory.Auth)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
public void StorageCredentialsSAS()
{
    // SAS credentials carry no account name and no shared key.
    var credentials = new StorageCredentials(token);
    Assert.IsNull(credentials.AccountName);
    Assert.IsFalse(credentials.IsAnonymous);
    Assert.IsTrue(credentials.IsSAS);
    Assert.IsFalse(credentials.IsSharedKey);

    // A URI without a query string gets the SAS token appended as-is.
    var bareUri = new Uri("http://test/abc");
    Assert.AreEqual(bareUri.AbsoluteUri + token, credentials.TransformUri(bareUri).AbsoluteUri, true);

    // A URI that already has a query string gets the token joined with '&' (leading '?' stripped).
    var queryUri = new Uri("http://test/abc?query=a&query2=b");
    var expectedUri = queryUri.AbsoluteUri + "&" + token.Substring(1);
    Assert.AreEqual(expectedUri, credentials.TransformUri(queryUri).AbsoluteUri, true);

    // There is no shared key to rotate on SAS credentials, so UpdateKey must throw.
    var keyBytes = new byte[] { 0, 1, 2 };
    var encodedKey = Convert.ToBase64String(keyBytes);
    TestHelper.ExpectedException<InvalidOperationException>(
        () => credentials.UpdateKey(encodedKey, null),
        "Updating shared key on a SAS credentials instance should fail.");
}
[TestMethod]
/// [Description("Compare credentials for equality")]
[TestCategory(ComponentCategory.Auth)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
public void StorageCredentialsEquality()
{
    // Shared-key credentials: identical pair, different account name, different key.
    var sharedKeyA = new StorageCredentials(TestBase.TargetTenantConfig.AccountName, TestBase.TargetTenantConfig.AccountKey);
    var sharedKeyB = new StorageCredentials(TestBase.TargetTenantConfig.AccountName, TestBase.TargetTenantConfig.AccountKey);
    var sharedKeyOtherName = new StorageCredentials(TestBase.TargetTenantConfig.AccountName + "1", TestBase.TargetTenantConfig.AccountKey);
    var sharedKeyOtherKey = new StorageCredentials(TestBase.TargetTenantConfig.AccountName, Convert.ToBase64String(new byte[] { 0, 1, 2 }));

    // SAS credentials: identical pair and a different token.
    var sasA = new StorageCredentials(token);
    var sasB = new StorageCredentials(token);
    var sasOther = new StorageCredentials(token + "1");

    // Two anonymous instances.
    var anonymousA = new StorageCredentials();
    var anonymousB = new StorageCredentials();

    // Equal when every component matches...
    Assert.IsTrue(sharedKeyA.Equals(sharedKeyB));
    Assert.IsFalse(sharedKeyA.Equals(sharedKeyOtherName));
    Assert.IsFalse(sharedKeyA.Equals(sharedKeyOtherKey));
    Assert.IsTrue(sasA.Equals(sasB));
    Assert.IsFalse(sasA.Equals(sasOther));
    Assert.IsTrue(anonymousA.Equals(anonymousB));

    // ...and never equal across credential kinds.
    Assert.IsFalse(sharedKeyA.Equals(sasA));
    Assert.IsFalse(sharedKeyA.Equals(anonymousA));
    Assert.IsFalse(sasA.Equals(anonymousA));
}
// Asserts that two CloudStorageAccount instances are equivalent: same endpoints,
// same serialized forms (with and without secrets), and matching credential kinds.
// Fails the current test on any mismatch.
private void AccountsAreEqual(CloudStorageAccount a, CloudStorageAccount b)
{
    // endpoints are the same
    Assert.AreEqual(a.BlobEndpoint, b.BlobEndpoint);
    Assert.AreEqual(a.QueueEndpoint, b.QueueEndpoint);
    Assert.AreEqual(a.TableEndpoint, b.TableEndpoint);
    // serialized representations are the same.
    string aToStringNoSecrets = a.ToString();
    string aToStringWithSecrets = a.ToString(true);
    string bToStringNoSecrets = b.ToString(false);
    string bToStringWithSecrets = b.ToString(true);
    Assert.AreEqual(aToStringNoSecrets, bToStringNoSecrets, false);
    Assert.AreEqual(aToStringWithSecrets, bToStringWithSecrets, false);
    // credentials are the same
    if (a.Credentials != null && b.Credentials != null)
    {
        Assert.AreEqual(a.Credentials.IsAnonymous, b.Credentials.IsAnonymous);
        Assert.AreEqual(a.Credentials.IsSAS, b.Credentials.IsSAS);
        Assert.AreEqual(a.Credentials.IsSharedKey, b.Credentials.IsSharedKey);
        // make sure the with-secrets form actually differs from the redacted form
        // (devstore credentials are well-known, so they are excluded).
        if (!a.Credentials.IsAnonymous &&
            a.Credentials != CloudStorageAccount.DevelopmentStorageAccount.Credentials &&
            b.Credentials != CloudStorageAccount.DevelopmentStorageAccount.Credentials)
        {
            Assert.AreNotEqual(aToStringWithSecrets, bToStringNoSecrets, true);
        }
    }
    else if (a.Credentials == null && b.Credentials == null)
    {
        // Both anonymous/absent: nothing more to compare.
        return;
    }
    else
    {
        Assert.Fail("credentials mismatch");
    }
}
[TestMethod]
/// [Description("DevStore account")]
[TestCategory(ComponentCategory.Core)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
// Verifies the well-known development storage endpoints and that the account
// round-trips through ToString(true) -> Parse/TryParse.
// Fix: removed the unused local that captured ToString() without secrets.
public void CloudStorageAccountDevelopmentStorageAccount()
{
    CloudStorageAccount devstoreAccount = CloudStorageAccount.DevelopmentStorageAccount;
    // The emulator endpoints are fixed: ports 10000/10001/10002 on loopback.
    Assert.AreEqual(devstoreAccount.BlobEndpoint, new Uri("http://127.0.0.1:10000/devstoreaccount1"));
    Assert.AreEqual(devstoreAccount.QueueEndpoint, new Uri("http://127.0.0.1:10001/devstoreaccount1"));
    Assert.AreEqual(devstoreAccount.TableEndpoint, new Uri("http://127.0.0.1:10002/devstoreaccount1"));
    string devstoreAccountToStringWithSecrets = devstoreAccount.ToString(true);
    CloudStorageAccount testAccount = CloudStorageAccount.Parse(devstoreAccountToStringWithSecrets);
    // make sure it round trips
    AccountsAreEqual(testAccount, devstoreAccount);
    CloudStorageAccount acct;
    if (!CloudStorageAccount.TryParse(devstoreAccountToStringWithSecrets, out acct))
    {
        Assert.Fail("Expected TryParse success.");
    }
}
[TestMethod]
/// [Description("Regular account with HTTP")]
[TestCategory(ComponentCategory.Core)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
// Verifies that useHttps: false yields http://{account}.{service}.core.windows.net
// endpoints and that the account round-trips through ToString(true) -> Parse.
// Fix: removed the unused local that captured ToString() without secrets.
public void CloudStorageAccountDefaultStorageAccountWithHttp()
{
    StorageCredentials cred = new StorageCredentials(TestBase.TargetTenantConfig.AccountName, TestBase.TargetTenantConfig.AccountKey);
    CloudStorageAccount cloudStorageAccount = new CloudStorageAccount(cred, false);
    Assert.AreEqual(cloudStorageAccount.BlobEndpoint,
        new Uri(String.Format("http://{0}.blob.core.windows.net", TestBase.TargetTenantConfig.AccountName)));
    Assert.AreEqual(cloudStorageAccount.QueueEndpoint,
        new Uri(String.Format("http://{0}.queue.core.windows.net", TestBase.TargetTenantConfig.AccountName)));
    Assert.AreEqual(cloudStorageAccount.TableEndpoint,
        new Uri(String.Format("http://{0}.table.core.windows.net", TestBase.TargetTenantConfig.AccountName)));
    string cloudStorageAccountToStringWithSecrets = cloudStorageAccount.ToString(true);
    CloudStorageAccount testAccount = CloudStorageAccount.Parse(cloudStorageAccountToStringWithSecrets);
    // make sure it round trips
    AccountsAreEqual(testAccount, cloudStorageAccount);
}
[TestMethod]
/// [Description("Regular account with HTTPS")]
[TestCategory(ComponentCategory.Core)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
// Verifies that useHttps: true yields https://{account}.{service}.core.windows.net
// endpoints and that the account round-trips through ToString(true) -> Parse.
// Fix: removed the unused local that captured ToString() without secrets.
public void CloudStorageAccountDefaultStorageAccountWithHttps()
{
    StorageCredentials cred = new StorageCredentials(TestBase.TargetTenantConfig.AccountName, TestBase.TargetTenantConfig.AccountKey);
    CloudStorageAccount cloudStorageAccount = new CloudStorageAccount(cred, true);
    Assert.AreEqual(cloudStorageAccount.BlobEndpoint,
        new Uri(String.Format("https://{0}.blob.core.windows.net", TestBase.TargetTenantConfig.AccountName)));
    Assert.AreEqual(cloudStorageAccount.QueueEndpoint,
        new Uri(String.Format("https://{0}.queue.core.windows.net", TestBase.TargetTenantConfig.AccountName)));
    Assert.AreEqual(cloudStorageAccount.TableEndpoint,
        new Uri(String.Format("https://{0}.table.core.windows.net", TestBase.TargetTenantConfig.AccountName)));
    string cloudStorageAccountToStringWithSecrets = cloudStorageAccount.ToString(true);
    CloudStorageAccount testAccount = CloudStorageAccount.Parse(cloudStorageAccountToStringWithSecrets);
    // make sure it round trips
    AccountsAreEqual(testAccount, cloudStorageAccount);
}
[TestMethod]
/// [Description("Service client creation methods")]
[TestCategory(ComponentCategory.Core)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
public void CloudStorageAccountClientMethods()
{
    // Service clients created from an account must inherit both its endpoints
    // and its credentials.
    CloudStorageAccount account = CloudStorageAccount.DevelopmentStorageAccount;
    CloudBlobClient blobClient = account.CreateCloudBlobClient();
    CloudQueueClient queueClient = account.CreateCloudQueueClient();
    CloudTableClient tableClient = account.CreateCloudTableClient();

    // check endpoints
    Assert.AreEqual(account.BlobEndpoint, blobClient.BaseUri, "Blob endpoint doesn't match account");
    Assert.AreEqual(account.QueueEndpoint, queueClient.BaseUri, "Queue endpoint doesn't match account");
    Assert.AreEqual(account.TableEndpoint, tableClient.BaseUri, "Table endpoint doesn't match account");

    // check creds
    Assert.AreEqual(account.Credentials, blobClient.Credentials, "Blob creds don't match account");
    Assert.AreEqual(account.Credentials, queueClient.Credentials, "Queue creds don't match account");
    Assert.AreEqual(account.Credentials, tableClient.Credentials, "Table creds don't match account");
}
[TestMethod]
/// [Description("Service client creation methods")]
[TestCategory(ComponentCategory.Core)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
public void CloudStorageAccountClientUriVerify()
{
    // References obtained through each service client must live directly
    // under the account's endpoint for that service.
    StorageCredentials credentials = new StorageCredentials(TestBase.TargetTenantConfig.AccountName, TestBase.TargetTenantConfig.AccountKey);
    CloudStorageAccount account = new CloudStorageAccount(credentials, true);

    CloudBlobContainer container = account.CreateCloudBlobClient().GetContainerReference("container1");
    Assert.AreEqual(account.BlobEndpoint.ToString() + "container1", container.Uri.ToString());

    CloudQueue queue = account.CreateCloudQueueClient().GetQueueReference("queue1");
    Assert.AreEqual(account.QueueEndpoint.ToString() + "queue1", queue.Uri.ToString());

    CloudTable table = account.CreateCloudTableClient().GetTableReference("table1");
    Assert.AreEqual(account.TableEndpoint.ToString() + "table1", table.Uri.ToString());
}
[TestMethod]
/// [Description("TryParse should return false for invalid connection strings")]
[TestCategory(ComponentCategory.Core)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
public void CloudStorageAccountTryParseNullEmpty()
{
    // TryParse must reject null and empty input by returning false
    // rather than throwing.
    CloudStorageAccount result;
    Assert.IsFalse(CloudStorageAccount.TryParse(null, out result));
    Assert.IsFalse(CloudStorageAccount.TryParse(string.Empty, out result));
}
[TestMethod]
/// [Description("UseDevelopmentStorage=false should fail")]
[TestCategory(ComponentCategory.Core)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
public void CloudStorageAccountDevStoreNonTrueFails()
{
    // "true" is the only accepted value for UseDevelopmentStorage;
    // anything else must fail to parse.
    CloudStorageAccount parsed;
    Assert.IsFalse(CloudStorageAccount.TryParse("UseDevelopmentStorage=false", out parsed));
}
[TestMethod]
/// [Description("UseDevelopmentStorage should fail when used with an account name")]
[TestCategory(ComponentCategory.Core)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
public void CloudStorageAccountDevStorePlusAccountFails()
{
    // The description says development storage combined with an explicit
    // account name must be rejected. The original used
    // "UseDevelopmentStorage=false", which fails on its own (see
    // CloudStorageAccountDevStoreNonTrueFails) and so never exercised the
    // account-name conflict; "true" makes the conflict the thing under test.
    CloudStorageAccount account;
    Assert.IsFalse(CloudStorageAccount.TryParse("UseDevelopmentStorage=true;AccountName=devstoreaccount1", out account));
}
[TestMethod]
/// [Description("UseDevelopmentStorage should fail when used with a custom endpoint")]
[TestCategory(ComponentCategory.Core)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
public void CloudStorageAccountDevStorePlusEndpointFails()
{
    // As with the account-name case, the original used
    // "UseDevelopmentStorage=false", which fails regardless of the endpoint
    // setting; "true" ensures the custom-endpoint conflict is what is tested.
    CloudStorageAccount account;
    Assert.IsFalse(CloudStorageAccount.TryParse("UseDevelopmentStorage=true;BlobEndpoint=http://127.0.0.1:1000/devstoreaccount1", out account));
}
[TestMethod]
/// [Description("Custom endpoints")]
[TestCategory(ComponentCategory.Core)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
public void CloudStorageAccountDefaultEndpointOverride()
{
    // An explicit BlobEndpoint setting must take precedence over the default
    // endpoint derived from DefaultEndpointsProtocol + AccountName.
    CloudStorageAccount parsed;
    Assert.IsTrue(CloudStorageAccount.TryParse("DefaultEndpointsProtocol=http;BlobEndpoint=http://customdomain.com/;AccountName=asdf;AccountKey=123=", out parsed));
    Assert.AreEqual(new Uri("http://customdomain.com/"), parsed.BlobEndpoint);
}
[TestMethod]
/// [Description("Use DevStore with a proxy")]
[TestCategory(ComponentCategory.Core)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
public void CloudStorageAccountDevStoreProxyUri()
{
    // A dev-store connection string with a proxy URI must rebase all three
    // service endpoints onto the proxy host, keeping the well-known dev ports.
    CloudStorageAccount parsed;
    Assert.IsTrue(CloudStorageAccount.TryParse("UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://ipv4.fiddler", out parsed));
    Assert.AreEqual(new Uri("http://ipv4.fiddler:10000/devstoreaccount1"), parsed.BlobEndpoint);
    Assert.AreEqual(new Uri("http://ipv4.fiddler:10001/devstoreaccount1"), parsed.QueueEndpoint);
    Assert.AreEqual(new Uri("http://ipv4.fiddler:10002/devstoreaccount1"), parsed.TableEndpoint);
}
[TestMethod]
/// [Description("ToString method for DevStore account should not return endpoint info")]
[TestCategory(ComponentCategory.Core)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
public void CloudStorageAccountDevStoreRoundtrip()
{
    // Parsing the dev-store shorthand and serializing it back (with secrets)
    // must yield the original shorthand, not expanded endpoint settings.
    const string connectionString = "UseDevelopmentStorage=true";
    CloudStorageAccount parsed = CloudStorageAccount.Parse(connectionString);
    Assert.AreEqual(connectionString, parsed.ToString(true));
}
[TestMethod]
/// [Description("ToString method for DevStore account with a proxy should not return endpoint info")]
[TestCategory(ComponentCategory.Core)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
public void CloudStorageAccountDevStoreProxyRoundtrip()
{
    // The proxy form of the dev-store shorthand must also round-trip
    // unchanged through Parse + ToString(true).
    const string connectionString = "UseDevelopmentStorage=true;DevelopmentStorageProxyUri=http://ipv4.fiddler";
    CloudStorageAccount parsed = CloudStorageAccount.Parse(connectionString);
    Assert.AreEqual(connectionString, parsed.ToString(true));
}
[TestMethod]
/// [Description("ToString method for regular account should return the same connection string")]
[TestCategory(ComponentCategory.Core)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
public void CloudStorageAccountDefaultCloudRoundtrip()
{
    // A regular cloud connection string must survive a Parse/ToString(true)
    // round trip byte-for-byte.
    const string connectionString = "DefaultEndpointsProtocol=http;AccountName=test;AccountKey=abc=";
    Assert.AreEqual(connectionString, CloudStorageAccount.Parse(connectionString).ToString(true));
}
[TestMethod]
/// [Description("ToString method for custom endpoints should return the same connection string")]
[TestCategory(ComponentCategory.Core)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
public void CloudStorageAccountExplicitCloudRoundtrip()
{
    // A connection string with an explicit endpoint must survive a
    // Parse/ToString(true) round trip byte-for-byte.
    const string connectionString = "BlobEndpoint=https://blobs/;AccountName=test;AccountKey=abc=";
    Assert.AreEqual(connectionString, CloudStorageAccount.Parse(connectionString).ToString(true));
}
[TestMethod]
/// [Description("ToString method for anonymous credentials should return the same connection string")]
[TestCategory(ComponentCategory.Core)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
public void CloudStorageAccountAnonymousRoundtrip()
{
    // A connection string with only a blob endpoint and no credentials must
    // round-trip unchanged.
    const string connectionString = "BlobEndpoint=http://blobs/";
    Assert.AreEqual(connectionString, CloudStorageAccount.Parse(connectionString).ToString(true));

    // An account constructed directly with anonymous credentials must also
    // survive a ToString(true)/Parse round trip.
    CloudStorageAccount anonymous = new CloudStorageAccount(null, new Uri("http://blobs/"), null, null);
    AccountsAreEqual(anonymous, CloudStorageAccount.Parse(anonymous.ToString(true)));
}
[TestMethod]
/// [Description("Parse method should ignore empty values")]
[TestCategory(ComponentCategory.Core)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
public void CloudStorageAccountEmptyValues()
{
    // Empty segments (leading, embedded, trailing semicolons) must be ignored
    // by Parse and dropped from the canonical ToString output.
    const string noisyAccountString = ";BlobEndpoint=http://blobs/;;AccountName=test;;AccountKey=abc=;";
    const string validAccountString = "BlobEndpoint=http://blobs/;AccountName=test;AccountKey=abc=";
    Assert.AreEqual(validAccountString, CloudStorageAccount.Parse(noisyAccountString).ToString(true));
}
[TestMethod]
/// [Description("ToString method with custom blob endpoint should return the same connection string")]
[TestCategory(ComponentCategory.Core)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
public void CloudStorageAccountJustBlobToString()
{
    // A blob-only connection string must round-trip unchanged.
    const string connectionString = "BlobEndpoint=http://blobs/;AccountName=test;AccountKey=abc=";
    Assert.AreEqual(connectionString, CloudStorageAccount.Parse(connectionString).ToString(true));
}
[TestMethod]
/// [Description("ToString method with custom queue endpoint should return the same connection string")]
[TestCategory(ComponentCategory.Core)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
public void CloudStorageAccountJustQueueToString()
{
    // A queue-only connection string must round-trip unchanged.
    const string connectionString = "QueueEndpoint=http://queue/;AccountName=test;AccountKey=abc=";
    Assert.AreEqual(connectionString, CloudStorageAccount.Parse(connectionString).ToString(true));
}
[TestMethod]
/// [Description("ToString method with custom table endpoint should return the same connection string")]
[TestCategory(ComponentCategory.Core)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
public void CloudStorageAccountJustTableToString()
{
    // A table-only connection string must round-trip unchanged.
    const string connectionString = "TableEndpoint=http://table/;AccountName=test;AccountKey=abc=";
    Assert.AreEqual(connectionString, CloudStorageAccount.Parse(connectionString).ToString(true));
}
[TestMethod]
/// [Description("Exporting account key should be possible as a byte array")]
[TestCategory(ComponentCategory.Core)]
[TestCategory(TestTypeCategory.UnitTest)]
[TestCategory(SmokeTestCategory.NonSmoke)]
[TestCategory(TenantTypeCategory.DevStore), TestCategory(TenantTypeCategory.DevFabric), TestCategory(TenantTypeCategory.Cloud)]
public void CloudStorageAccountExportKey()
{
    // The exported key bytes must exactly match the base64-decoded AccountKey
    // from the connection string.
    const string accountKeyString = "abc2564=";
    string accountString = "BlobEndpoint=http://blobs/;AccountName=test;AccountKey=" + accountKeyString;
    CloudStorageAccount account = CloudStorageAccount.Parse(accountString);
    StorageCredentials accountAndKey = (StorageCredentials)account.Credentials;

    byte[] keyBytes = accountAndKey.ExportKey();
    byte[] expectedKeyBytes = Convert.FromBase64String(accountKeyString);

    // CollectionAssert checks length and contents together. The original
    // compared elements first and the lengths last, so a short exported key
    // crashed with IndexOutOfRangeException instead of failing the assertion.
    CollectionAssert.AreEqual(expectedKeyBytes, keyBytes);
}
}
}
| |
// ==++==
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// ==--==
// <OWNER>[....]</OWNER>
//
//
// X509Utils.cs
//
namespace System.Security.Cryptography.X509Certificates {
using System;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Runtime.InteropServices;
using System.Security.Cryptography;
using System.Security.Cryptography.Xml;
using System.Security.Permissions;
using System.Text;
using _FILETIME = System.Runtime.InteropServices.ComTypes.FILETIME;
internal class X509Utils {
private X509Utils () {}
// Maps the managed X509RevocationMode/X509RevocationFlag pair onto the
// dwFlags value expected by CertGetCertificateChain.
internal static uint MapRevocationFlags (X509RevocationMode revocationMode, X509RevocationFlag revocationFlag) {
    // No revocation checking requested: no CAPI revocation flags at all.
    if (revocationMode == X509RevocationMode.NoCheck)
        return 0;

    // Offline mode restricts revocation checking to locally cached CRL/OCSP data.
    uint dwFlags = (revocationMode == X509RevocationMode.Offline) ? CAPI.CERT_CHAIN_REVOCATION_CHECK_CACHE_ONLY : 0;

    // Map the requested scope of the revocation check onto the matching flag.
    switch (revocationFlag) {
        case X509RevocationFlag.EndCertificateOnly:
            dwFlags |= CAPI.CERT_CHAIN_REVOCATION_CHECK_END_CERT;
            break;
        case X509RevocationFlag.EntireChain:
            dwFlags |= CAPI.CERT_CHAIN_REVOCATION_CHECK_CHAIN;
            break;
        default: // ExcludeRoot
            dwFlags |= CAPI.CERT_CHAIN_REVOCATION_CHECK_CHAIN_EXCLUDE_ROOT;
            break;
    }
    return dwFlags;
}
// Lookup table for nibble -> uppercase hex digit.
private static readonly char[] hexValues = {'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B', 'C', 'D', 'E', 'F'};

// Hex-encodes the whole array (uppercase digits, in array order).
internal static string EncodeHexString (byte[] sArray) {
    return EncodeHexString(sArray, 0, (uint) sArray.Length);
}

// Hex-encodes sArray[start..end); returns null when sArray is null.
internal static string EncodeHexString (byte[] sArray, uint start, uint end) {
    if (sArray == null)
        return null;
    char[] chars = new char[(end - start) * 2];
    uint pos = 0;
    for (uint i = start; i < end; i++) {
        byte b = sArray[i];
        chars[pos++] = hexValues[b >> 4];   // high nibble
        chars[pos++] = hexValues[b & 0x0f]; // low nibble
    }
    return new String(chars);
}
// Hex-encodes the whole array in reverse byte order (most significant byte
// of a little-endian integer first).
internal static string EncodeHexStringFromInt (byte[] sArray) {
    return EncodeHexStringFromInt(sArray, 0, (uint) sArray.Length);
}

// Hex-encodes sArray[start..end) from the last byte backwards; returns null
// when sArray is null.
internal static string EncodeHexStringFromInt (byte[] sArray, uint start, uint end) {
    if (sArray == null)
        return null;
    char[] chars = new char[(end - start) * 2];
    uint pos = 0;
    for (uint i = end; i > start; ) {
        i--;
        byte b = sArray[i];
        chars[pos++] = hexValues[(uint)(b & 0xf0) >> 4];
        chars[pos++] = hexValues[b & 0x0f];
    }
    return new String(chars);
}
// Converts a single hex digit ('0'-'9', 'a'-'f', 'A'-'F') to its value;
// returns 0xFF for any non-hex character.
internal static byte HexToByte (char val) {
    if (val >= '0' && val <= '9')
        return (byte) (val - '0');
    if (val >= 'a' && val <= 'f')
        return (byte) (val - 'a' + 10);
    if (val >= 'A' && val <= 'F')
        return (byte) (val - 'A' + 10);
    return 0xFF; // sentinel: not a hex digit
}
// Decodes a hex string (whitespace ignored) into bytes.
// NOTE(review): an odd trailing nibble is silently dropped by the integer
// division, matching the original behavior.
internal static byte[] DecodeHexString (string s) {
    string hexString = Utils.DiscardWhiteSpaces(s);
    uint cbHex = (uint) hexString.Length / 2;
    byte[] hex = new byte[cbHex];
    for (int index = 0; index < cbHex; index++) {
        int pos = index * 2;
        hex[index] = (byte) ((HexToByte(hexString[pos]) << 4) | HexToByte(hexString[pos + 1]));
    }
    return hex;
}
// Byte-wise comparison of two unmanaged buffers. Buffers of different
// lengths are never equal. Early-exit comparison (not constant-time).
[SecurityCritical]
internal static unsafe bool MemEqual (byte * pbBuf1, uint cbBuf1, byte * pbBuf2, uint cbBuf2) {
    if (cbBuf1 != cbBuf2)
        return false;
    for (uint i = 0; i < cbBuf1; i++) {
        if (pbBuf1[i] != pbBuf2[i])
            return false;
    }
    return true;
}
// Copies a managed string into a LocalAlloc'd buffer as a NUL-terminated
// ANSI string. The caller owns the returned handle and must dispose it.
[SecurityCritical]
internal static SafeLocalAllocHandle StringToAnsiPtr (string s) {
    // +1 for the trailing NUL; a new byte[] is zero-initialized, so the
    // terminator is already in place after GetBytes fills the prefix.
    byte[] arr = new byte[s.Length + 1];
    Encoding.ASCII.GetBytes(s, 0, s.Length, arr, 0);
    SafeLocalAllocHandle pb = CAPI.LocalAlloc(CAPI.LMEM_FIXED, new IntPtr(arr.Length));
    Marshal.Copy(arr, 0, pb.DangerousGetHandle(), arr.Length);
    return pb;
}
// Duplicates the certificate's CERT_CONTEXT so the caller gets its own
// ref-counted handle, independent of the X509Certificate2 object's lifetime.
[SecurityCritical]
internal static SafeCertContextHandle GetCertContext (X509Certificate2 certificate) {
    SafeCertContextHandle safeCertContext = CAPI.CertDuplicateCertificateContext(certificate.Handle);
    // Keep the managed certificate (and its native context) alive until the
    // duplication has completed.
    GC.KeepAlive(certificate);
    return safeCertContext;
}
// Reads the CERT_KEY_PROV_INFO property of a certificate context into the
// supplied CspParameters. Returns false when the certificate carries no
// private-key reference (CRYPT_E_NOT_FOUND); throws CryptographicException
// for any other CAPI failure.
[SecurityCritical]
internal static bool GetPrivateKeyInfo (SafeCertContextHandle safeCertContext, ref CspParameters parameters) {
    SafeLocalAllocHandle ptr = SafeLocalAllocHandle.InvalidHandle;
    uint cbData = 0;
    // First call with an invalid buffer handle only queries the required size.
    if (!CAPI.CAPISafe.CertGetCertificateContextProperty(safeCertContext,
                                                         CAPI.CERT_KEY_PROV_INFO_PROP_ID,
                                                         ptr,
                                                         ref cbData)) {
        int dwErrorCode = Marshal.GetLastWin32Error();
        if (dwErrorCode == CAPI.CRYPT_E_NOT_FOUND)
            return false;
        // Throw with the error code captured above. The original called
        // Marshal.GetLastWin32Error() a second time here, which is fragile:
        // any intervening call can overwrite the thread's last-error value.
        throw new CryptographicException(dwErrorCode);
    }
    // Second call fills the allocated buffer with the CRYPT_KEY_PROV_INFO blob.
    ptr = CAPI.LocalAlloc(CAPI.LMEM_FIXED, new IntPtr(cbData));
    if (!CAPI.CAPISafe.CertGetCertificateContextProperty(safeCertContext,
                                                         CAPI.CERT_KEY_PROV_INFO_PROP_ID,
                                                         ptr,
                                                         ref cbData)) {
        int dwErrorCode = Marshal.GetLastWin32Error();
        if (dwErrorCode == CAPI.CRYPT_E_NOT_FOUND)
            return false;
        throw new CryptographicException(dwErrorCode);
    }
    // Copy the unmanaged structure into the managed CspParameters fields.
    CAPI.CRYPT_KEY_PROV_INFO pKeyProvInfo = (CAPI.CRYPT_KEY_PROV_INFO) Marshal.PtrToStructure(ptr.DangerousGetHandle(), typeof(CAPI.CRYPT_KEY_PROV_INFO));
    parameters.ProviderName = pKeyProvInfo.pwszProvName;
    parameters.KeyContainerName = pKeyProvInfo.pwszContainerName;
    parameters.ProviderType = (int) pKeyProvInfo.dwProvType;
    parameters.KeyNumber = (int) pKeyProvInfo.dwKeySpec;
    // Only the machine-keyset bit is surfaced through CspProviderFlags.
    parameters.Flags = (CspProviderFlags) ((pKeyProvInfo.dwFlags & CAPI.CRYPT_MACHINE_KEYSET) == CAPI.CRYPT_MACHINE_KEYSET ? CspProviderFlags.UseMachineKeyStore : 0);
    ptr.Dispose();
    return true;
}
// Creates an in-memory certificate store containing links to every
// certificate in the collection. The caller owns the returned store handle.
[SecurityCritical]
internal static SafeCertStoreHandle ExportToMemoryStore (X509Certificate2Collection collection) {
    //
    // We need to Assert all StorePermission flags since this is a memory store and we want
    // semi-trusted code to be able to export certificates to a memory store.
    //
    StorePermission sp = new StorePermission(StorePermissionFlags.AllFlags);
    sp.Assert();

    SafeCertStoreHandle safeCertStoreHandle = SafeCertStoreHandle.InvalidHandle;

    // we always want to use CERT_STORE_ENUM_ARCHIVED_FLAG since we want to preserve the collection in this operation.
    // By default, Archived certificates will not be included.
    safeCertStoreHandle = CAPI.CertOpenStore(new IntPtr(CAPI.CERT_STORE_PROV_MEMORY),
                                             CAPI.X509_ASN_ENCODING | CAPI.PKCS_7_ASN_ENCODING,
                                             IntPtr.Zero,
                                             CAPI.CERT_STORE_ENUM_ARCHIVED_FLAG | CAPI.CERT_STORE_CREATE_NEW_FLAG,
                                             null);
    if (safeCertStoreHandle == null || safeCertStoreHandle.IsInvalid)
        throw new CryptographicException(Marshal.GetLastWin32Error());

    //
    // We use CertAddCertificateLinkToStore to keep a link to the original store, so any property changes get
    // applied to the original store. This has a limit of 99 links per cert context however.
    //
    foreach (X509Certificate2 x509 in collection) {
        if (!CAPI.CertAddCertificateLinkToStore(safeCertStoreHandle,
                                                X509Utils.GetCertContext(x509),
                                                CAPI.CERT_STORE_ADD_ALWAYS,
                                                SafeCertContextHandle.InvalidHandle))
            // NOTE(review): the partially populated store handle is not
            // disposed on this path; it is reclaimed by finalization.
            throw new CryptographicException(Marshal.GetLastWin32Error());
    }
    return safeCertStoreHandle;
}
// Looks up the CAPI algorithm identifier (ALG_ID) for an OID string.
[SecuritySafeCritical]
internal static uint OidToAlgId (string value) {
    // Convert the OID string to an ANSI buffer for the CAPI lookup.
    SafeLocalAllocHandle pszOid = StringToAnsiPtr(value);
    try {
        // Algid is a plain uint copied out of the marshaled struct, so it is
        // safe to read after releasing the input buffer.
        CAPI.CRYPT_OID_INFO pOIDInfo = CAPI.CryptFindOIDInfo(CAPI.CRYPT_OID_INFO_OID_KEY, pszOid, 0);
        return pOIDInfo.Algid;
    }
    finally {
        // Release the unmanaged buffer deterministically; the original leaked
        // it until finalization.
        pszOid.Dispose();
    }
}
// A chain is considered self-signed when it contains exactly one element
// whose subject name equals its issuer name (case-insensitive).
internal static bool IsSelfSigned (X509Chain chain) {
    X509ChainElementCollection elements = chain.ChainElements;
    if (elements.Count != 1)
        return false;
    X509Certificate2 certificate = elements[0].Certificate;
    return String.Compare(certificate.SubjectName.Name, certificate.IssuerName.Name, StringComparison.OrdinalIgnoreCase) == 0;
}
// Marshals an OidCollection into a single LocalAlloc'd block laid out as
// [array of char* pointers][packed NUL-terminated ANSI OID strings], the
// shape CAPI expects for rgpszUsageIdentifier. Returns an invalid handle for
// a null/empty collection. The caller owns the returned handle.
[SecurityCritical]
internal static SafeLocalAllocHandle CopyOidsToUnmanagedMemory (OidCollection oids) {
    SafeLocalAllocHandle safeLocalAllocHandle = SafeLocalAllocHandle.InvalidHandle;
    if (oids == null || oids.Count == 0)
        return safeLocalAllocHandle;
    // Size of the pointer table, then the total size of all OID strings
    // (each with its trailing NUL).
    int ptrSize = oids.Count * Marshal.SizeOf(typeof(IntPtr));
    int oidSize = 0;
    foreach (Oid oid in oids) {
        oidSize += (oid.Value.Length + 1);
    }
    safeLocalAllocHandle = CAPI.LocalAlloc(CAPI.LPTR, new IntPtr((uint) ptrSize + (uint) oidSize));
    // pOid walks the string area, which starts right after the pointer table.
    IntPtr pOid = new IntPtr((long)safeLocalAllocHandle.DangerousGetHandle() + ptrSize);
    for (int index=0; index < oids.Count; index++) {
        // Record the string's address in the pointer table...
        Marshal.WriteIntPtr(new IntPtr((long) safeLocalAllocHandle.DangerousGetHandle() + index * Marshal.SizeOf(typeof(IntPtr))), pOid);
        // ...then copy the ANSI bytes. The NUL terminator is not copied
        // explicitly; presumably LPTR zero-fills the allocation — confirm
        // against CAPI.LocalAlloc's semantics.
        byte[] ansiOid = Encoding.ASCII.GetBytes(oids[index].Value);
        Marshal.Copy(ansiOid, 0, pOid, ansiOid.Length);
        pOid = new IntPtr((long) pOid + oids[index].Value.Length + 1);
    }
    return safeLocalAllocHandle;
}
//
// Enumerates every certificate in the given store into a managed collection.
// (The previous header comment, "Builds a certificate chain", described a
// different routine.)
//
[SecurityCritical]
internal static X509Certificate2Collection GetCertificates(SafeCertStoreHandle safeCertStoreHandle) {
    X509Certificate2Collection collection = new X509Certificate2Collection();
    // CertEnumCertificatesInStore starts from IntPtr.Zero and hands back the
    // next context on each call; a zero return ends the enumeration.
    IntPtr pEnumContext = CAPI.CertEnumCertificatesInStore(safeCertStoreHandle, IntPtr.Zero);
    while (pEnumContext != IntPtr.Zero) {
        // X509Certificate2 copies/duplicates the context it is given.
        X509Certificate2 certificate = new X509Certificate2(pEnumContext);
        collection.Add(certificate);
        pEnumContext = CAPI.CertEnumCertificatesInStore(safeCertStoreHandle, pEnumContext);
    }
    return collection;
}
//
// Builds a certificate chain with CertGetCertificateChain. On success
// returns CAPI.S_OK and ppChainContext receives the chain; on failure
// returns the HRESULT of the last Win32 error.
//
[SecurityCritical]
internal static unsafe int BuildChain (IntPtr hChainEngine,
                                       SafeCertContextHandle pCertContext,
                                       X509Certificate2Collection extraStore,
                                       OidCollection applicationPolicy,
                                       OidCollection certificatePolicy,
                                       X509RevocationMode revocationMode,
                                       X509RevocationFlag revocationFlag,
                                       DateTime verificationTime,
                                       TimeSpan timeout,
                                       ref SafeCertChainHandle ppChainContext) {
    if (pCertContext == null || pCertContext.IsInvalid)
        throw new ArgumentException(SecurityResources.GetResourceString("Cryptography_InvalidContextHandle"), "pCertContext");

    // Extra certificates (if any) are exposed to CAPI through a memory store.
    SafeCertStoreHandle hCertStore = SafeCertStoreHandle.InvalidHandle;
    if (extraStore != null && extraStore.Count > 0)
        hCertStore = X509Utils.ExportToMemoryStore(extraStore);

    CAPI.CERT_CHAIN_PARA ChainPara = new CAPI.CERT_CHAIN_PARA();
    // Initialize the structure size.
    ChainPara.cbSize = (uint) Marshal.SizeOf(ChainPara);

    // Application policy: restrict the chain to certs valid for these usages.
    SafeLocalAllocHandle applicationPolicyHandle = SafeLocalAllocHandle.InvalidHandle;
    if (applicationPolicy != null && applicationPolicy.Count > 0) {
        ChainPara.RequestedUsage.dwType = CAPI.USAGE_MATCH_TYPE_AND;
        ChainPara.RequestedUsage.Usage.cUsageIdentifier = (uint) applicationPolicy.Count;
        applicationPolicyHandle = X509Utils.CopyOidsToUnmanagedMemory(applicationPolicy);
        ChainPara.RequestedUsage.Usage.rgpszUsageIdentifier = applicationPolicyHandle.DangerousGetHandle();
    }

    // Certificate (issuance) policy.
    SafeLocalAllocHandle certificatePolicyHandle = SafeLocalAllocHandle.InvalidHandle;
    if (certificatePolicy != null && certificatePolicy.Count > 0) {
        ChainPara.RequestedIssuancePolicy.dwType = CAPI.USAGE_MATCH_TYPE_AND;
        ChainPara.RequestedIssuancePolicy.Usage.cUsageIdentifier = (uint) certificatePolicy.Count;
        certificatePolicyHandle = X509Utils.CopyOidsToUnmanagedMemory(certificatePolicy);
        ChainPara.RequestedIssuancePolicy.Usage.rgpszUsageIdentifier = certificatePolicyHandle.DangerousGetHandle();
    }

    // BUGFIX: TimeSpan.Milliseconds is only the millisecond *component*
    // (0-999) of the duration; TotalMilliseconds is the whole duration. The
    // original silently truncated, e.g., a 2-second timeout to 0 ms.
    ChainPara.dwUrlRetrievalTimeout = (uint) timeout.TotalMilliseconds;

    // Reinterpret the FILETIME struct as a 64-bit tick count.
    _FILETIME ft = new _FILETIME();
    *((long*) &ft) = verificationTime.ToFileTime();

    uint flags = X509Utils.MapRevocationFlags(revocationMode, revocationFlag);

    // Build the chain.
    if (!CAPI.CAPISafe.CertGetCertificateChain(hChainEngine,
                                               pCertContext,
                                               ref ft,
                                               hCertStore,
                                               ref ChainPara,
                                               flags,
                                               IntPtr.Zero,
                                               ref ppChainContext))
        return Marshal.GetHRForLastWin32Error();

    // NOTE(review): the OID buffers are only released on the success path,
    // matching the original; on failure they are reclaimed by finalization.
    applicationPolicyHandle.Dispose();
    certificatePolicyHandle.Dispose();

    return CAPI.S_OK;
}
//
// Verifies whether a certificate is valid for the specified policy.
// S_OK means the certificate is valid for the specified policy.
// S_FALSE means the certificate is invalid for the specified policy.
// Anything else is an error.
//
[SecurityCritical]
internal static unsafe int VerifyCertificate (SafeCertContextHandle pCertContext,
                                              OidCollection applicationPolicy,
                                              OidCollection certificatePolicy,
                                              X509RevocationMode revocationMode,
                                              X509RevocationFlag revocationFlag,
                                              DateTime verificationTime,
                                              TimeSpan timeout,
                                              X509Certificate2Collection extraStore,
                                              IntPtr pszPolicy,
                                              IntPtr pdwErrorStatus) {
    if (pCertContext == null || pCertContext.IsInvalid)
        throw new ArgumentException("pCertContext");

    CAPI.CERT_CHAIN_POLICY_PARA PolicyPara = new CAPI.CERT_CHAIN_POLICY_PARA(Marshal.SizeOf(typeof(CAPI.CERT_CHAIN_POLICY_PARA)));
    CAPI.CERT_CHAIN_POLICY_STATUS PolicyStatus = new CAPI.CERT_CHAIN_POLICY_STATUS(Marshal.SizeOf(typeof(CAPI.CERT_CHAIN_POLICY_STATUS)));

    // Build the chain first; any chain-building failure is returned directly.
    SafeCertChainHandle pChainContext = SafeCertChainHandle.InvalidHandle;
    int hr = X509Utils.BuildChain(new IntPtr(CAPI.HCCE_CURRENT_USER),
                                  pCertContext,
                                  extraStore,
                                  applicationPolicy,
                                  certificatePolicy,
                                  revocationMode,
                                  revocationFlag,
                                  verificationTime,
                                  timeout,
                                  ref pChainContext);
    if (hr != CAPI.S_OK)
        return hr;

    // Verify the chain using the specified policy.
    if (CAPI.CAPISafe.CertVerifyCertificateChainPolicy(pszPolicy, pChainContext, ref PolicyPara, ref PolicyStatus)) {
        // Optionally surface the raw CAPI error status to the caller.
        if (pdwErrorStatus != IntPtr.Zero)
            *(uint*) pdwErrorStatus = PolicyStatus.dwError;

        // A nonzero dwError means the policy check itself succeeded but the
        // certificate failed the policy.
        if (PolicyStatus.dwError != 0)
            return CAPI.S_FALSE;
    } else {
        // The API failed.
        return Marshal.GetHRForLastWin32Error();
    }
    return CAPI.S_OK;
}
}
}
| |
using System.Collections.Generic;
using System.IO;
using UnityEngine;
using UnityEditor;
using UnityEditor.Callbacks;
namespace UnityEditor.MATEditor
{
public static class MATPostBuildTrigger
{
// Frameworks Ids - These ids have been generated by creating a project using Xcode then
// extracting the values from the generated project.pbxproj. The format of this
// file is not documented by Apple so the correct algorithm for generating these
// ids is unknown. They also differ from project to project, so it may not matter.
// For each framework: display name, PBXBuildFile entry id, and
// PBXFileReference entry id (see add_build_file / add_framework_file_reference).
const string FRAMEWORK_CORETELEPHONY = "CoreTelephony.framework";
const string FRAMEWORK_ID_CORETELEPHONY = "4266CDD118907E7C00C4E70B";
const string FRAMEWORK_FILEREFID_CORETELEPHONY = "4266CDD018907E7C00C4E70B";
const string FRAMEWORK_IAD = "iAd.framework";
const string FRAMEWORK_ID_IAD = "4266CE1C18907F5400C4E70B";
const string FRAMEWORK_FILEREFID_IAD = "4266CE1B18907F5400C4E70B";
const string FRAMEWORK_MOBILECORESERVICES = "MobileCoreServices.framework";
const string FRAMEWORK_ID_MOBILECORESERVICES = "4266CDD318907E8500C4E70B";
const string FRAMEWORK_FILEREFID_MOBILECORESERVICES = "4266CDD218907E8500C4E70B";
const string FRAMEWORK_STOREKIT = "StoreKit.framework";
const string FRAMEWORK_ID_STOREKIT = "4266CDD118907E8F00C4E70B";
const string FRAMEWORK_FILEREFID_STOREKIT = "4266CDD018907E8F00C4E70B";
const string FRAMEWORK_SYSTEMCONFIGURATION = "SystemConfiguration.framework";
const string FRAMEWORK_ID_SYSTEMCONFIGURATION = "4266CDD518907E8F00C4E70B";
const string FRAMEWORK_FILEREFID_SYSTEMCONFIGURATION = "4266CDD418907E8F00C4E70B";
// Location of Apple SDK frameworks, and the Xcode project Unity generates.
const string DEFAULT_FRAMEWORKS_FOLDER = "System/Library/Frameworks";
const string DEFAULT_UNITY_IPHONE_PROJECT_NAME = "Unity-iPhone.xcodeproj";
// Value type describing one Apple framework entry to be injected into the
// generated Xcode project.pbxproj.
public struct framework
{
    public string sName;   // e.g. "StoreKit.framework"
    public string sId;     // id used for the PBXBuildFile entry
    public string sFileId; // id used for the PBXFileReference entry
    public string sPath;   // custom path, or null for the SDK default location
    public bool sWeak;     // whether to link the framework as weak/optional
    public framework(string name, string myId, string fileid, string path, bool weak)
    {
        sName = name;
        sId = myId;
        sFileId = fileid;
        sPath = path;
        sWeak = weak;
    }
}
/// Post-build hook: registers the frameworks MobileAppTracker needs and
/// patches them into the generated Xcode project (iOS builds only).
[PostProcessBuild] // this attribute causes the method to be automatically executed
public static void OnPostProcessBuild(BuildTarget target, string path)
{
    Debug.Log("New Post Processing Build: OnPostProcessBuild: path = " + path);
    Debug.Log("OnPostProcessBuild - START");
    // 1: Proceed only if this is an iOS build
    #if UNITY_IPHONE
    string xcodeprojPath = Path.Combine(path, DEFAULT_UNITY_IPHONE_PROJECT_NAME);
    Debug.Log("We found xcodeprojPath to be : " + xcodeprojPath);
    // Keyed by framework name; a null path means the SDK default location.
    Dictionary<string, framework> dictFrameworks = new Dictionary<string, framework> ();
    // List of all the frameworks to be added to the project
    dictFrameworks.Add(FRAMEWORK_CORETELEPHONY, new framework (FRAMEWORK_CORETELEPHONY, FRAMEWORK_ID_CORETELEPHONY, FRAMEWORK_FILEREFID_CORETELEPHONY, null, false));
    dictFrameworks.Add(FRAMEWORK_IAD, new framework (FRAMEWORK_IAD, FRAMEWORK_ID_IAD, FRAMEWORK_FILEREFID_IAD, null, false));
    dictFrameworks.Add(FRAMEWORK_MOBILECORESERVICES, new framework (FRAMEWORK_MOBILECORESERVICES, FRAMEWORK_ID_MOBILECORESERVICES, FRAMEWORK_FILEREFID_MOBILECORESERVICES, null, false));
    dictFrameworks.Add(FRAMEWORK_STOREKIT, new framework (FRAMEWORK_STOREKIT, FRAMEWORK_ID_STOREKIT, FRAMEWORK_FILEREFID_STOREKIT, null, false));
    dictFrameworks.Add(FRAMEWORK_SYSTEMCONFIGURATION, new framework (FRAMEWORK_SYSTEMCONFIGURATION, FRAMEWORK_ID_SYSTEMCONFIGURATION, FRAMEWORK_FILEREFID_SYSTEMCONFIGURATION, null, false));
    // 2: process our project
    updateXcodeProject(xcodeprojPath, dictFrameworks);
    #else
    Debug.Log("OnPostProcessBuild - Warning: No PostProcessing required. This is not an iOS build.");
    #endif
    Debug.Log("OnPostProcessBuild - END");
}
// MAIN FUNCTION
// xcodeproj_filename - filename of the Xcode project to change
// frameworks - list of Apple standard frameworks to add to the project
public static void updateXcodeProject(string xcodeprojPath, Dictionary<string, framework> dictFrameworks)
{
// STEP 1 :
// Create an array of strings by reading in all lines from the xcode project file.
string project = xcodeprojPath + "/project.pbxproj";
string[] lines = System.IO.File.ReadAllLines(project);
// STEP 2 :
// Loop through the project file text and find out if all the required frameworks already exist.
int i = 0;
bool bFound = false;
bool bEnd = false;
bool existsCORETELEPHONY = false;
bool existsIAD = false;
bool existsMOBILECORESERVICES = false;
bool existsSTOREKIT = false;
bool existsSYSTEMCONFIGURATION = false;
Debug.Log ("total frameworks required = " + dictFrameworks.Count);
while (!bFound && !bEnd)
{
if (lines[i].Length > 5 && (string.Compare(lines[i].Substring(3, 3), "End") == 0) )
bEnd = true;
if (lines [i].Contains (FRAMEWORK_CORETELEPHONY)) {
existsCORETELEPHONY = true;
dictFrameworks.Remove (FRAMEWORK_CORETELEPHONY);
}
else if (lines [i].Contains (FRAMEWORK_IAD)) {
existsIAD = true;
dictFrameworks.Remove (FRAMEWORK_IAD);
}
else if (lines [i].Contains (FRAMEWORK_MOBILECORESERVICES)) {
existsMOBILECORESERVICES = true;
dictFrameworks.Remove (FRAMEWORK_MOBILECORESERVICES);
}
else if (lines [i].Contains (FRAMEWORK_STOREKIT)) {
existsSTOREKIT = true;
dictFrameworks.Remove (FRAMEWORK_STOREKIT);
}
else if (lines [i].Contains (FRAMEWORK_SYSTEMCONFIGURATION)) {
existsSYSTEMCONFIGURATION = true;
dictFrameworks.Remove (FRAMEWORK_SYSTEMCONFIGURATION);
}
bFound = existsCORETELEPHONY && existsIAD && existsMOBILECORESERVICES && existsSTOREKIT && existsSYSTEMCONFIGURATION;
++i;
}
Debug.Log ("frameworks to add = " + dictFrameworks.Count);
if (bFound)
{
Debug.Log("OnPostProcessBuild - WARNING: The frameworks required by MobileAppTracker are already present in the Xcode project. Nothing to add.");
}
else
{
// STEP 3 :
// Edit the project.pbxproj and include the missing frameworks required by MobileAppTracker.
FileStream filestr = new FileStream(project, FileMode.Create); //Create new file and open it for read and write, if the file exists overwrite it.
filestr.Close();
StreamWriter fCurrentXcodeProjFile = new StreamWriter(project); // will be used for writing
// As we iterate through the list we'll record which section of the
// project.pbxproj we are currently in
string section = string.Empty;
// We use this boolean to decide whether we have already added the list of
// build files to the link line. This is needed because there could be multiple
// build targets and they are not named in the project.pbxproj
bool bFrameworks_build_added = false;
i = 0;
foreach (string line in lines)
{
//////////////////////////////
// STEP 1 : Build Options //
//////////////////////////////
if (section == "XCBuildConfiguration"
&& line.StartsWith("\t\t\t\tOTHER_CFLAGS"))
{
Debug.Log("OnPostProcessBuild - Adding FRAMEWORK_SEARCH_PATHS");
// Add "." to the framework search path
fCurrentXcodeProjFile.Write("\t\t\t\tFRAMEWORK_SEARCH_PATHS = (\n\t\t\t\t\t\"$(inherited)\",\n\t\t\t\t\t\"\\\"$(SRCROOT)/" + "." + "\\\"\",\n\t\t\t\t);\n");
}
fCurrentXcodeProjFile.WriteLine(line);
//////////////////////////////////
// STEP 2 : Include Frameworks //
//////////////////////////////////
// Each section starts with a comment such as : /* Begin PBXBuildFile section */'
if ( line.Length > 7 && string.Compare(line.Substring(3, 5), "Begin") == 0 )
{
section = line.Split(' ')[2];
Debug.Log("NEW_SECTION: " + section);
if (section == "PBXBuildFile")
{
// Add one entry for each framework to the PBXBuildFile section
// Loop over pairs with foreach
foreach (KeyValuePair<string, framework> pair in dictFrameworks)
{
framework fr = pair.Value;
add_build_file(fCurrentXcodeProjFile, fr.sId, fr.sName, fr.sFileId, fr.sWeak);
}
}
if (section == "PBXFileReference")
{
// Add one entry for each framework to the PBXFileReference section
// Loop over pairs with foreach
foreach (KeyValuePair<string, framework> pair in dictFrameworks)
{
framework fr = pair.Value;
add_framework_file_reference(fCurrentXcodeProjFile, fr.sFileId, fr.sName, fr.sPath);
}
}
if (line.Length > 5 && string.Compare(line.Substring(3, 3), "End") == 0)
{
section = string.Empty;
}
}
// The PBXResourcesBuildPhase section is what appears in XCode as 'Link
// Binary With Libraries'. As with the frameworks we make the assumption the
// first target is always 'Unity-iPhone' as the name of the target itself is
// not listed in project.pbxproj
if (section == "PBXFrameworksBuildPhase"
&& line.Trim().Length > 4
&& string.Compare(line.Trim().Substring(0, 5) , "files") == 0
&& !bFrameworks_build_added)
{
// Add one entry for each framework to the PBXFrameworksBuildPhase section
// Loop over pairs with foreach
foreach (KeyValuePair<string, framework> pair in dictFrameworks)
{
framework fr = pair.Value;
add_frameworks_build_phase(fCurrentXcodeProjFile, fr.sId, fr.sName);
}
bFrameworks_build_added = true;
}
// The PBXGroup is the section that appears in XCode as 'Copy Bundle Resources'.
if (section == "PBXGroup"
&& line.Trim().Length > 7
&& string.Compare(line.Trim().Substring(0, 8) , "children") == 0
&& lines[i-2].Trim().Split(' ').Length > 0
&& string.Compare(lines[i-2].Trim().Split(' ')[2] , "CustomTemplate" ) == 0 )
{
Debug.Log("Adding frameworks in PBXGroup");
// Loop over pairs with foreach
foreach (KeyValuePair<string, framework> pair in dictFrameworks)
{
framework fr = pair.Value;
Debug.Log(fr.sName);
add_group(fCurrentXcodeProjFile, fr.sFileId, fr.sName);
}
}
++i;
}
fCurrentXcodeProjFile.Close();
}
}
/////////////////
///////////
// ROUTINES
///////////
/////////////////
// Adds a line into the PBXBuildFile section.
// Emits one entry of the form:
//   <id> /* <name> in Frameworks */ = {isa = PBXBuildFile; fileRef = <fileref> /* <name> */; };
// When 'weak' is true the entry carries ATTRIBUTES = (Weak, ) so the
// framework is linked optionally.
private static void add_build_file(StreamWriter file, string id, string name, string fileref, bool weak)
{
    Debug.Log("OnPostProcessBuild - Adding build file " + name);
    const string subsection = "Frameworks";
    string settings;
    if (weak)
        settings = "settings = {ATTRIBUTES = (Weak, ); }; ";
    else
        settings = string.Empty;
    string entry = string.Format(
        "\t\t{0} /* {1} in {2} */ = {{isa = PBXBuildFile; fileRef = {3} /* {1} */; {4}}};\n",
        id, name, subsection, fileref, settings);
    file.Write(entry);
}
// Adds a line into the PBXFileReference section.
// A null 'path' means the framework ships with the SDK (sourceTree = SDKROOT,
// located under DEFAULT_FRAMEWORKS_FOLDER); a non-null path is resolved
// relative to the group. libsqlite3.0.dylib is special-cased because it
// lives in usr/lib instead of the frameworks folder.
private static void add_framework_file_reference(StreamWriter file, string id, string name, string path)
{
    Debug.Log("OnPostProcessBuild - Adding framework file reference " + name);
    string sourceTree;
    if (path == null)
    {
        sourceTree = "SDKROOT";
        path = DEFAULT_FRAMEWORKS_FOLDER; // all the frameworks come from here
    }
    else
    {
        sourceTree = "\"<group>\"";
    }
    if ("libsqlite3.0.dylib".Equals(name)) // except for libsqlite
        path = "usr/lib";
    file.Write(string.Format(
        "\t\t{0} /* {1} */ = {{isa = PBXFileReference; lastKnownFileType = wrapper.framework; name = {1}; path = \"{2}/{1}\"; sourceTree = {3}; }};\n",
        id, name, path, sourceTree));
}
// Adds a line into the PBXFrameworksBuildPhase section
// ("Link Binary With Libraries" in the Xcode UI).
private static void add_frameworks_build_phase(StreamWriter file, string id, string name)
{
    Debug.Log("OnPostProcessBuild - Adding build phase " + name);
    file.Write(string.Format("\t\t\t\t{0} /* {1} in Frameworks */,\n", id, name));
}
// Adds a line into the PBXGroup section, listing the framework file
// reference as a child of the group.
private static void add_group(StreamWriter file, string id, string name)
{
    Debug.Log("OnPostProcessBuild - Add group " + name);
    file.Write(string.Format("\t\t\t\t{0} /* {1} */,\n", id, name));
}
}
}
| |
#region License
//
// Copyright (c) 2007-2009, Sean Chambers <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
using System.IO;
using System.Reflection;
using FluentMigrator.Infrastructure;
using FluentMigrator.Model;
using NUnit.Framework;
using NUnit.Should;
namespace FluentMigrator.Tests.Unit
{
// Unit tests for DefaultMigrationConventions: the default naming rules
// (primary key, foreign key, index), migration type discovery, MigrationInfo
// extraction, working directory, tag matching and auto-script name
// generation.
[TestFixture]
public class DefaultMigrationConventionsTests
{
// --- Naming conventions: names are derived from the tables/columns involved ---
[Test]
public void GetPrimaryKeyNamePrefixesTableNameWithPKAndUnderscore()
{
DefaultMigrationConventions.GetPrimaryKeyName("Foo").ShouldBe("PK_Foo");
}
[Test]
public void GetForeignKeyNameReturnsValidForeignKeyNameForSimpleForeignKey()
{
var foreignKey = new ForeignKeyDefinition
{
ForeignTable = "Users", ForeignColumns = new[] { "GroupId" },
PrimaryTable = "Groups", PrimaryColumns = new[] { "Id" }
};
DefaultMigrationConventions.GetForeignKeyName(foreignKey).ShouldBe("FK_Users_GroupId_Groups_Id");
}
[Test]
public void GetForeignKeyNameReturnsValidForeignKeyNameForComplexForeignKey()
{
var foreignKey = new ForeignKeyDefinition
{
ForeignTable = "Users", ForeignColumns = new[] { "ColumnA", "ColumnB" },
PrimaryTable = "Groups", PrimaryColumns = new[] { "ColumnC", "ColumnD" }
};
DefaultMigrationConventions.GetForeignKeyName(foreignKey).ShouldBe("FK_Users_ColumnA_ColumnB_Groups_ColumnC_ColumnD");
}
[Test]
public void GetIndexNameReturnsValidIndexNameForSimpleIndex()
{
var index = new IndexDefinition
{
TableName = "Bacon",
Columns =
{
new IndexColumnDefinition { Name = "BaconName", Direction = Direction.Ascending }
}
};
DefaultMigrationConventions.GetIndexName(index).ShouldBe("IX_Bacon_BaconName");
}
[Test]
public void GetIndexNameReturnsValidIndexNameForComplexIndex()
{
var index = new IndexDefinition
{
TableName = "Bacon",
Columns =
{
new IndexColumnDefinition { Name = "BaconName", Direction = Direction.Ascending },
new IndexColumnDefinition { Name = "BaconSpice", Direction = Direction.Descending }
}
};
DefaultMigrationConventions.GetIndexName(index).ShouldBe("IX_Bacon_BaconName_BaconSpice");
}
// --- Migration discovery: a migration must extend Migration AND carry [Migration] ---
[Test]
public void TypeIsMigrationReturnsTrueIfTypeExtendsMigrationAndHasMigrationAttribute()
{
DefaultMigrationConventions.TypeIsMigration(typeof(DefaultConventionMigrationFake))
.ShouldBeTrue();
}
[Test]
public void TypeIsMigrationReturnsFalseIfTypeDoesNotExtendMigration()
{
DefaultMigrationConventions.TypeIsMigration(typeof(object))
.ShouldBeFalse();
}
[Test]
public void TypeIsMigrationReturnsFalseIfTypeDoesNotHaveMigrationAttribute()
{
DefaultMigrationConventions.TypeIsMigration(typeof(MigrationWithoutAttributeFake))
.ShouldBeFalse();
}
// --- MigrationInfo extraction from the [Migration]/[MigrationTrait] attributes ---
[Test]
public void MigrationInfoShouldRetainMigration()
{
var migration = new DefaultConventionMigrationFake();
var migrationinfo = DefaultMigrationConventions.GetMigrationInfoFor(migration);
migrationinfo.Migration.ShouldBeSameAs(migration);
}
[Test]
public void MigrationInfoShouldExtractVersion()
{
var migration = new DefaultConventionMigrationFake();
var migrationinfo = DefaultMigrationConventions.GetMigrationInfoFor(migration);
migrationinfo.Version.ShouldBe(123);
}
[Test]
public void MigrationInfoShouldExtractTransactionBehavior()
{
var migration = new DefaultConventionMigrationFake();
var migrationinfo = DefaultMigrationConventions.GetMigrationInfoFor(migration);
migrationinfo.TransactionBehavior.ShouldBe(TransactionBehavior.None);
}
[Test]
public void MigrationInfoShouldExtractTraits()
{
var migration = new DefaultConventionMigrationFake();
var migrationinfo = DefaultMigrationConventions.GetMigrationInfoFor(migration);
migrationinfo.Trait("key").ShouldBe("test");
}
// Integration test: depends on the on-disk location of the test assembly.
[Test]
[Category("Integration")]
public void WorkingDirectoryConventionDefaultsToAssemblyFolder()
{
var defaultWorkingDirectory = DefaultMigrationConventions.GetWorkingDirectory();
defaultWorkingDirectory.ShouldNotBeNull();
defaultWorkingDirectory.Contains("bin").ShouldBeTrue();
}
// --- Tagging: [Tags] detection and tag matching (fixture classes are below) ---
[Test]
public void TypeHasTagsReturnTrueIfTypeHasTagsAttribute()
{
DefaultMigrationConventions.TypeHasTags(typeof(TaggedWithUk))
.ShouldBeTrue();
}
[Test]
public void TypeHasTagsReturnFalseIfTypeDoesNotHaveTagsAttribute()
{
DefaultMigrationConventions.TypeHasTags(typeof(HasNoTagsFake))
.ShouldBeFalse();
}
// Nested fixture: TypeHasMatchingTags must require that the type carries
// every requested tag (exact matches, across one or more [Tags] attributes).
public class TypeHasMatchingTags
{
[Test]
[Category("Tagging")]
public void WhenTypeHasTagAttributeButNoTagsPassedInReturnsFalse()
{
DefaultMigrationConventions.TypeHasMatchingTags(typeof(TaggedWithUk), new string[] { })
.ShouldBeFalse();
}
[Test]
[Category("Tagging")]
public void WhenTypeHasTagAttributeWithNoTagNamesReturnsFalse()
{
DefaultMigrationConventions.TypeHasMatchingTags(typeof(HasTagAttributeWithNoTagNames), new string[] { })
.ShouldBeFalse();
}
[Test]
[Category("Tagging")]
public void WhenTypeHasOneTagThatDoesNotMatchSingleThenTagReturnsFalse()
{
DefaultMigrationConventions.TypeHasMatchingTags(typeof(TaggedWithUk), new[] { "IE" })
.ShouldBeFalse();
}
[Test]
[Category("Tagging")]
public void WhenTypeHasOneTagThatDoesMatchSingleTagThenReturnsTrue()
{
DefaultMigrationConventions.TypeHasMatchingTags(typeof(TaggedWithUk), new[] { "UK" })
.ShouldBeTrue();
}
// "UK2" must not match "UK": matching is exact, not prefix-based.
[Test]
[Category("Tagging")]
public void WhenTypeHasOneTagThatPartiallyMatchesTagThenReturnsFalse()
{
DefaultMigrationConventions.TypeHasMatchingTags(typeof(TaggedWithUk), new[] { "UK2" })
.ShouldBeFalse();
}
[Test]
[Category("Tagging")]
public void WhenTypeHasOneTagThatDoesMatchMultipleTagsThenReturnsFalse()
{
DefaultMigrationConventions.TypeHasMatchingTags(typeof(TaggedWithUk), new[] { "UK", "Production" })
.ShouldBeFalse();
}
[Test]
[Category("Tagging")]
public void WhenTypeHasTagsInTwoAttributeThatDoesMatchSingleTagThenReturnsTrue()
{
DefaultMigrationConventions.TypeHasMatchingTags(typeof(TaggedWithBeAndUkAndProductionAndStagingInTwoTagsAttributes), new[] { "UK" })
.ShouldBeTrue();
}
[Test]
[Category("Tagging")]
public void WhenTypeHasTagsInTwoAttributesThatDoesMatchMultipleTagsThenReturnsTrue()
{
DefaultMigrationConventions.TypeHasMatchingTags(typeof(TaggedWithBeAndUkAndProductionAndStagingInTwoTagsAttributes), new[] { "UK", "Production" })
.ShouldBeTrue();
}
[Test]
[Category("Tagging")]
public void WhenTypeHasTagsInOneAttributeThatDoesMatchMultipleTagsThenReturnsTrue()
{
DefaultMigrationConventions.TypeHasMatchingTags(typeof(TaggedWithBeAndUkAndProductionAndStagingInOneTagsAttribute), new[] { "UK", "Production" })
.ShouldBeTrue();
}
[Test]
[Category("Tagging")]
public void WhenTypeHasTagsInTwoAttributesThatDontNotMatchMultipleTagsThenReturnsFalse()
{
DefaultMigrationConventions.TypeHasMatchingTags(typeof(TaggedWithBeAndUkAndProductionAndStagingInTwoTagsAttributes), new[] { "UK", "IE" })
.ShouldBeFalse();
}
}
// --- Auto-script name generation: Scripts.<Up|Down>.<version>_<type>_<db>.sql ---
[FluentMigrator.Migration(20130508175300)]
class AutoScriptMigrationFake : AutoScriptMigration { }
[Test]
public void GetAutoScriptUpName()
{
var type = typeof(AutoScriptMigrationFake);
var databaseType = "sqlserver";
DefaultMigrationConventions.GetAutoScriptUpName(type, databaseType)
.ShouldBe("Scripts.Up.20130508175300_AutoScriptMigrationFake_sqlserver.sql");
}
[Test]
public void GetAutoScriptDownName()
{
var type = typeof(AutoScriptMigrationFake);
var databaseType = "sqlserver";
DefaultMigrationConventions.GetAutoScriptDownName(type, databaseType)
.ShouldBe("Scripts.Down.20130508175300_AutoScriptMigrationFake_sqlserver.sql");
}
}
// Tag-matching fixture: carries all four tags in a single [Tags] attribute.
[Tags("BE", "UK", "Staging", "Production")]
public class TaggedWithBeAndUkAndProductionAndStagingInOneTagsAttribute
{
}
// Tag-matching fixture: same four tags, split across two [Tags] attributes.
[Tags("BE", "UK")]
[Tags("Staging", "Production")]
public class TaggedWithBeAndUkAndProductionAndStagingInTwoTagsAttributes
{
}
// Tag-matching fixture: a single "UK" tag.
[Tags("UK")]
public class TaggedWithUk
{
}
// Tag-matching fixture: a [Tags] attribute that names no tags.
[Tags]
public class HasTagAttributeWithNoTagNames
{
}
// Tag-matching fixture: no [Tags] attribute at all.
public class HasNoTagsFake
{
}
// Migration fixture with version 123, TransactionBehavior.None and a
// "key"="test" trait; exercised by the MigrationInfo* and TypeIsMigration tests.
[Migration(123, TransactionBehavior.None)]
[MigrationTrait("key", "test")]
internal class DefaultConventionMigrationFake : Migration
{
public override void Up() { }
public override void Down() { }
}
// Extends Migration but lacks [Migration]; TypeIsMigration must reject it.
internal class MigrationWithoutAttributeFake : Migration
{
public override void Up() { }
public override void Down() { }
}
}
| |
// Copyright (c) .NET Foundation and contributors. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Diagnostics;
using System.Runtime.InteropServices;
namespace Microsoft.NET.HostModel
{
/// <summary>
/// Provides methods for modifying the embedded native resources
/// in a PE image. It currently only works on Windows, because it
/// requires various kernel32 APIs.
/// </summary>
public class ResourceUpdater : IDisposable
{
    private sealed class Kernel32
    {
        //
        // Native methods for updating resources
        //
        [DllImport(nameof(Kernel32), CharSet = CharSet.Unicode, SetLastError=true)]
        public static extern SafeUpdateHandle BeginUpdateResource(string pFileName,
                                                                  [MarshalAs(UnmanagedType.Bool)]bool bDeleteExistingResources);

        // Update a resource with data from an IntPtr
        [DllImport(nameof(Kernel32), SetLastError=true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool UpdateResource(SafeUpdateHandle hUpdate,
                                                 IntPtr lpType,
                                                 IntPtr lpName,
                                                 ushort wLanguage,
                                                 IntPtr lpData,
                                                 uint cbData);

        // Update a resource with data from a managed byte[]
        [DllImport(nameof(Kernel32), SetLastError=true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool UpdateResource(SafeUpdateHandle hUpdate,
                                                 IntPtr lpType,
                                                 IntPtr lpName,
                                                 ushort wLanguage,
                                                 [MarshalAs(UnmanagedType.LPArray, SizeParamIndex=5)] byte[] lpData,
                                                 uint cbData);

        [DllImport(nameof(Kernel32), SetLastError=true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool EndUpdateResource(SafeUpdateHandle hUpdate,
                                                    bool fDiscard);

        // The IntPtr version of this dllimport is used in the
        // SafeHandle implementation
        [DllImport(nameof(Kernel32), SetLastError=true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool EndUpdateResource(IntPtr hUpdate,
                                                    bool fDiscard);

        public const ushort LangID_LangNeutral_SublangNeutral = 0;

        //
        // Native methods used to read resources from a PE file
        //

        // Loading and freeing PE files
        public enum LoadLibraryFlags : uint
        {
            LOAD_LIBRARY_AS_DATAFILE_EXCLUSIVE = 0x00000040,
            LOAD_LIBRARY_AS_IMAGE_RESOURCE = 0x00000020
        }

        [DllImport(nameof(Kernel32), CharSet = CharSet.Unicode, SetLastError=true)]
        public static extern IntPtr LoadLibraryEx(string lpFileName,
                                                  IntPtr hReservedNull,
                                                  LoadLibraryFlags dwFlags);

        [DllImport(nameof(Kernel32), SetLastError=true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool FreeLibrary(IntPtr hModule);

        // Enumerating resources
        public delegate bool EnumResTypeProc(IntPtr hModule,
                                             IntPtr lpType,
                                             IntPtr lParam);

        public delegate bool EnumResNameProc(IntPtr hModule,
                                             IntPtr lpType,
                                             IntPtr lpName,
                                             IntPtr lParam);

        public delegate bool EnumResLangProc(IntPtr hModule,
                                             IntPtr lpType,
                                             IntPtr lpName,
                                             ushort wLang,
                                             IntPtr lParam);

        [DllImport(nameof(Kernel32), SetLastError=true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool EnumResourceTypes(IntPtr hModule,
                                                    EnumResTypeProc lpEnumFunc,
                                                    IntPtr lParam);

        [DllImport(nameof(Kernel32), SetLastError=true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool EnumResourceNames(IntPtr hModule,
                                                    IntPtr lpType,
                                                    EnumResNameProc lpEnumFunc,
                                                    IntPtr lParam);

        [DllImport(nameof(Kernel32), SetLastError=true)]
        [return: MarshalAs(UnmanagedType.Bool)]
        public static extern bool EnumResourceLanguages(IntPtr hModule,
                                                        IntPtr lpType,
                                                        IntPtr lpName,
                                                        EnumResLangProc lpEnumFunc,
                                                        IntPtr lParam);

        // HRESULT reported when an Enum* callback stops enumeration by
        // returning false (ERROR_RESOURCE_ENUM_USER_STOP).
        public const int UserStoppedResourceEnumerationHRESULT = unchecked((int)0x80073B02);
        // HRESULT reported when the module has no resource section.
        public const int ResourceDataNotFoundHRESULT = unchecked((int)0x80070714);

        // Querying and loading resources
        [DllImport(nameof(Kernel32), SetLastError=true)]
        public static extern IntPtr FindResourceEx(IntPtr hModule,
                                                   IntPtr lpType,
                                                   IntPtr lpName,
                                                   ushort wLanguage);

        [DllImport(nameof(Kernel32), SetLastError=true)]
        public static extern IntPtr LoadResource(IntPtr hModule,
                                                 IntPtr hResInfo);

        [DllImport(nameof(Kernel32))] // does not call SetLastError
        public static extern IntPtr LockResource(IntPtr hResData);

        [DllImport(nameof(Kernel32), SetLastError=true)]
        public static extern uint SizeofResource(IntPtr hModule,
                                                 IntPtr hResInfo);
    }

    /// <summary>
    /// Holds the update handle returned by BeginUpdateResource.
    /// Normally, native resources for the update handle are
    /// released by a call to ResourceUpdater.Update(). In case
    /// this doesn't happen, the SafeUpdateHandle will release the
    /// native resources for the update handle without updating
    /// the target file.
    /// </summary>
    private class SafeUpdateHandle : SafeHandle
    {
        private SafeUpdateHandle() : base(IntPtr.Zero, true)
        {
        }

        public override bool IsInvalid => handle == IntPtr.Zero;

        protected override bool ReleaseHandle()
        {
            // discard pending updates without writing them
            return Kernel32.EndUpdateResource(handle, true);
        }
    }

    /// <summary>
    /// Holds the native handle for the resource update.
    /// </summary>
    private readonly SafeUpdateHandle hUpdate;

    ///<summary>
    /// Determines if the ResourceUpdater is supported by the current operating system.
    /// Some versions of Windows, such as Nano Server, do not support the needed APIs.
    /// </summary>
    public static bool IsSupportedOS()
    {
        if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
        {
            return false;
        }

        try
        {
            // On Nano Server 1709+, `BeginUpdateResource` is exported but returns a null handle with a zero error
            // Try to call `BeginUpdateResource` with an invalid parameter; the error should be non-zero if supported
            using (var handle = Kernel32.BeginUpdateResource("", false))
            {
                if (handle.IsInvalid && Marshal.GetLastWin32Error() == 0)
                {
                    return false;
                }
            }
        }
        catch (EntryPointNotFoundException)
        {
            // BeginUpdateResource isn't exported from Kernel32
            return false;
        }

        return true;
    }

    /// <summary>
    /// Create a resource updater for the given PE file. This will
    /// acquire a native resource update handle for the file,
    /// preparing it for updates. Resources can be added to this
    /// updater, which will queue them for update. The target PE
    /// file will not be modified until Update() is called, after
    /// which the ResourceUpdater can not be used for further
    /// updates.
    /// </summary>
    public ResourceUpdater(string peFile)
    {
        hUpdate = Kernel32.BeginUpdateResource(peFile, false);
        if (hUpdate.IsInvalid)
        {
            ThrowExceptionForLastWin32Error();
        }
    }

    /// <summary>
    /// Add all resources from a source PE file. It is assumed
    /// that the input is a valid PE file. If it is not, an
    /// exception will be thrown. This will not modify the target
    /// until Update() is called.
    /// Throws an InvalidOperationException if Update() was already called.
    /// </summary>
    public ResourceUpdater AddResourcesFromPEImage(string peFile)
    {
        if (hUpdate.IsInvalid)
        {
            ThrowExceptionForInvalidUpdate();
        }

        // Using both flags lets the OS loader decide how to load
        // it most efficiently. Either mode will prevent other
        // processes from modifying the module while it is loaded.
        IntPtr hModule = Kernel32.LoadLibraryEx(peFile, IntPtr.Zero,
                                                Kernel32.LoadLibraryFlags.LOAD_LIBRARY_AS_DATAFILE_EXCLUSIVE |
                                                Kernel32.LoadLibraryFlags.LOAD_LIBRARY_AS_IMAGE_RESOURCE);
        if (hModule == IntPtr.Zero)
        {
            ThrowExceptionForLastWin32Error();
        }

        var enumTypesCallback = new Kernel32.EnumResTypeProc(EnumAndUpdateTypesCallback);
        // The error info object travels through the native enumeration
        // callbacks as a pinned GCHandle so failures deep in the callback
        // chain can be surfaced here.
        var errorInfo = new EnumResourcesErrorInfo();
        GCHandle errorInfoHandle = GCHandle.Alloc(errorInfo);
        var errorInfoPtr = GCHandle.ToIntPtr(errorInfoHandle);

        try
        {
            if (!Kernel32.EnumResourceTypes(hModule, enumTypesCallback, errorInfoPtr))
            {
                // A module with no resource section is not an error here;
                // anything else is reported to the caller.
                if (Marshal.GetHRForLastWin32Error() != Kernel32.ResourceDataNotFoundHRESULT)
                {
                    CaptureEnumResourcesErrorInfo(errorInfoPtr);
                    errorInfo.ThrowException();
                }
            }
        }
        finally
        {
            errorInfoHandle.Free();

            if (!Kernel32.FreeLibrary(hModule))
            {
                ThrowExceptionForLastWin32Error();
            }
        }

        return this;
    }

    /// <summary>
    /// Implements the Win32 IS_INTRESOURCE check: a type/name handle is an
    /// integer ID when its value fits in the low 16 bits; otherwise it is a
    /// pointer to a string. The value is widened through Int64 because
    /// casting an IntPtr directly to uint truncates (or overflows) the upper
    /// half of a 64-bit pointer, which would misclassify real string
    /// pointers as integer IDs.
    /// </summary>
    private static bool IsIntResource(IntPtr lpType)
    {
        return ((ulong)lpType.ToInt64() >> 16) == 0;
    }

    /// <summary>
    /// Add a language-neutral integer resource from a byte[] with
    /// a particular type and name. This will not modify the
    /// target until Update() is called.
    /// Throws an InvalidOperationException if Update() was already called.
    /// </summary>
    public ResourceUpdater AddResource(byte[] data, IntPtr lpType, IntPtr lpName)
    {
        if (hUpdate.IsInvalid)
        {
            ThrowExceptionForInvalidUpdate();
        }

        if (!IsIntResource(lpType) || !IsIntResource(lpName))
        {
            throw new ArgumentException("AddResource can only be used with integer resource types");
        }

        if (!Kernel32.UpdateResource(hUpdate, lpType, lpName, Kernel32.LangID_LangNeutral_SublangNeutral, data, (uint)data.Length))
        {
            ThrowExceptionForLastWin32Error();
        }

        return this;
    }

    /// <summary>
    /// Write the pending resource updates to the target PE
    /// file. After this, the ResourceUpdater no longer maintains
    /// an update handle, and can not be used for further updates.
    /// Throws an InvalidOperationException if Update() was already called.
    /// </summary>
    public void Update()
    {
        if (hUpdate.IsInvalid)
        {
            ThrowExceptionForInvalidUpdate();
        }

        try
        {
            if (!Kernel32.EndUpdateResource(hUpdate, false))
            {
                ThrowExceptionForLastWin32Error();
            }
        }
        finally
        {
            // EndUpdateResource consumes the handle whether it succeeds or
            // fails, so mark it invalid rather than letting the SafeHandle
            // try to release it again.
            hUpdate.SetHandleAsInvalid();
        }
    }

    // EnumResourceTypes callback: recurses into the names of each resource
    // type. Returning false stops the enumeration and records the error.
    private bool EnumAndUpdateTypesCallback(IntPtr hModule, IntPtr lpType, IntPtr lParam)
    {
        var enumNamesCallback = new Kernel32.EnumResNameProc(EnumAndUpdateNamesCallback);
        if (!Kernel32.EnumResourceNames(hModule, lpType, enumNamesCallback, lParam))
        {
            CaptureEnumResourcesErrorInfo(lParam);
            return false;
        }

        return true;
    }

    // EnumResourceNames callback: recurses into the languages of each
    // resource name. Returning false stops the enumeration.
    private bool EnumAndUpdateNamesCallback(IntPtr hModule, IntPtr lpType, IntPtr lpName, IntPtr lParam)
    {
        var enumLanguagesCallback = new Kernel32.EnumResLangProc(EnumAndUpdateLanguagesCallback);
        if (!Kernel32.EnumResourceLanguages(hModule, lpType, lpName, enumLanguagesCallback, lParam))
        {
            CaptureEnumResourcesErrorInfo(lParam);
            return false;
        }

        return true;
    }

    // EnumResourceLanguages callback: copies one concrete resource
    // (type/name/language) from the source module into the pending update.
    private bool EnumAndUpdateLanguagesCallback(IntPtr hModule, IntPtr lpType, IntPtr lpName, ushort wLang, IntPtr lParam)
    {
        IntPtr hResource = Kernel32.FindResourceEx(hModule, lpType, lpName, wLang);
        if (hResource == IntPtr.Zero)
        {
            CaptureEnumResourcesErrorInfo(lParam);
            return false;
        }

        // hResourceLoaded is just a handle to the resource, which
        // can be used to get the resource data
        IntPtr hResourceLoaded = Kernel32.LoadResource(hModule, hResource);
        if (hResourceLoaded == IntPtr.Zero)
        {
            CaptureEnumResourcesErrorInfo(lParam);
            return false;
        }

        // This doesn't actually lock memory. It just retrieves a
        // pointer to the resource data. The pointer is valid
        // until the module is unloaded.
        IntPtr lpResourceData = Kernel32.LockResource(hResourceLoaded);
        if (lpResourceData == IntPtr.Zero)
        {
            // Record the failure and stop the enumeration. Previously the
            // code fell through here and called UpdateResource with a null
            // data pointer, which could clobber the recorded error and
            // violate the hResult == 0 invariant asserted by
            // EnumResourcesErrorInfo.ThrowException.
            ((EnumResourcesErrorInfo)GCHandle.FromIntPtr(lParam).Target).failedToLockResource = true;
            return false;
        }

        if (!Kernel32.UpdateResource(hUpdate, lpType, lpName, wLang, lpResourceData, Kernel32.SizeofResource(hModule, hResource)))
        {
            CaptureEnumResourcesErrorInfo(lParam);
            return false;
        }

        return true;
    }

    // Carries failure details out of the native enumeration callbacks.
    // Exactly one of the two fields is expected to be set on failure:
    // failedToLockResource for a LockResource failure (no Win32 error code),
    // hResult for every other failure.
    private class EnumResourcesErrorInfo
    {
        public int hResult;
        public bool failedToLockResource;

        public void ThrowException()
        {
            if (failedToLockResource)
            {
                Debug.Assert(hResult == 0);
                throw new ResourceNotAvailableException("Failed to lock resource");
            }

            Debug.Assert(hResult != 0);
            throw new HResultException(hResult);
        }
    }

    // Records the current Win32 error into the EnumResourcesErrorInfo behind
    // errorInfoPtr, unless the "error" is just the user-stop code produced
    // by a callback that already recorded the real failure and returned false.
    private static void CaptureEnumResourcesErrorInfo(IntPtr errorInfoPtr)
    {
        int hResult = Marshal.GetHRForLastWin32Error();
        if (hResult != Kernel32.UserStoppedResourceEnumerationHRESULT)
        {
            GCHandle errorInfoHandle = GCHandle.FromIntPtr(errorInfoPtr);
            var errorInfo = (EnumResourcesErrorInfo)errorInfoHandle.Target;
            errorInfo.hResult = hResult;
        }
    }

    // Thrown when LockResource fails, a case that carries no Win32 error code.
    private class ResourceNotAvailableException : Exception
    {
        public ResourceNotAvailableException(string message) : base(message)
        {
        }
    }

    private static void ThrowExceptionForLastWin32Error()
    {
        throw new HResultException(Marshal.GetHRForLastWin32Error());
    }

    private static void ThrowExceptionForInvalidUpdate()
    {
        throw new InvalidOperationException("Update handle is invalid. This instance may not be used for further updates");
    }

    public void Dispose()
    {
        Dispose(true);
        GC.SuppressFinalize(this);
    }

    // NOTE: kept public (rather than the conventional protected virtual)
    // to preserve the existing public interface for callers.
    public void Dispose(bool disposing)
    {
        if (disposing)
        {
            hUpdate.Dispose();
        }
    }
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.18052
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
using System.Xml.Serialization;
//
// This source code was auto-generated by xsd, Version=4.0.30319.17929.
//
/// <summary>
/// Serialization type for the xs:schema document element
/// (namespace http://www.w3.org/2001/XMLSchema), generated by the xsd tool.
/// Tool-generated: changes here are lost when the code is regenerated.
/// </summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class schema : openAttrs {
// Backing fields for the serialized child elements and attributes.
private openAttrs[] itemsField;
private topLevelSimpleType[] simpleTypeField;
private topLevelComplexType[] complexTypeField;
private namedGroup[] groupField;
private namedAttributeGroup[] attributeGroupField;
private topLevelElement[] elementField;
private topLevelAttribute[] attributeField;
private notation[] notationField;
private annotation[] annotationField;
private string targetNamespaceField;
private string versionField;
private string finalDefaultField;
private string blockDefaultField;
private formChoice attributeFormDefaultField;
private formChoice elementFormDefaultField;
private string idField;
private string langField;
// Initializes the attribute defaults declared by the XSD
// (empty finalDefault/blockDefault, unqualified form defaults).
public schema() {
this.finalDefaultField = "";
this.blockDefaultField = "";
this.attributeFormDefaultField = formChoice.unqualified;
this.elementFormDefaultField = formChoice.unqualified;
}
/// <summary>Leading children: annotation, import, include and redefine elements.</summary>
[System.Xml.Serialization.XmlElementAttribute("annotation", typeof(annotation))]
[System.Xml.Serialization.XmlElementAttribute("import", typeof(import))]
[System.Xml.Serialization.XmlElementAttribute("include", typeof(include))]
[System.Xml.Serialization.XmlElementAttribute("redefine", typeof(redefine))]
public openAttrs[] Items {
get {
return this.itemsField;
}
set {
this.itemsField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlElementAttribute("simpleType")]
public topLevelSimpleType[] simpleType {
get {
return this.simpleTypeField;
}
set {
this.simpleTypeField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlElementAttribute("complexType")]
public topLevelComplexType[] complexType {
get {
return this.complexTypeField;
}
set {
this.complexTypeField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlElementAttribute("group")]
public namedGroup[] group {
get {
return this.groupField;
}
set {
this.groupField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlElementAttribute("attributeGroup")]
public namedAttributeGroup[] attributeGroup {
get {
return this.attributeGroupField;
}
set {
this.attributeGroupField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlElementAttribute("element")]
public topLevelElement[] element {
get {
return this.elementField;
}
set {
this.elementField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlElementAttribute("attribute")]
public topLevelAttribute[] attribute {
get {
return this.attributeField;
}
set {
this.attributeField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlElementAttribute("notation")]
public notation[] notation {
get {
return this.notationField;
}
set {
this.notationField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlElementAttribute("annotation")]
public annotation[] annotation {
get {
return this.annotationField;
}
set {
this.annotationField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="anyURI")]
public string targetNamespace {
get {
return this.targetNamespaceField;
}
set {
this.targetNamespaceField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="token")]
public string version {
get {
return this.versionField;
}
set {
this.versionField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlAttributeAttribute()]
[System.ComponentModel.DefaultValueAttribute("")]
public string finalDefault {
get {
return this.finalDefaultField;
}
set {
this.finalDefaultField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlAttributeAttribute()]
[System.ComponentModel.DefaultValueAttribute("")]
public string blockDefault {
get {
return this.blockDefaultField;
}
set {
this.blockDefaultField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlAttributeAttribute()]
[System.ComponentModel.DefaultValueAttribute(formChoice.unqualified)]
public formChoice attributeFormDefault {
get {
return this.attributeFormDefaultField;
}
set {
this.attributeFormDefaultField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlAttributeAttribute()]
[System.ComponentModel.DefaultValueAttribute(formChoice.unqualified)]
public formChoice elementFormDefault {
get {
return this.elementFormDefaultField;
}
set {
this.elementFormDefaultField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="ID")]
public string id {
get {
return this.idField;
}
set {
this.idField = value;
}
}
/// <summary>The xml:lang attribute (qualified, XML namespace).</summary>
[System.Xml.Serialization.XmlAttributeAttribute(Form=System.Xml.Schema.XmlSchemaForm.Qualified, Namespace="http://www.w3.org/XML/1998/namespace")]
public string lang {
get {
return this.langField;
}
set {
this.langField = value;
}
}
}
/// <summary>
/// Serialization type for the xs:annotation element: a sequence of
/// appinfo/documentation children plus an optional id attribute.
/// Tool-generated: changes here are lost when the code is regenerated.
/// </summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class annotation : openAttrs {
private object[] itemsField;
private string idField;
/// <summary>Child elements: appinfo and documentation, in document order.</summary>
[System.Xml.Serialization.XmlElementAttribute("appinfo", typeof(appinfo))]
[System.Xml.Serialization.XmlElementAttribute("documentation", typeof(documentation))]
public object[] Items {
get {
return this.itemsField;
}
set {
this.itemsField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="ID")]
public string id {
get {
return this.idField;
}
set {
this.idField = value;
}
}
}
/// <summary>Models an xs:appinfo element: arbitrary XML/text content plus an optional 'source' URI and open attributes.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class appinfo {
private System.Xml.XmlNode[] anyField;
private string sourceField;
private System.Xml.XmlAttribute[] anyAttrField;
/// <summary>Mixed content: text nodes (via XmlText) and any child elements (via XmlAnyElement).</summary>
[System.Xml.Serialization.XmlTextAttribute()]
[System.Xml.Serialization.XmlAnyElementAttribute()]
public System.Xml.XmlNode[] Any {
get {
return this.anyField;
}
set {
this.anyField = value;
}
}
/// <summary>Optional 'source' attribute (serialized as xs:anyURI).</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="anyURI")]
public string source {
get {
return this.sourceField;
}
set {
this.sourceField = value;
}
}
/// <summary>Any additional attributes not matched by a declared property (wildcard attributes).</summary>
[System.Xml.Serialization.XmlAnyAttributeAttribute()]
public System.Xml.XmlAttribute[] AnyAttr {
get {
return this.anyAttrField;
}
set {
this.anyAttrField = value;
}
}
}
/// <summary>Root base class of the XSD object model (xs:openAttrs). Empty here; the XmlInclude list registers every derived type so XmlSerializer can handle the polymorphic hierarchy.</summary>
[System.Xml.Serialization.XmlIncludeAttribute(typeof(annotated))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(extensionType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(simpleExtensionType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(attributeGroup))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(namedAttributeGroup))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(attributeGroupRef))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(wildcard))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(keybase))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(element))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(localElement))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(narrowMaxMin))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(topLevelElement))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(group))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(explicitGroup))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(all))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(simpleExplicitGroup))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(realGroup))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(groupRef))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(namedGroup))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(restrictionType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(simpleRestrictionType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(complexRestrictionType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(complexType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(topLevelComplexType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(localComplexType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(facet))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(numFacet))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(noFixedFacet))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(simpleType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(topLevelSimpleType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(localSimpleType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(attribute))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(topLevelAttribute))]
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public partial class openAttrs {
}
/// <summary>Models xs:annotated: a base for schema components that may carry an xs:annotation child and an 'id' attribute. XmlInclude registers all derived component types.</summary>
[System.Xml.Serialization.XmlIncludeAttribute(typeof(extensionType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(simpleExtensionType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(attributeGroup))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(namedAttributeGroup))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(attributeGroupRef))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(wildcard))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(keybase))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(element))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(localElement))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(narrowMaxMin))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(topLevelElement))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(group))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(explicitGroup))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(all))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(simpleExplicitGroup))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(realGroup))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(groupRef))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(namedGroup))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(restrictionType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(simpleRestrictionType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(complexRestrictionType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(complexType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(topLevelComplexType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(localComplexType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(facet))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(numFacet))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(noFixedFacet))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(simpleType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(topLevelSimpleType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(localSimpleType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(attribute))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(topLevelAttribute))]
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public partial class annotated : openAttrs {
private annotation annotationField;
private string idField;
/// <summary>Optional xs:annotation child; Order=0 keeps it first in the serialized sequence.</summary>
[System.Xml.Serialization.XmlElementAttribute(Order=0)]
public annotation annotation {
get {
return this.annotationField;
}
set {
this.annotationField = value;
}
}
/// <summary>Optional 'id' attribute (serialized as xs:ID).</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="ID")]
public string id {
get {
return this.idField;
}
set {
this.idField = value;
}
}
}
/// <summary>Models xs:extension (complex-content form): a 'base' type plus an optional content model (group/all/choice/sequence), attribute uses, and an attribute wildcard. Order attributes fix the XML sequence.</summary>
[System.Xml.Serialization.XmlIncludeAttribute(typeof(simpleExtensionType))]
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public partial class extensionType : annotated {
private groupRef groupField;
private all allField;
private explicitGroup choiceField;
private explicitGroup sequenceField;
private annotated[] itemsField;
private wildcard anyAttributeField;
private System.Xml.XmlQualifiedName baseField;
/// <summary>Optional xs:group reference child.</summary>
[System.Xml.Serialization.XmlElementAttribute(Order=0)]
public groupRef group {
get {
return this.groupField;
}
set {
this.groupField = value;
}
}
/// <summary>Optional xs:all child.</summary>
[System.Xml.Serialization.XmlElementAttribute(Order=1)]
public all all {
get {
return this.allField;
}
set {
this.allField = value;
}
}
/// <summary>Optional xs:choice child.</summary>
[System.Xml.Serialization.XmlElementAttribute(Order=2)]
public explicitGroup choice {
get {
return this.choiceField;
}
set {
this.choiceField = value;
}
}
/// <summary>Optional xs:sequence child.</summary>
[System.Xml.Serialization.XmlElementAttribute(Order=3)]
public explicitGroup sequence {
get {
return this.sequenceField;
}
set {
this.sequenceField = value;
}
}
/// <summary>xs:attribute and xs:attributeGroup children, discriminated by runtime type per the XmlElement mappings.</summary>
[System.Xml.Serialization.XmlElementAttribute("attribute", typeof(attribute), Order=4)]
[System.Xml.Serialization.XmlElementAttribute("attributeGroup", typeof(attributeGroupRef), Order=4)]
public annotated[] Items {
get {
return this.itemsField;
}
set {
this.itemsField = value;
}
}
/// <summary>Optional xs:anyAttribute wildcard child.</summary>
[System.Xml.Serialization.XmlElementAttribute(Order=5)]
public wildcard anyAttribute {
get {
return this.anyAttributeField;
}
set {
this.anyAttributeField = value;
}
}
/// <summary>'base' attribute: qualified name of the type being extended.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public System.Xml.XmlQualifiedName @base {
get {
return this.baseField;
}
set {
this.baseField = value;
}
}
}
/// <summary>Models xs:groupRef (a reference to a named model group). Inherits all members from realGroup; exists to give the reference form its own XML type.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public partial class groupRef : realGroup {
}
/// <summary>Models xs:realGroup, the concrete base for group references and named groups; adds no members over group.</summary>
[System.Xml.Serialization.XmlIncludeAttribute(typeof(groupRef))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(namedGroup))]
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public partial class realGroup : group {
}
/// <summary>Models xs:group: a model group (all/any/choice/element/group/sequence particles) with name/ref and occurrence attributes. minOccurs/maxOccurs default to "1" (set in the constructor to match DefaultValue).</summary>
[System.Xml.Serialization.XmlIncludeAttribute(typeof(explicitGroup))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(all))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(simpleExplicitGroup))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(realGroup))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(groupRef))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(namedGroup))]
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public abstract partial class group : annotated {
private annotated[] itemsField;
private ItemsChoiceType[] itemsElementNameField;
private string nameField;
private System.Xml.XmlQualifiedName refField;
private string minOccursField;
private string maxOccursField;
public group() {
this.minOccursField = "1";
this.maxOccursField = "1";
}
/// <summary>Child particles; the element name for each entry is recorded in the parallel ItemsElementName array (XmlChoiceIdentifier).</summary>
[System.Xml.Serialization.XmlElementAttribute("all", typeof(all), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("any", typeof(any), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("choice", typeof(explicitGroup), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("element", typeof(localElement), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("group", typeof(groupRef), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("sequence", typeof(explicitGroup), Order=0)]
[System.Xml.Serialization.XmlChoiceIdentifierAttribute("ItemsElementName")]
public annotated[] Items {
get {
return this.itemsField;
}
set {
this.itemsField = value;
}
}
/// <summary>Choice discriminator for Items; XmlIgnore wins over XmlElement here, so it is not serialized itself (standard xsd.exe output).</summary>
[System.Xml.Serialization.XmlElementAttribute("ItemsElementName", Order=1)]
[System.Xml.Serialization.XmlIgnoreAttribute()]
public ItemsChoiceType[] ItemsElementName {
get {
return this.itemsElementNameField;
}
set {
this.itemsElementNameField = value;
}
}
/// <summary>'name' attribute (serialized as xs:NCName), used by named group definitions.</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="NCName")]
public string name {
get {
return this.nameField;
}
set {
this.nameField = value;
}
}
/// <summary>'ref' attribute: qualified name of a referenced named group.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public System.Xml.XmlQualifiedName @ref {
get {
return this.refField;
}
set {
this.refField = value;
}
}
/// <summary>'minOccurs' attribute (xs:nonNegativeInteger); default "1" is omitted on serialization.</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="nonNegativeInteger")]
[System.ComponentModel.DefaultValueAttribute("1")]
public string minOccurs {
get {
return this.minOccursField;
}
set {
this.minOccursField = value;
}
}
/// <summary>'maxOccurs' attribute (may be "unbounded", hence plain string); default "1" is omitted on serialization.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
[System.ComponentModel.DefaultValueAttribute("1")]
public string maxOccurs {
get {
return this.maxOccursField;
}
set {
this.maxOccursField = value;
}
}
}
/// <summary>Models the xs:all model group; structurally identical to explicitGroup, distinguished only by its XML type/root name.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class all : explicitGroup {
}
/// <summary>Models xs:explicitGroup (the shared shape of xs:choice and xs:sequence); root element name "choice" here, "sequence" is mapped at the usage sites.</summary>
[System.Xml.Serialization.XmlIncludeAttribute(typeof(all))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(simpleExplicitGroup))]
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute("choice", Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class explicitGroup : group {
}
/// <summary>Models xs:simpleExplicitGroup (choice/sequence without occurrence attributes in the schema-for-schemas); adds no members over explicitGroup.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public partial class simpleExplicitGroup : explicitGroup {
}
/// <summary>Models the xs:any element wildcard: a wildcard plus its own minOccurs/maxOccurs occurrence attributes (both defaulting to "1").</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class any : wildcard {
private string minOccursField;
private string maxOccursField;
public any() {
this.minOccursField = "1";
this.maxOccursField = "1";
}
/// <summary>'minOccurs' attribute (xs:nonNegativeInteger); default "1" is omitted on serialization.</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="nonNegativeInteger")]
[System.ComponentModel.DefaultValueAttribute("1")]
public string minOccurs {
get {
return this.minOccursField;
}
set {
this.minOccursField = value;
}
}
/// <summary>'maxOccurs' attribute (may be "unbounded", hence plain string); default "1" is omitted on serialization.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
[System.ComponentModel.DefaultValueAttribute("1")]
public string maxOccurs {
get {
return this.maxOccursField;
}
set {
this.maxOccursField = value;
}
}
}
/// <summary>Models xs:wildcard (root element "anyAttribute"): a namespace constraint (default "##any") and a processContents mode (default strict), both set in the constructor to match their DefaultValue attributes.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute("anyAttribute", Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class wildcard : annotated {
private string namespaceField;
private wildcardProcessContents processContentsField;
public wildcard() {
this.namespaceField = "##any";
this.processContentsField = wildcardProcessContents.strict;
}
/// <summary>'namespace' constraint attribute; default "##any" is omitted on serialization.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
[System.ComponentModel.DefaultValueAttribute("##any")]
public string @namespace {
get {
return this.namespaceField;
}
set {
this.namespaceField = value;
}
}
/// <summary>'processContents' attribute (skip/lax/strict); default strict is omitted on serialization.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
[System.ComponentModel.DefaultValueAttribute(wildcardProcessContents.strict)]
public wildcardProcessContents processContents {
get {
return this.processContentsField;
}
set {
this.processContentsField = value;
}
}
}
/// <summary>Values of the wildcard 'processContents' attribute, controlling how wildcard-matched content is validated.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
public enum wildcardProcessContents {
/// <summary>No validation of matched content.</summary>
skip,
/// <summary>Validate matched content if a declaration is available.</summary>
lax,
/// <summary>Matched content must be declared and valid.</summary>
strict,
}
/// <summary>Models xs:localElement (an element declared inside a content model); adds no members over element.</summary>
[System.Xml.Serialization.XmlIncludeAttribute(typeof(narrowMaxMin))]
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public partial class localElement : element {
}
/// <summary>Models xs:element: an element declaration with an optional anonymous type (complexType/simpleType), identity constraints (unique/key/keyref), and the full set of declaration attributes. minOccurs/maxOccurs default to "1" and nillable/abstract to false (set in the constructor to match DefaultValue).</summary>
[System.Xml.Serialization.XmlIncludeAttribute(typeof(localElement))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(narrowMaxMin))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(topLevelElement))]
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public abstract partial class element : annotated {
private annotated itemField;
private keybase[] uniqueField;
private keybase[] keyField;
private keyref[] keyrefField;
private string nameField;
private System.Xml.XmlQualifiedName refField;
private System.Xml.XmlQualifiedName typeField;
private System.Xml.XmlQualifiedName substitutionGroupField;
private string minOccursField;
private string maxOccursField;
private string defaultField;
private string fixedField;
private bool nillableField;
private bool abstractField;
private string finalField;
private string blockField;
private formChoice formField;
private bool formFieldSpecified;
public element() {
this.minOccursField = "1";
this.maxOccursField = "1";
this.nillableField = false;
this.abstractField = false;
}
/// <summary>Inline anonymous type: either a local complexType or a local simpleType, discriminated by runtime type.</summary>
[System.Xml.Serialization.XmlElementAttribute("complexType", typeof(localComplexType), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("simpleType", typeof(localSimpleType), Order=0)]
public annotated Item {
get {
return this.itemField;
}
set {
this.itemField = value;
}
}
/// <summary>xs:unique identity constraints declared on this element.</summary>
[System.Xml.Serialization.XmlElementAttribute("unique", Order=1)]
public keybase[] unique {
get {
return this.uniqueField;
}
set {
this.uniqueField = value;
}
}
/// <summary>xs:key identity constraints declared on this element.</summary>
[System.Xml.Serialization.XmlElementAttribute("key", Order=2)]
public keybase[] key {
get {
return this.keyField;
}
set {
this.keyField = value;
}
}
/// <summary>xs:keyref identity constraints declared on this element.</summary>
[System.Xml.Serialization.XmlElementAttribute("keyref", Order=3)]
public keyref[] keyref {
get {
return this.keyrefField;
}
set {
this.keyrefField = value;
}
}
/// <summary>'name' attribute (serialized as xs:NCName).</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="NCName")]
public string name {
get {
return this.nameField;
}
set {
this.nameField = value;
}
}
/// <summary>'ref' attribute: qualified name of a referenced global element declaration.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public System.Xml.XmlQualifiedName @ref {
get {
return this.refField;
}
set {
this.refField = value;
}
}
/// <summary>'type' attribute: qualified name of the element's type.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public System.Xml.XmlQualifiedName type {
get {
return this.typeField;
}
set {
this.typeField = value;
}
}
/// <summary>'substitutionGroup' attribute: qualified name of the head element this one may substitute for.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public System.Xml.XmlQualifiedName substitutionGroup {
get {
return this.substitutionGroupField;
}
set {
this.substitutionGroupField = value;
}
}
/// <summary>'minOccurs' attribute (xs:nonNegativeInteger); default "1" is omitted on serialization.</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="nonNegativeInteger")]
[System.ComponentModel.DefaultValueAttribute("1")]
public string minOccurs {
get {
return this.minOccursField;
}
set {
this.minOccursField = value;
}
}
/// <summary>'maxOccurs' attribute (may be "unbounded", hence plain string); default "1" is omitted on serialization.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
[System.ComponentModel.DefaultValueAttribute("1")]
public string maxOccurs {
get {
return this.maxOccursField;
}
set {
this.maxOccursField = value;
}
}
/// <summary>'default' attribute: default element value.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public string @default {
get {
return this.defaultField;
}
set {
this.defaultField = value;
}
}
/// <summary>'fixed' attribute: fixed element value.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public string @fixed {
get {
return this.fixedField;
}
set {
this.fixedField = value;
}
}
/// <summary>'nillable' attribute; default false is omitted on serialization.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
[System.ComponentModel.DefaultValueAttribute(false)]
public bool nillable {
get {
return this.nillableField;
}
set {
this.nillableField = value;
}
}
/// <summary>'abstract' attribute; default false is omitted on serialization.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
[System.ComponentModel.DefaultValueAttribute(false)]
public bool @abstract {
get {
return this.abstractField;
}
set {
this.abstractField = value;
}
}
/// <summary>'final' attribute: derivation methods blocked for substitution-group heads.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public string final {
get {
return this.finalField;
}
set {
this.finalField = value;
}
}
/// <summary>'block' attribute: blocked substitution/derivation methods.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public string block {
get {
return this.blockField;
}
set {
this.blockField = value;
}
}
/// <summary>'form' attribute (qualified/unqualified); only serialized when formSpecified is true.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public formChoice form {
get {
return this.formField;
}
set {
this.formField = value;
}
}
/// <summary>Specified-flag for the optional 'form' value-type attribute (standard xsd.exe pattern; ignored in XML).</summary>
[System.Xml.Serialization.XmlIgnoreAttribute()]
public bool formSpecified {
get {
return this.formFieldSpecified;
}
set {
this.formFieldSpecified = value;
}
}
}
/// <summary>Models xs:localComplexType (an anonymous complex type declared inline); adds no members over complexType.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public partial class complexType : annotated {
}
/// <summary>Models xs:complexType: content particles/attributes recorded as a choice array (discriminated by ItemsElementName) plus the name/mixed/abstract/final/block attributes. mixed and abstract default to false (set in the constructor to match DefaultValue).</summary>
[System.Xml.Serialization.XmlIncludeAttribute(typeof(topLevelComplexType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(localComplexType))]
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public abstract partial class complexType : annotated {
private annotated[] itemsField;
private ItemsChoiceType2[] itemsElementNameField;
private string nameField;
private bool mixedField;
private bool abstractField;
private string finalField;
private string blockField;
public complexType() {
this.mixedField = false;
this.abstractField = false;
}
/// <summary>Content children (all/anyAttribute/attribute/attributeGroup/choice/complexContent/group/sequence/simpleContent); element names tracked by the parallel ItemsElementName array.</summary>
[System.Xml.Serialization.XmlElementAttribute("all", typeof(all), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("anyAttribute", typeof(wildcard), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("attribute", typeof(attribute), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("attributeGroup", typeof(attributeGroupRef), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("choice", typeof(explicitGroup), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("complexContent", typeof(complexContent), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("group", typeof(groupRef), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("sequence", typeof(explicitGroup), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("simpleContent", typeof(simpleContent), Order=0)]
[System.Xml.Serialization.XmlChoiceIdentifierAttribute("ItemsElementName")]
public annotated[] Items {
get {
return this.itemsField;
}
set {
this.itemsField = value;
}
}
/// <summary>Choice discriminator for Items; XmlIgnore wins over XmlElement here, so it is not serialized itself (standard xsd.exe output).</summary>
[System.Xml.Serialization.XmlElementAttribute("ItemsElementName", Order=1)]
[System.Xml.Serialization.XmlIgnoreAttribute()]
public ItemsChoiceType2[] ItemsElementName {
get {
return this.itemsElementNameField;
}
set {
this.itemsElementNameField = value;
}
}
/// <summary>'name' attribute (serialized as xs:NCName).</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="NCName")]
public string name {
get {
return this.nameField;
}
set {
this.nameField = value;
}
}
/// <summary>'mixed' attribute; default false is omitted on serialization.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
[System.ComponentModel.DefaultValueAttribute(false)]
public bool mixed {
get {
return this.mixedField;
}
set {
this.mixedField = value;
}
}
/// <summary>'abstract' attribute; default false is omitted on serialization.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
[System.ComponentModel.DefaultValueAttribute(false)]
public bool @abstract {
get {
return this.abstractField;
}
set {
this.abstractField = value;
}
}
/// <summary>'final' attribute: derivation methods disallowed for this type.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public string final {
get {
return this.finalField;
}
set {
this.finalField = value;
}
}
/// <summary>'block' attribute: derivation methods blocked in instances.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public string block {
get {
return this.blockField;
}
set {
this.blockField = value;
}
}
}
/// <summary>Models xs:attribute: an attribute declaration with an optional inline simpleType and name/ref/type/use/default/fixed/form attributes. 'use' defaults to optional (set in the constructor to match DefaultValue).</summary>
[System.Xml.Serialization.XmlIncludeAttribute(typeof(topLevelAttribute))]
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public partial class attribute : annotated {
private localSimpleType simpleTypeField;
private string nameField;
private System.Xml.XmlQualifiedName refField;
private System.Xml.XmlQualifiedName typeField;
private attributeUse useField;
private string defaultField;
private string fixedField;
private formChoice formField;
private bool formFieldSpecified;
public attribute() {
this.useField = attributeUse.optional;
}
/// <summary>Optional inline anonymous simple type.</summary>
[System.Xml.Serialization.XmlElementAttribute(Order=0)]
public localSimpleType simpleType {
get {
return this.simpleTypeField;
}
set {
this.simpleTypeField = value;
}
}
/// <summary>'name' attribute (serialized as xs:NCName).</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="NCName")]
public string name {
get {
return this.nameField;
}
set {
this.nameField = value;
}
}
/// <summary>'ref' attribute: qualified name of a referenced global attribute declaration.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public System.Xml.XmlQualifiedName @ref {
get {
return this.refField;
}
set {
this.refField = value;
}
}
/// <summary>'type' attribute: qualified name of the attribute's simple type.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public System.Xml.XmlQualifiedName type {
get {
return this.typeField;
}
set {
this.typeField = value;
}
}
/// <summary>'use' attribute (optional/prohibited/required); default optional is omitted on serialization.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
[System.ComponentModel.DefaultValueAttribute(attributeUse.optional)]
public attributeUse use {
get {
return this.useField;
}
set {
this.useField = value;
}
}
/// <summary>'default' attribute: default attribute value.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public string @default {
get {
return this.defaultField;
}
set {
this.defaultField = value;
}
}
/// <summary>'fixed' attribute: fixed attribute value.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public string @fixed {
get {
return this.fixedField;
}
set {
this.fixedField = value;
}
}
/// <summary>'form' attribute (qualified/unqualified); only serialized when formSpecified is true.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public formChoice form {
get {
return this.formField;
}
set {
this.formField = value;
}
}
/// <summary>Specified-flag for the optional 'form' value-type attribute (standard xsd.exe pattern; ignored in XML).</summary>
[System.Xml.Serialization.XmlIgnoreAttribute()]
public bool formSpecified {
get {
return this.formFieldSpecified;
}
set {
this.formFieldSpecified = value;
}
}
}
/// <summary>Models xs:localSimpleType (an anonymous simple type declared inline); adds no members over simpleType.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public partial class localSimpleType : simpleType {
}
/// <summary>Models xs:simpleType: exactly one derivation child (list, restriction, or union) plus 'final' and 'name' attributes.</summary>
[System.Xml.Serialization.XmlIncludeAttribute(typeof(topLevelSimpleType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(localSimpleType))]
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public abstract partial class simpleType : annotated {
private annotated itemField;
private string finalField;
private string nameField;
/// <summary>Derivation child: xs:list, xs:restriction, or xs:union, discriminated by runtime type.</summary>
[System.Xml.Serialization.XmlElementAttribute("list", typeof(list), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("restriction", typeof(restriction), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("union", typeof(union), Order=0)]
public annotated Item {
get {
return this.itemField;
}
set {
this.itemField = value;
}
}
/// <summary>'final' attribute: derivation methods disallowed for this type.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public string final {
get {
return this.finalField;
}
set {
this.finalField = value;
}
}
/// <summary>'name' attribute (serialized as xs:NCName).</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="NCName")]
public string name {
get {
return this.nameField;
}
set {
this.nameField = value;
}
}
}
/// <summary>Models xs:list (a simple-type list derivation): either an inline item simpleType or an 'itemType' qualified-name attribute.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class list : annotated {
private localSimpleType simpleTypeField;
private System.Xml.XmlQualifiedName itemTypeField;
/// <summary>Optional inline anonymous item type.</summary>
public localSimpleType simpleType {
get {
return this.simpleTypeField;
}
set {
this.simpleTypeField = value;
}
}
/// <summary>'itemType' attribute: qualified name of the list's item type.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public System.Xml.XmlQualifiedName itemType {
get {
return this.itemTypeField;
}
set {
this.itemTypeField = value;
}
}
}
/// <summary>
/// Generated (xsd.exe) model of the XSD &lt;restriction&gt; element inside a simple type:
/// an optional inline base type plus one array per constraining facet kind, and the
/// <see cref="base"/> attribute naming the restricted base type.
/// Do not rename members: System.Xml.Serialization binds by member name.
/// </summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class restriction : annotated {
private localSimpleType simpleTypeField;
private facet[] minExclusiveField;
private facet[] minInclusiveField;
private facet[] maxExclusiveField;
private facet[] maxInclusiveField;
private totalDigits[] totalDigitsField;
private numFacet[] fractionDigitsField;
private numFacet[] lengthField;
private numFacet[] minLengthField;
private numFacet[] maxLengthField;
private noFixedFacet[] enumerationField;
private whiteSpace[] whiteSpaceField;
private pattern[] patternField;
private System.Xml.XmlQualifiedName baseField;
/// <summary>Inline anonymous base simple type (alternative to the <see cref="base"/> attribute).</summary>
public localSimpleType simpleType {
get {
return this.simpleTypeField;
}
set {
this.simpleTypeField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlElementAttribute("minExclusive")]
public facet[] minExclusive {
get {
return this.minExclusiveField;
}
set {
this.minExclusiveField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlElementAttribute("minInclusive")]
public facet[] minInclusive {
get {
return this.minInclusiveField;
}
set {
this.minInclusiveField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlElementAttribute("maxExclusive")]
public facet[] maxExclusive {
get {
return this.maxExclusiveField;
}
set {
this.maxExclusiveField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlElementAttribute("maxInclusive")]
public facet[] maxInclusive {
get {
return this.maxInclusiveField;
}
set {
this.maxInclusiveField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlElementAttribute("totalDigits")]
public totalDigits[] totalDigits {
get {
return this.totalDigitsField;
}
set {
this.totalDigitsField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlElementAttribute("fractionDigits")]
public numFacet[] fractionDigits {
get {
return this.fractionDigitsField;
}
set {
this.fractionDigitsField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlElementAttribute("length")]
public numFacet[] length {
get {
return this.lengthField;
}
set {
this.lengthField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlElementAttribute("minLength")]
public numFacet[] minLength {
get {
return this.minLengthField;
}
set {
this.minLengthField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlElementAttribute("maxLength")]
public numFacet[] maxLength {
get {
return this.maxLengthField;
}
set {
this.maxLengthField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlElementAttribute("enumeration")]
public noFixedFacet[] enumeration {
get {
return this.enumerationField;
}
set {
this.enumerationField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlElementAttribute("whiteSpace")]
public whiteSpace[] whiteSpace {
get {
return this.whiteSpaceField;
}
set {
this.whiteSpaceField = value;
}
}
/// <remarks/>
[System.Xml.Serialization.XmlElementAttribute("pattern")]
public pattern[] pattern {
get {
return this.patternField;
}
set {
this.patternField = value;
}
}
/// <summary>Qualified name of the base type being restricted (the XSD 'base' attribute).</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public System.Xml.XmlQualifiedName @base {
get {
return this.baseField;
}
set {
this.baseField = value;
}
}
}
/// <summary>
/// Generated (xsd.exe) base class for XSD constraining facets (minExclusive, maxInclusive, …):
/// a <see cref="value"/> plus an optional <see cref="fixed"/> flag (defaults to false).
/// </summary>
[System.Xml.Serialization.XmlIncludeAttribute(typeof(numFacet))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(noFixedFacet))]
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute("minExclusive", Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class facet : annotated {
private string valueField;
private bool fixedField;
// Default 'fixed' to false to match the XSD schema default; DefaultValueAttribute
// below keeps the serializer from emitting the attribute when it is still false.
public facet() {
this.fixedField = false;
}
/// <summary>The facet's value, serialized as the XSD 'value' attribute.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public string value {
get {
return this.valueField;
}
set {
this.valueField = value;
}
}
/// <summary>When true, derived types may not further alter this facet's value.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
[System.ComponentModel.DefaultValueAttribute(false)]
public bool @fixed {
get {
return this.fixedField;
}
set {
this.fixedField = value;
}
}
}
/// <summary>XSD 'totalDigits' facet; marker subtype of <see cref="numFacet"/> (no extra members).</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class totalDigits : numFacet {
}
/// <summary>Base type for numeric-valued XSD facets (fractionDigits, length, minLength, maxLength); adds nothing over <see cref="facet"/>.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute("fractionDigits", Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class numFacet : facet {
}
/// <summary>Base type for XSD facets that do not honor the 'fixed' attribute (e.g. enumeration); marker subtype of <see cref="facet"/>.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute("enumeration", Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class noFixedFacet : facet {
}
/// <summary>XSD 'whiteSpace' facet; marker subtype of <see cref="facet"/> (no extra members).</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class whiteSpace : facet {
}
/// <summary>XSD 'pattern' facet; marker subtype of <see cref="noFixedFacet"/> (no extra members).</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class pattern : noFixedFacet {
}
/// <summary>
/// Generated (xsd.exe) model of the XSD &lt;union&gt; simple-type derivation: member types may be
/// given inline (<see cref="simpleType"/>) and/or by name (<see cref="memberTypes"/>).
/// </summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class union : annotated {
private localSimpleType[] simpleTypeField;
private System.Xml.XmlQualifiedName[] memberTypesField;
/// <summary>Inline anonymous member types of the union.</summary>
[System.Xml.Serialization.XmlElementAttribute("simpleType")]
public localSimpleType[] simpleType {
get {
return this.simpleTypeField;
}
set {
this.simpleTypeField = value;
}
}
/// <summary>Qualified names of named member types (the XSD 'memberTypes' list attribute).</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public System.Xml.XmlQualifiedName[] memberTypes {
get {
return this.memberTypesField;
}
set {
this.memberTypesField = value;
}
}
}
/// <summary>Values of the XSD attribute 'use' attribute: prohibited, optional (default), or required.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
public enum attributeUse {
/// <summary>The attribute must not appear.</summary>
prohibited,
/// <summary>The attribute may appear.</summary>
optional,
/// <summary>The attribute must appear.</summary>
required,
}
/// <summary>Values of the XSD 'form' / 'elementFormDefault' / 'attributeFormDefault' attributes.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public enum formChoice {
/// <summary>Names must be namespace-qualified.</summary>
qualified,
/// <summary>Names are unqualified (no namespace).</summary>
unqualified,
}
/// <summary>An &lt;attributeGroup ref="…"/&gt; reference; marker subtype of <see cref="attributeGroup"/> (uses the inherited @ref).</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public partial class attributeGroupRef : attributeGroup {
}
/// <summary>
/// Generated (xsd.exe) abstract model of an XSD attribute group: nested attribute /
/// attributeGroup items, an optional anyAttribute wildcard, and either a name
/// (definition) or a ref (reference). Element Order indices preserve schema ordering.
/// </summary>
[System.Xml.Serialization.XmlIncludeAttribute(typeof(namedAttributeGroup))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(attributeGroupRef))]
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public abstract partial class attributeGroup : annotated {
private annotated[] itemsField;
private wildcard anyAttributeField;
private string nameField;
private System.Xml.XmlQualifiedName refField;
/// <summary>Mixed sequence of &lt;attribute&gt; and &lt;attributeGroup&gt; children; element type discriminates.</summary>
[System.Xml.Serialization.XmlElementAttribute("attribute", typeof(attribute), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("attributeGroup", typeof(attributeGroupRef), Order=0)]
public annotated[] Items {
get {
return this.itemsField;
}
set {
this.itemsField = value;
}
}
/// <summary>Optional &lt;anyAttribute&gt; wildcard, serialized after <see cref="Items"/>.</summary>
[System.Xml.Serialization.XmlElementAttribute(Order=1)]
public wildcard anyAttribute {
get {
return this.anyAttributeField;
}
set {
this.anyAttributeField = value;
}
}
/// <summary>NCName of the group when this is a definition.</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="NCName")]
public string name {
get {
return this.nameField;
}
set {
this.nameField = value;
}
}
/// <summary>Qualified name of a referenced group when this is a reference.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public System.Xml.XmlQualifiedName @ref {
get {
return this.refField;
}
set {
this.refField = value;
}
}
}
/// <summary>
/// Generated (xsd.exe) model of XSD &lt;complexContent&gt;: exactly one &lt;extension&gt; or
/// &lt;restriction&gt; child plus an optional 'mixed' attribute. The *Specified flag
/// tells the serializer whether 'mixed' was present/should be written.
/// </summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class complexContent : annotated {
private annotated itemField;
private bool mixedField;
private bool mixedFieldSpecified;
/// <summary>The single extension/restriction child; concrete type discriminates which element was read.</summary>
[System.Xml.Serialization.XmlElementAttribute("extension", typeof(extensionType))]
[System.Xml.Serialization.XmlElementAttribute("restriction", typeof(complexRestrictionType))]
public annotated Item {
get {
return this.itemField;
}
set {
this.itemField = value;
}
}
/// <summary>The XSD 'mixed' attribute; only meaningful when <see cref="mixedSpecified"/> is true.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public bool mixed {
get {
return this.mixedField;
}
set {
this.mixedField = value;
}
}
/// <summary>Serializer-only flag: true when the 'mixed' attribute was present / should be emitted.</summary>
[System.Xml.Serialization.XmlIgnoreAttribute()]
public bool mixedSpecified {
get {
return this.mixedFieldSpecified;
}
set {
this.mixedFieldSpecified = value;
}
}
}
/// <summary>Restriction inside &lt;complexContent&gt;; marker subtype of <see cref="restrictionType"/> (no extra members).</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public partial class complexRestrictionType : restrictionType {
}
/// <summary>
/// Generated (xsd.exe) base model for restriction content (simple or complex):
/// <see cref="Items"/> holds the particle/facet children with <see cref="ItemsElementName"/>
/// as the parallel choice-identifier array (same length, one <see cref="ItemsChoiceType1"/>
/// per item), followed by attribute declarations (<see cref="Items1"/>), an optional
/// anyAttribute wildcard, and the 'base' attribute.
/// </summary>
[System.Xml.Serialization.XmlIncludeAttribute(typeof(simpleRestrictionType))]
[System.Xml.Serialization.XmlIncludeAttribute(typeof(complexRestrictionType))]
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public partial class restrictionType : annotated {
private annotated[] itemsField;
private ItemsChoiceType1[] itemsElementNameField;
private annotated[] items1Field;
private wildcard anyAttributeField;
private System.Xml.XmlQualifiedName baseField;
/// <summary>Particle and facet children; <see cref="ItemsElementName"/> records which element each entry came from.</summary>
[System.Xml.Serialization.XmlElementAttribute("all", typeof(all), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("choice", typeof(explicitGroup), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("enumeration", typeof(noFixedFacet), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("fractionDigits", typeof(numFacet), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("group", typeof(groupRef), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("length", typeof(numFacet), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("maxExclusive", typeof(facet), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("maxInclusive", typeof(facet), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("maxLength", typeof(numFacet), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("minExclusive", typeof(facet), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("minInclusive", typeof(facet), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("minLength", typeof(numFacet), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("pattern", typeof(pattern), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("sequence", typeof(explicitGroup), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("simpleType", typeof(localSimpleType), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("totalDigits", typeof(totalDigits), Order=0)]
[System.Xml.Serialization.XmlElementAttribute("whiteSpace", typeof(whiteSpace), Order=0)]
[System.Xml.Serialization.XmlChoiceIdentifierAttribute("ItemsElementName")]
public annotated[] Items {
get {
return this.itemsField;
}
set {
this.itemsField = value;
}
}
/// <summary>Choice identifiers parallel to <see cref="Items"/>; XmlIgnore keeps it out of the output itself.</summary>
[System.Xml.Serialization.XmlElementAttribute("ItemsElementName", Order=1)]
[System.Xml.Serialization.XmlIgnoreAttribute()]
public ItemsChoiceType1[] ItemsElementName {
get {
return this.itemsElementNameField;
}
set {
this.itemsElementNameField = value;
}
}
/// <summary>Attribute declarations and attribute-group references, serialized after the particles.</summary>
[System.Xml.Serialization.XmlElementAttribute("attribute", typeof(attribute), Order=2)]
[System.Xml.Serialization.XmlElementAttribute("attributeGroup", typeof(attributeGroupRef), Order=2)]
public annotated[] Items1 {
get {
return this.items1Field;
}
set {
this.items1Field = value;
}
}
/// <summary>Optional &lt;anyAttribute&gt; wildcard.</summary>
[System.Xml.Serialization.XmlElementAttribute(Order=3)]
public wildcard anyAttribute {
get {
return this.anyAttributeField;
}
set {
this.anyAttributeField = value;
}
}
/// <summary>Qualified name of the type being restricted (the XSD 'base' attribute).</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public System.Xml.XmlQualifiedName @base {
get {
return this.baseField;
}
set {
this.baseField = value;
}
}
}
/// <summary>
/// Choice identifiers for <see cref="restrictionType.Items"/>: names the XML element each
/// Items entry was deserialized from. IncludeInSchema=false — this enum is a serializer
/// artifact, not part of the schema itself.
/// </summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IncludeInSchema=false)]
public enum ItemsChoiceType1 {
/// <remarks/>
all,
/// <remarks/>
choice,
/// <remarks/>
enumeration,
/// <remarks/>
fractionDigits,
/// <remarks/>
group,
/// <remarks/>
length,
/// <remarks/>
maxExclusive,
/// <remarks/>
maxInclusive,
/// <remarks/>
maxLength,
/// <remarks/>
minExclusive,
/// <remarks/>
minInclusive,
/// <remarks/>
minLength,
/// <remarks/>
pattern,
/// <remarks/>
sequence,
/// <remarks/>
simpleType,
/// <remarks/>
totalDigits,
/// <remarks/>
whiteSpace,
}
/// <summary>Restriction inside &lt;simpleContent&gt;; marker subtype of <see cref="restrictionType"/> (no extra members).</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public partial class simpleRestrictionType : restrictionType {
}
/// <summary>
/// Generated (xsd.exe) model of XSD &lt;simpleContent&gt;: exactly one &lt;extension&gt; or
/// &lt;restriction&gt; child, discriminated by the concrete type of <see cref="Item"/>.
/// </summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class simpleContent : annotated {
private annotated itemField;
/// <summary>The single extension/restriction child.</summary>
[System.Xml.Serialization.XmlElementAttribute("extension", typeof(simpleExtensionType))]
[System.Xml.Serialization.XmlElementAttribute("restriction", typeof(simpleRestrictionType))]
public annotated Item {
get {
return this.itemField;
}
set {
this.itemField = value;
}
}
}
/// <summary>Extension inside &lt;simpleContent&gt;; marker subtype of extensionType (no extra members).</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public partial class simpleExtensionType : extensionType {
}
/// <summary>
/// Choice identifiers for complex-type content members: names the XML element an Items
/// entry was deserialized from. Serializer artifact only (IncludeInSchema=false).
/// </summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IncludeInSchema=false)]
public enum ItemsChoiceType2 {
/// <remarks/>
all,
/// <remarks/>
anyAttribute,
/// <remarks/>
attribute,
/// <remarks/>
attributeGroup,
/// <remarks/>
choice,
/// <remarks/>
complexContent,
/// <remarks/>
group,
/// <remarks/>
sequence,
/// <remarks/>
simpleContent,
}
/// <summary>
/// Generated (xsd.exe) base model for XSD identity constraints (unique / key / keyref):
/// one &lt;selector&gt;, one or more &lt;field&gt;s, and the constraint's NCName.
/// </summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute("unique", Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class keybase : annotated {
private selector selectorField;
private field[] fieldField;
private string nameField;
/// <summary>XPath selector that picks the node set the constraint applies to.</summary>
[System.Xml.Serialization.XmlElementAttribute(Order=0)]
public selector selector {
get {
return this.selectorField;
}
set {
this.selectorField = value;
}
}
/// <summary>XPath fields, relative to the selector, that form the constraint's value tuple.</summary>
[System.Xml.Serialization.XmlElementAttribute("field", Order=1)]
public field[] field {
get {
return this.fieldField;
}
set {
this.fieldField = value;
}
}
/// <summary>NCName identifying this constraint.</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="NCName")]
public string name {
get {
return this.nameField;
}
set {
this.nameField = value;
}
}
}
/// <summary>The &lt;selector&gt; of an identity constraint: a restricted XPath expression choosing the constrained nodes.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class selector : annotated {
private string xpathField;
/// <summary>The selector's XPath expression (xs:token).</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="token")]
public string xpath {
get {
return this.xpathField;
}
set {
this.xpathField = value;
}
}
}
/// <summary>A &lt;field&gt; of an identity constraint: an XPath (relative to the selector) yielding one value of the key tuple.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class field : annotated {
private string xpathField;
/// <summary>The field's XPath expression (xs:token).</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="token")]
public string xpath {
get {
return this.xpathField;
}
set {
this.xpathField = value;
}
}
}
/// <summary>The XSD &lt;keyref&gt; identity constraint: adds the 'refer' attribute naming the key/unique it references.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class keyref : keybase {
private System.Xml.XmlQualifiedName referField;
/// <summary>Qualified name of the key or unique constraint this keyref refers to.</summary>
[System.Xml.Serialization.XmlAttributeAttribute()]
public System.Xml.XmlQualifiedName refer {
get {
return this.referField;
}
set {
this.referField = value;
}
}
}
/// <summary>Local element with narrowed minOccurs/maxOccurs (used inside &lt;all&gt;); marker subtype of localElement.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
public partial class narrowMaxMin : localElement {
}
/// <summary>
/// Choice identifiers for group-particle members: names the XML element an Items entry
/// was deserialized from. Serializer artifact only (IncludeInSchema=false).
/// </summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IncludeInSchema=false)]
public enum ItemsChoiceType {
/// <remarks/>
all,
/// <remarks/>
any,
/// <remarks/>
choice,
/// <remarks/>
element,
/// <remarks/>
group,
/// <remarks/>
sequence,
}
/// <summary>
/// Generated (xsd.exe) model of the XSD &lt;documentation&gt; element inside &lt;annotation&gt;:
/// arbitrary mixed content plus optional 'source' and xml:lang attributes, with a
/// catch-all for any other attributes.
/// </summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class documentation {
private System.Xml.XmlNode[] anyField;
private string sourceField;
private string langField;
private System.Xml.XmlAttribute[] anyAttrField;
/// <summary>Mixed content: text nodes and arbitrary child elements of the documentation.</summary>
[System.Xml.Serialization.XmlTextAttribute()]
[System.Xml.Serialization.XmlAnyElementAttribute()]
public System.Xml.XmlNode[] Any {
get {
return this.anyField;
}
set {
this.anyField = value;
}
}
/// <summary>Optional URI identifying the source of the documentation (the 'source' attribute).</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="anyURI")]
public string source {
get {
return this.sourceField;
}
set {
this.sourceField = value;
}
}
/// <summary>The xml:lang attribute (note the XML 1998 namespace in the mapping below).</summary>
[System.Xml.Serialization.XmlAttributeAttribute(Form=System.Xml.Schema.XmlSchemaForm.Qualified, Namespace="http://www.w3.org/XML/1998/namespace")]
public string lang {
get {
return this.langField;
}
set {
this.langField = value;
}
}
/// <summary>Catch-all for any further attributes on the element.</summary>
[System.Xml.Serialization.XmlAnyAttributeAttribute()]
public System.Xml.XmlAttribute[] AnyAttr {
get {
return this.anyAttrField;
}
set {
this.anyAttrField = value;
}
}
}
/// <summary>The XSD &lt;import&gt; element: brings in components from another namespace, with optional namespace and schemaLocation hints.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class import : annotated {
private string namespaceField;
private string schemaLocationField;
/// <summary>Target namespace of the imported schema (the 'namespace' attribute).</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="anyURI")]
public string @namespace {
get {
return this.namespaceField;
}
set {
this.namespaceField = value;
}
}
/// <summary>Hint URI for locating the imported schema document.</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="anyURI")]
public string schemaLocation {
get {
return this.schemaLocationField;
}
set {
this.schemaLocationField = value;
}
}
}
/// <summary>The XSD &lt;include&gt; element: pulls in another schema document for the same target namespace.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class include : annotated {
private string schemaLocationField;
/// <summary>URI of the schema document to include.</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="anyURI")]
public string schemaLocation {
get {
return this.schemaLocationField;
}
set {
this.schemaLocationField = value;
}
}
}
/// <summary>
/// Generated (xsd.exe) model of the XSD &lt;redefine&gt; element: includes a schema document
/// while redefining some of its groups/types. Children are annotations, attribute groups,
/// complex types, groups, or simple types, discriminated by concrete element type.
/// </summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class redefine : openAttrs {
private openAttrs[] itemsField;
private string schemaLocationField;
private string idField;
/// <summary>Redefined components and annotations; element name maps to the listed concrete types.</summary>
[System.Xml.Serialization.XmlElementAttribute("annotation", typeof(annotation))]
[System.Xml.Serialization.XmlElementAttribute("attributeGroup", typeof(namedAttributeGroup))]
[System.Xml.Serialization.XmlElementAttribute("complexType", typeof(topLevelComplexType))]
[System.Xml.Serialization.XmlElementAttribute("group", typeof(namedGroup))]
[System.Xml.Serialization.XmlElementAttribute("simpleType", typeof(topLevelSimpleType))]
public openAttrs[] Items {
get {
return this.itemsField;
}
set {
this.itemsField = value;
}
}
/// <summary>URI of the schema document being redefined.</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="anyURI")]
public string schemaLocation {
get {
return this.schemaLocationField;
}
set {
this.schemaLocationField = value;
}
}
/// <summary>Optional XML ID of this element.</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="ID")]
public string id {
get {
return this.idField;
}
set {
this.idField = value;
}
}
}
/// <summary>Top-level (named) &lt;attributeGroup&gt; definition; marker subtype of <see cref="attributeGroup"/>.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute("attributeGroup", Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class namedAttributeGroup : attributeGroup {
}
/// <summary>Top-level (named) &lt;complexType&gt; definition; marker subtype of complexType.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute("complexType", Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class topLevelComplexType : complexType {
}
/// <summary>Top-level (named) &lt;group&gt; definition; marker subtype of realGroup.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute("group", Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class namedGroup : realGroup {
}
/// <summary>Top-level (named) &lt;simpleType&gt; definition; marker subtype of simpleType.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute("simpleType", Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class topLevelSimpleType : simpleType {
}
/// <summary>Top-level (global) &lt;element&gt; declaration; marker subtype of element.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute("element", Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class topLevelElement : element {
}
/// <summary>Top-level (global) &lt;attribute&gt; declaration; marker subtype of attribute.</summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute("attribute", Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class topLevelAttribute : attribute {
}
/// <summary>
/// Generated (xsd.exe) model of the XSD &lt;notation&gt; declaration: a name plus
/// optional public and system identifiers.
/// </summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("xsd", "4.0.30319.17929")]
[System.SerializableAttribute()]
[System.Diagnostics.DebuggerStepThroughAttribute()]
[System.ComponentModel.DesignerCategoryAttribute("code")]
[System.Xml.Serialization.XmlTypeAttribute(AnonymousType=true, Namespace="http://www.w3.org/2001/XMLSchema")]
[System.Xml.Serialization.XmlRootAttribute(Namespace="http://www.w3.org/2001/XMLSchema", IsNullable=false)]
public partial class notation : annotated {
private string nameField;
private string publicField;
private string systemField;
/// <summary>NCName of the notation.</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="NCName")]
public string name {
get {
return this.nameField;
}
set {
this.nameField = value;
}
}
/// <summary>Public identifier (the 'public' attribute, xs:token).</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="token")]
public string @public {
get {
return this.publicField;
}
set {
this.publicField = value;
}
}
/// <summary>System identifier URI (the 'system' attribute).</summary>
[System.Xml.Serialization.XmlAttributeAttribute(DataType="anyURI")]
public string system {
get {
return this.systemField;
}
set {
this.systemField = value;
}
}
}
// NOTE(review): stray '|' removed — it was a file-concatenation artifact separating the
// generated XML Schema serialization types above from the OpenLiveWriter code below,
// and is not valid C#.
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
using System;
using System.Globalization;
using System.Windows.Forms;
using OpenLiveWriter.Controls;
using OpenLiveWriter.CoreServices;
using OpenLiveWriter.CoreServices.Layout;
using OpenLiveWriter.Localization;
namespace OpenLiveWriter.PostEditor.Tables
{
public partial class TablePropertiesForm : ApplicationDialog
{
/// <summary>
/// Constructs the Insert Table dialog: runs designer initialization, then overwrites
/// the designer's control text with localized strings from <c>Res</c>/<c>StringId</c>.
/// </summary>
public TablePropertiesForm()
{
//
// Required for Windows Form Designer support
//
InitializeComponent();
// Localize designer-generated controls. Done here (after InitializeComponent)
// so the localized text replaces the designer's placeholder text.
this.buttonOK.Text = Res.Get(StringId.InsertButtonText);
this.buttonOK.Name = "buttonInsert"; // This is used by automation, don't change it!
this.buttonCancel.Text = Res.Get(StringId.CancelButton);
this.groupBoxSize.Text = Res.Get(StringId.Size);
this.labelRows.Text = Res.Get(StringId.TableRowsLabel);
this.labelColumns.Text = Res.Get(StringId.TableColumnsLabel);
this.groupBoxAppearance.Text = Res.Get(StringId.Appearance);
this.labelSpacingPixels.Text = Res.Get(StringId.Pixels);
this.labelPaddingPixels.Text = Res.Get(StringId.Pixels);
this.labelBorderPixels.Text = Res.Get(StringId.Pixels);
this.checkBoxShowBorder.Text = Res.Get(StringId.TableShowBorderLabel);
this.label3.Text = Res.Get(StringId.TableCellSpacingLabel);
this.label1.Text = Res.Get(StringId.TableCellPaddingLabel);
this.Text = Res.Get(StringId.InsertTable);
}
/// <summary>
/// Performs localization-aware layout after the form loads: auto-fits labels to their
/// (possibly translated) text, distributes controls horizontally, equalizes group-box
/// widths, and right-aligns the OK/Cancel pair.
/// NOTE: the nested AutoGrow scopes are order-sensitive — inner scopes must be disposed
/// (i.e. their using blocks exited) before the enclosing container is measured, so do
/// not reorder these blocks.
/// </summary>
/// <param name="e">Standard load event arguments, forwarded to the base class.</param>
protected override void OnLoad(EventArgs e)
{
base.OnLoad(e);
using (new AutoGrow(this, AnchorStyles.Right | AnchorStyles.Bottom, false))
{
// Fix up the Size group box
using (new AutoGrow(groupBoxSize, AnchorStyles.Right, false))
{
using (new AutoGrow(panelRowsAndColumns, AnchorStyles.Right, false))
{
// Size each label to its localized text, then lay out label/textbox pairs.
DisplayHelper.AutoFitSystemLabel(labelRows, 0, int.MaxValue);
DisplayHelper.AutoFitSystemLabel(labelColumns, 0, int.MaxValue);
LayoutHelper.DistributeHorizontally(8, labelRows, numericTextBoxRows);
LayoutHelper.DistributeHorizontally(8, labelColumns, numericTextBoxColumns);
LayoutHelper.DistributeHorizontally(16,
new ControlGroup(labelRows, numericTextBoxRows),
new ControlGroup(labelColumns, numericTextBoxColumns));
}
}
// Fix up the Appearance group box
using (new AutoGrow(groupBoxAppearance, AnchorStyles.Right | AnchorStyles.Bottom, false))
{
DisplayHelper.AutoFitSystemCheckBox(checkBoxShowBorder, 0, int.MaxValue);
DisplayHelper.AutoFitSystemLabel(label1, 0, int.MaxValue);
DisplayHelper.AutoFitSystemLabel(label3, 0, int.MaxValue);
DisplayHelper.AutoFitSystemLabel(labelBorderPixels, 0, int.MaxValue);
DisplayHelper.AutoFitSystemLabel(labelPaddingPixels, 0, int.MaxValue);
DisplayHelper.AutoFitSystemLabel(labelSpacingPixels, 0, int.MaxValue);
// Three columns: labels, numeric inputs, "pixels" suffix labels.
LayoutHelper.DistributeHorizontally(
8,
new ControlGroup(checkBoxShowBorder, label1, label3),
new ControlGroup(numericTextBoxBorder, numericTextBoxCellPadding, numericTextBoxCellSpacing),
new ControlGroup(labelBorderPixels, labelPaddingPixels, labelSpacingPixels)
);
}
// Make the group boxes the same width
int maxWidth = Math.Max(groupBoxAppearance.Width, groupBoxSize.Width);
groupBoxAppearance.Width = maxWidth;
groupBoxSize.Width = maxWidth;
}
// Align the OK/Cancel
ControlGroup okCancelGroup = new ControlGroup(buttonOK, buttonCancel);
okCancelGroup.Left = groupBoxAppearance.Right - okCancelGroup.Width;
LayoutHelper.FixupOKCancel(buttonOK, buttonCancel);
}
public TableCreationParameters CreateTable(IWin32Window owner)
{
// populate the form
TableCreationParameters creationParameters = CreateDefaultParameters();
numericTextBoxRows.Text = creationParameters.Rows.ToString(CultureInfo.CurrentCulture);
numericTextBoxColumns.Text = creationParameters.Columns.ToString(CultureInfo.CurrentCulture);
var width = creationParameters.Properties.Width;
columnWidthControl.Text = width.ToString(CultureInfo.CurrentCulture);
InitializeFormProperties(creationParameters.Properties);
// show the dialog
if (ShowDialog(owner) == DialogResult.OK)
{
// read input
TableCreationParameters parameters = new TableCreationParameters(
int.Parse(numericTextBoxRows.Text, CultureInfo.CurrentCulture),
int.Parse(numericTextBoxColumns.Text, CultureInfo.CurrentCulture),
ReadFormProperties());
// save as default for new tables
SaveDefaultTableCreationParameters(parameters);
// return
return parameters;
}
else
{
return null;
}
}
public TableProperties EditTable(IWin32Window owner, TableProperties properties)
{
// update UI for edit scenario
Text = Res.Get(StringId.TableTableProperties);
buttonOK.Text = Res.Get(StringId.OKButtonText);
int verticalSpaceLoss = columnWidthControl.Top - panelRowsAndColumns.Top;
panelRowsAndColumns.Visible = false;
columnWidthControl.Top = panelRowsAndColumns.Top;
groupBoxSize.Height -= verticalSpaceLoss;
groupBoxAppearance.Top -= verticalSpaceLoss;
Height -= verticalSpaceLoss;
// populate the form
InitializeFormProperties(properties);
// show the dialog
if (ShowDialog(owner) == DialogResult.OK)
{
return ReadFormProperties();
}
else
{
return null;
}
}
private void InitializeFormProperties(TableProperties properties)
{
BorderSize = properties.BorderSize;
numericTextBoxCellPadding.Text = properties.CellPadding;
numericTextBoxCellSpacing.Text = properties.CellSpacing;
columnWidthControl.ColumnWidth = properties.Width;
}
private TableProperties ReadFormProperties()
{
TableProperties properties = new TableProperties();
properties.CellPadding = numericTextBoxCellPadding.Text.Trim();
properties.CellSpacing = numericTextBoxCellSpacing.Text.Trim();
properties.BorderSize = BorderSize;
properties.Width = columnWidthControl.ColumnWidth;
return properties;
}
private readonly string ZERO = 0.ToString(CultureInfo.CurrentCulture);
private readonly string ONE = 1.ToString(CultureInfo.CurrentCulture);
private string BorderSize
{
get
{
if (checkBoxShowBorder.Checked && numericTextBoxBorder.Text.Trim() != String.Empty)
return numericTextBoxBorder.Text;
else
return ZERO;
}
set
{
if (value != String.Empty && value != ZERO)
{
checkBoxShowBorder.Checked = true;
numericTextBoxBorder.Text = value;
}
else
{
checkBoxShowBorder.Checked = false;
numericTextBoxBorder.Text = String.Empty;
}
ManageUIState();
}
}
private void buttonOK_Click(object sender, System.EventArgs e)
{
if (ValidateInput())
{
DialogResult = DialogResult.OK;
}
}
private void checkBoxShowBorder_CheckedChanged(object sender, System.EventArgs e)
{
ManageUIState();
}
private void ManageUIState()
{
numericTextBoxBorder.Enabled = checkBoxShowBorder.Checked;
if (checkBoxShowBorder.Checked)
{
if (BorderSize == ZERO)
numericTextBoxBorder.Text = ONE;
}
}
private bool ValidateInput()
{
// only validate row and column if they are visible
if (panelRowsAndColumns.Visible)
{
if (!ValidateTextBoxInteger(Res.Get(StringId.TableRows), numericTextBoxRows, 750))
return false;
if (!ValidateTextBoxPositiveInteger(Res.Get(StringId.TableRows), numericTextBoxRows))
return false;
if (!ValidateTextBoxInteger(Res.Get(StringId.TableColumns), numericTextBoxColumns, 100))
return false;
if (!ValidateTextBoxPositiveInteger(Res.Get(StringId.TableColumns), numericTextBoxColumns))
return false;
}
if (!columnWidthControl.ValidateInput(1000))
return false;
if (checkBoxShowBorder.Checked)
{
if (!ValidateTextBoxInteger(Res.Get(StringId.TableBorder), numericTextBoxBorder, 100))
return false;
if (!ValidateTextBoxPositiveInteger(Res.Get(StringId.TableBorder), numericTextBoxBorder))
return false;
}
string cellPadding = numericTextBoxCellPadding.Text.Trim();
if (cellPadding != String.Empty)
{
if (!ValidateTextBoxInteger(Res.Get(StringId.TableCellPadding), numericTextBoxCellPadding, 100))
return false;
if (!ValidateTextBoxGreaterThanZero(Res.Get(StringId.TableCellPadding), numericTextBoxCellPadding))
return false;
}
string cellSpacing = numericTextBoxCellSpacing.Text.Trim();
if (cellSpacing != String.Empty)
{
if (!ValidateTextBoxInteger(Res.Get(StringId.TableCellSpacing), numericTextBoxCellSpacing, 100))
return false;
if (!ValidateTextBoxGreaterThanZero(Res.Get(StringId.TableCellSpacing), numericTextBoxCellSpacing))
return false;
}
// got this far, we are ok
return true;
}
private bool ValidateTextBoxInteger(string name, TextBox textBox, int maxValue)
{
string textBoxValue = textBox.Text.Trim();
if (textBoxValue == String.Empty || !SafeCheckForInt(textBox))
{
DisplayMessage.Show(MessageId.UnspecifiedValue, this, name);
textBox.Focus();
return false;
}
else if (maxValue > 0)
{
int value = int.Parse(textBoxValue, CultureInfo.CurrentCulture);
if (value >= maxValue)
{
DisplayMessage.Show(MessageId.ValueExceedsMaximum, this, maxValue, name);
textBox.Focus();
return false;
}
else
{
return true;
}
}
else
{
return true;
}
}
private bool SafeCheckForInt(TextBox textBox)
{
try
{
int.Parse(textBox.Text.Trim(), CultureInfo.CurrentCulture);
return true;
}
catch
{
return false;
}
}
private bool ValidateTextBoxPositiveInteger(string name, TextBox textBox)
{
string textBoxValue = textBox.Text.Trim();
if (textBoxValue == String.Empty || int.Parse(textBoxValue, CultureInfo.CurrentCulture) <= 0)
{
DisplayMessage.Show(MessageId.InvalidNumberPositiveOnly, this, name);
textBox.Focus();
return false;
}
else
{
return true;
}
}
private bool ValidateTextBoxGreaterThanZero(string name, TextBox textBox)
{
string textBoxValue = textBox.Text.Trim();
if (textBoxValue == String.Empty || int.Parse(textBoxValue, CultureInfo.CurrentCulture) < 0)
{
DisplayMessage.Show(MessageId.InvalidNumberNonNegative, this, name);
textBox.Focus();
return false;
}
else
{
return true;
}
}
private TableCreationParameters CreateDefaultParameters()
{
// get default table properties
TableProperties tableProperties = new TableProperties();
tableProperties.CellPadding = TableEditingSettings.DefaultCellPadding;
tableProperties.CellSpacing = TableEditingSettings.DefaultCellSpacing;
tableProperties.BorderSize = TableEditingSettings.DefaultBorderSize;
tableProperties.Width = new PixelPercent();
// return default parameters
return new TableCreationParameters(
TableEditingSettings.DefaultRows,
TableEditingSettings.DefaultColumns,
tableProperties);
}
private void SaveDefaultTableCreationParameters(TableCreationParameters parameters)
{
TableEditingSettings.DefaultRows = parameters.Rows;
TableEditingSettings.DefaultColumns = parameters.Columns;
TableEditingSettings.DefaultCellPadding = parameters.Properties.CellPadding;
TableEditingSettings.DefaultCellSpacing = parameters.Properties.CellSpacing;
TableEditingSettings.DefaultBorderSize = parameters.Properties.BorderSize;
TableEditingSettings.DefaultWidth = parameters.Properties.Width;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using UnityEngine;
/// <summary>
/// Header/driver for FF9 geometry texture animations (".tab" GeoTexAnim assets):
/// loads animation headers and maps them onto the renderers/materials of either a
/// character model (skinned meshes) or a battle background (mesh renderers).
/// This class appears to be decompiled; the redundant temporaries in the static
/// constructor are decompiler artifacts and are preserved as-is.
/// </summary>
public class GEOTEXHEADER
{
    static GEOTEXHEADER()
    {
        // Note: this type is marked as 'beforefieldinit'.
        // Builds bgObjMappings: battle-background number -> per-slot renderer
        // group indices and material indices, consumed by InitTextureAnim.
        Dictionary<Int32, BGObjIndex> dictionary = new Dictionary<Int32, BGObjIndex>();
        dictionary.Add(4, new BGObjIndex(new Int32[1], new Int32[1]));
        Dictionary<Int32, BGObjIndex> dictionary2 = dictionary;
        Int32 key = 7;
        Int32[] array = new Int32[3];
        array[0] = 1;
        Int32[] array2 = new Int32[3];
        array2[0] = 1;
        dictionary2.Add(key, new BGObjIndex(array, array2));
        dictionary.Add(9, new BGObjIndex(new Int32[1], new Int32[] { 10 }));
        dictionary.Add(29, new BGObjIndex(new Int32[1], new Int32[1]));
        dictionary.Add(32, new BGObjIndex(new Int32[] { 2 }, new Int32[] { 3 }));
        dictionary.Add(42, new BGObjIndex(new Int32[] { 1 }, new Int32[] { 3 }));
        dictionary.Add(51, new BGObjIndex(new Int32[1], new Int32[] { 10 }));
        dictionary.Add(52, new BGObjIndex(new Int32[1], new Int32[] { 6 }));
        dictionary.Add(56, new BGObjIndex(new Int32[] { 1 }, new Int32[1]));
        dictionary.Add(57, new BGObjIndex(new Int32[1], new Int32[] { 2 }));
        dictionary.Add(66, new BGObjIndex(new Int32[1], new Int32[] { 1 }));
        dictionary.Add(67, new BGObjIndex(new Int32[1], new Int32[] { 10 }));
        Dictionary<Int32, BGObjIndex> dictionary3 = dictionary;
        Int32 key2 = 69;
        Int32[] array3 = new Int32[4];
        array3[0] = 2;
        array3[1] = 3;
        dictionary3.Add(key2, new BGObjIndex(array3, new Int32[] { 5, 0, 5, 4 }));
        dictionary.Add(71, new BGObjIndex(new Int32[] { 0, 0, 2 }, new Int32[] { 10, 9, 4 }));
        dictionary.Add(82, new BGObjIndex(new Int32[] { 2 }, new Int32[1]));
        dictionary.Add(92, new BGObjIndex(new Int32[] { 2 }, new Int32[] { 3 }));
        dictionary.Add(108, new BGObjIndex(new Int32[1], new Int32[] { 11 }));
        dictionary.Add(118, new BGObjIndex(new Int32[1], new Int32[] { 4 }));
        dictionary.Add(128, new BGObjIndex(new Int32[2], new Int32[] { 2, 3 }));
        dictionary.Add(137, new BGObjIndex(new Int32[2], new Int32[] { 2, 3 }));
        dictionary.Add(143, new BGObjIndex(new Int32[1], new Int32[1]));
        dictionary.Add(147, new BGObjIndex(new Int32[1], new Int32[1]));
        dictionary.Add(155, new BGObjIndex(new Int32[1], new Int32[1]));
        dictionary.Add(164, new BGObjIndex(new Int32[1], new Int32[1]));
        dictionary.Add(172, new BGObjIndex(new Int32[1], new Int32[] { 5 }));
        GEOTEXHEADER.bgObjMappings = dictionary;
    }

    /// <summary>
    /// Loads a GeoTexAnim asset: a UInt16 header count, a UInt16 pad word, then
    /// <c>count</c> serialized GEOTEXANIMHEADER records. On a missing asset,
    /// logs a warning and leaves <c>geotex</c> null (callers check for that).
    /// </summary>
    public void ReadTextureAnim(String path)
    {
        String[] tabInfo;
        Byte[] binAsset = AssetManager.LoadBytes(path, out tabInfo, true);
        if (binAsset == null)
        {
            global::Debug.LogWarning("Cannot find GeoTexAnim for : " + path);
            return;
        }
        using (BinaryReader binaryReader = new BinaryReader(new MemoryStream(binAsset)))
        {
            this.count = binaryReader.ReadUInt16();
            this.pad = binaryReader.ReadUInt16();
            this.geotex = new GEOTEXANIMHEADER[(Int32)this.count];
            for (Int32 i = 0; i < (Int32)this.count; i++)
            {
                this.geotex[i] = new GEOTEXANIMHEADER();
                this.geotex[i].ReadData(binaryReader);
            }
        }
    }

    /// <summary>
    /// Loads texture animation data for a battle player model and binds it to the
    /// model's skinned mesh renderers (preferring the original GameObject when
    /// present). No-op if the asset was not found.
    /// </summary>
    public void ReadPlayerTextureAnim(BTL_DATA btl, String path, Int32 scale = 1)
    {
        this.ReadTextureAnim(path);
        if (this.geotex == null)
        {
            return;
        }
        String geoName = FF9BattleDB.GEO.GetValue((Int32)btl.dms_geo_id);
        if (btl.originalGo != (UnityEngine.Object)null)
        {
            this._smrs = btl.originalGo.GetComponentsInChildren<SkinnedMeshRenderer>();
        }
        else
        {
            this._smrs = btl.gameObject.GetComponentsInChildren<SkinnedMeshRenderer>();
        }
        this.MultiTexAnim(geoName, scale);
    }

    /// <summary>
    /// Same as <see cref="ReadPlayerTextureAnim"/> but for the trance model.
    /// The trance GameObject is briefly activated so GetComponentsInChildren can
    /// find its renderers, then deactivated again.
    /// </summary>
    public void ReadTrancePlayerTextureAnim(BTL_DATA btl, String geoName, Int32 scale = 1)
    {
        this.ReadTextureAnim("Models/GeoTexAnim/" + geoName + ".tab");
        if (this.geotex == null)
        {
            return;
        }
        btl.tranceGo.SetActive(true);
        this._smrs = btl.tranceGo.GetComponentsInChildren<SkinnedMeshRenderer>();
        btl.tranceGo.SetActive(false);
        this.MultiTexAnim(geoName, scale);
    }

    /// <summary>
    /// Binds each animation slot to its renderer textures/render textures and
    /// rescales all UV rects: scale by <paramref name="scale"/>, flip the frame
    /// rects vertically (texture-space Y is inverted), inset by half a pixel on
    /// each side to avoid bleeding, then normalize to [0,1] texture coordinates.
    /// Assumes SetTexAnimIndexs yields at least <c>count</c> entries — TODO confirm.
    /// </summary>
    private void MultiTexAnim(String geoName, Int32 scale)
    {
        GeoTexAnim.SetTexAnimIndexs(geoName, out this._mainTextureIndexs, out this._subTextureIndexs);
        this.RenderTexWidth = (Single)this._smrs[this._mainTextureIndexs[0]].material.mainTexture.width;
        this.RenderTexHeight = (Single)this._smrs[this._mainTextureIndexs[0]].material.mainTexture.height;
        for (Int32 i = 0; i < (Int32)this.count; i++)
        {
            GeoTexAnim.MapTextureToTexAnimIndex(this._mainTextureIndexs[i], this._smrs[this._mainTextureIndexs[i]].material.mainTexture, this.TextureMapping);
            GeoTexAnim.MapTextureToTexAnimIndex(this._subTextureIndexs[i], this._smrs[this._subTextureIndexs[i]].material.mainTexture, this.TextureMapping);
            GeoTexAnim.MapRenderTexToTexAnimIndex(this._mainTextureIndexs[i], this.RenderTexWidth, this.RenderTexHeight, this._smrs[this._mainTextureIndexs[i]], this.TextureMapping[this._mainTextureIndexs[i]], this.RenderTexMapping);
        }
        Single renderTexWidth = this.RenderTexWidth;
        Single renderTexHeight = this.RenderTexHeight;
        for (Int32 j = 0; j < (Int32)this.count; j++)
        {
            // Target rect is scaled but stays in pixel space.
            Rect rect = this.geotex[j].targetuv;
            rect.x *= (Single)scale;
            rect.y *= (Single)scale;
            rect.width *= (Single)scale;
            rect.height *= (Single)scale;
            this.geotex[j].targetuv = rect;
            for (Int32 k = 0; k < (Int32)this.geotex[j].numframes; k++)
            {
                rect = this.geotex[j].rectuvs[k];
                rect.x *= (Single)scale;
                rect.y *= (Single)scale;
                rect.width *= (Single)scale;
                rect.height *= (Single)scale;
                // Flip vertically into texture space.
                rect.y = renderTexHeight - rect.y - rect.height;
                // Half-pixel inset on every edge to avoid sampling bleed.
                rect.x += 0.5f;
                rect.y += 0.5f;
                rect.width -= 1f;
                rect.height -= 1f;
                // Normalize to [0,1] UV coordinates.
                rect.x /= renderTexWidth;
                rect.y /= renderTexHeight;
                rect.width /= renderTexWidth;
                rect.height /= renderTexHeight;
                this.geotex[j].rectuvs[k] = rect;
            }
        }
    }

    /// <summary>
    /// Loads the texture animation for a battle background, deriving both the
    /// numeric background id (from "BBG_B###") and the TAM asset path from
    /// <paramref name="battleModelPath"/>.
    /// </summary>
    public void ReadBGTextureAnim(String battleModelPath)
    {
        this.bbgnumber = Int32.Parse(battleModelPath.Replace("BBG_B", String.Empty));
        String path = "BattleMap/BattleTexAnim/" + battleModelPath.Replace("BBG", "TAM") + ".tab";
        this.ReadTextureAnim(path);
    }

    /// <summary>
    /// Resolves the animated materials/textures for the current battle background
    /// using <c>bgObjMappings</c>. Background 7 takes its non-first slots from the
    /// two animated BG objects; backgrounds 57 and 71 additionally capture an
    /// extra (presumably transparency-related — TODO confirm) material per slot.
    /// No-op when no animation data was loaded.
    /// </summary>
    public void InitTextureAnim()
    {
        if (this.geotex == null)
        {
            return;
        }
        this._mrs = FF9StateSystem.Battle.FF9Battle.map.btlBGPtr.GetComponentsInChildren<MeshRenderer>();
        MeshRenderer[][] array = new MeshRenderer[2][];
        if (this.bbgnumber == 7)
        {
            array[0] = FF9StateSystem.Battle.FF9Battle.map.btlBGObjAnim[0].GetComponentsInChildren<MeshRenderer>();
            array[1] = FF9StateSystem.Battle.FF9Battle.map.btlBGObjAnim[1].GetComponentsInChildren<MeshRenderer>();
        }
        BGObjIndex bgobjIndex = GEOTEXHEADER.bgObjMappings[this.bbgnumber];
        this.texAnimMaterials = new Material[(Int32)this.count];
        this.TexAnimTextures = new Texture[(Int32)this.count];
        this.extraTexAimMaterials = new Material[(Int32)this.count];
        this.extraTexAnimTrTextures = new Texture[(Int32)this.count];
        for (Int32 i = 0; i < (Int32)this.count; i++)
        {
            if (this.bbgnumber == 7 && i != 0)
            {
                this.texAnimMaterials[i] = array[i - 1][bgobjIndex.groupIndex[i]].materials[bgobjIndex.materialIndex[i]];
                this.TexAnimTextures[i] = this.texAnimMaterials[i].mainTexture;
            }
            else
            {
                this.texAnimMaterials[i] = this._mrs[bgobjIndex.groupIndex[i]].materials[bgobjIndex.materialIndex[i]];
                this.TexAnimTextures[i] = this.texAnimMaterials[i].mainTexture;
                if (this.bbgnumber == 57)
                {
                    this.extraTexAimMaterials[i] = this._mrs[2].materials[0];
                    this.extraTexAnimTrTextures[i] = this.extraTexAimMaterials[i].mainTexture;
                }
                else if (this.bbgnumber == 71)
                {
                    this.extraTexAimMaterials[i] = this._mrs[0].materials[11];
                    this.extraTexAnimTrTextures[i] = this.extraTexAimMaterials[i].mainTexture;
                }
            }
        }
    }

    // Number of GEOTEXANIMHEADER records read from the asset.
    public UInt16 count;
    // Second UInt16 of the header; only read, never interpreted here.
    public UInt16 pad;
    // Per-slot animation headers; null when the asset was not found.
    public GEOTEXANIMHEADER[] geotex;
    // Renderers of the bound character model (player/trance path).
    public SkinnedMeshRenderer[] _smrs;
    // Renderers of the battle background (BG path).
    private MeshRenderer[] _mrs;
    // Per-slot materials/textures resolved by InitTextureAnim.
    public Material[] texAnimMaterials;
    public Texture[] TexAnimTextures;
    // Extra per-slot material/texture captured for backgrounds 57 and 71.
    public Material[] extraTexAimMaterials;
    public Texture[] extraTexAnimTrTextures;
    // Renderer indices for main/sub textures, per slot (from SetTexAnimIndexs).
    public Int32[] _mainTextureIndexs;
    public Int32[] _subTextureIndexs;
    // Mappings from tex-anim index to source texture / render texture.
    public Dictionary<Int32, Texture> TextureMapping = new Dictionary<Int32, Texture>();
    public Dictionary<Int32, RenderTexture> RenderTexMapping = new Dictionary<Int32, RenderTexture>();
    // Dimensions (pixels) of the first main texture; used to normalize UVs.
    public Single RenderTexWidth;
    public Single RenderTexHeight;
    // Numeric battle-background id parsed from the model path.
    public Int32 bbgnumber;
    // See static constructor.
    public static Dictionary<Int32, BGObjIndex> bgObjMappings;
}
| |
using System;
using System.Collections.Generic;
using System.Threading;
using System.Reflection;
using System.Xml;
using OpenMetaverse;
using OpenMetaverse.Packets;
using OpenMetaverse.Utilities;
namespace OpenMetaverse.TestClient
{
/// <summary>
/// A scriptable grid client: loads all Command subclasses from the executing
/// assembly, executes commands sent via IM from an authorized "master" (or group
/// members when group commands are enabled), and periodically ticks active
/// commands on a 500 ms timer.
/// </summary>
public class TestClient : GridClient
{
    // Currently active group (updated from AgentDataUpdate packets).
    public UUID GroupID = UUID.Zero;
    // Members of the active group; null until a GroupMembersReply arrives.
    public Dictionary<UUID, GroupMember> GroupMembers;
    // Last seen AvatarAppearance packet per avatar (guarded by lock on itself).
    public Dictionary<UUID, AvatarAppearancePacket> Appearances = new Dictionary<UUID, AvatarAppearancePacket>();
    // Registered commands keyed by lower-cased command name.
    public Dictionary<string, Command> Commands = new Dictionary<string, Command>();
    public bool Running = true;
    // When true, IMs from members of the active group are executed as commands.
    public bool GroupCommands = false;
    public string MasterName = String.Empty;
    // Agent authorized to issue commands via IM; UUID.Zero means unset.
    public UUID MasterKey = UUID.Zero;
    public bool AllowObjectMaster = false;
    public ClientManager ClientManager;
    public VoiceManager VoiceManager;
    // Shell-like inventory commands need to be aware of the 'current' inventory folder.
    public InventoryFolder CurrentDirectory = null;
    private System.Timers.Timer updateTimer;
    // Correlates GroupMembersReply events with our own member request.
    private UUID GroupMembersRequestID;
    public Dictionary<UUID, Group> GroupsCache = null;
    // Signaled when a CurrentGroups reply has populated GroupsCache.
    private ManualResetEvent GroupsEvent = new ManualResetEvent(false);

    /// <summary>
    /// Registers all commands found in the executing assembly, configures libomv
    /// settings, hooks network/IM/group/packet callbacks, and starts the 500 ms
    /// command "Think" timer.
    /// </summary>
    public TestClient(ClientManager manager)
    {
        ClientManager = manager;

        updateTimer = new System.Timers.Timer(500);
        updateTimer.Elapsed += new System.Timers.ElapsedEventHandler(updateTimer_Elapsed);

        RegisterAllCommands(Assembly.GetExecutingAssembly());

        Settings.LOG_LEVEL = Helpers.LogLevel.Debug;
        Settings.LOG_RESENDS = false;
        Settings.STORE_LAND_PATCHES = true;
        Settings.ALWAYS_DECODE_OBJECTS = true;
        Settings.ALWAYS_REQUEST_OBJECTS = true;
        Settings.SEND_AGENT_UPDATES = true;
        Settings.USE_ASSET_CACHE = true;

        Network.RegisterCallback(PacketType.AgentDataUpdate, AgentDataUpdateHandler);
        Network.LoginProgress += LoginHandler;
        Self.IM += Self_IM;
        Groups.GroupMembersReply += GroupMembersHandler;
        Inventory.InventoryObjectOffered += Inventory_OnInventoryObjectReceived;
        Network.RegisterCallback(PacketType.AvatarAppearance, AvatarAppearanceHandler);
        Network.RegisterCallback(PacketType.AlertMessage, AlertMessageHandler);

        VoiceManager = new VoiceManager(this);

        updateTimer.Start();
    }

    /// <summary>
    /// IM dispatcher: accepts teleport lures and executes agent/object messages
    /// as commands, but only when the sender is the master (or, with
    /// GroupCommands enabled, a member of the active group). Everything else is
    /// logged and ignored.
    /// </summary>
    void Self_IM(object sender, InstantMessageEventArgs e)
    {
        bool groupIM = e.IM.GroupIM && GroupMembers != null && GroupMembers.ContainsKey(e.IM.FromAgentID) ? true : false;

        if (e.IM.FromAgentID == MasterKey || (GroupCommands && groupIM))
        {
            // Received an IM from someone that is authenticated
            Console.WriteLine("<{0} ({1})> {2}: {3} (@{4}:{5})", e.IM.GroupIM ? "GroupIM" : "IM", e.IM.Dialog, e.IM.FromAgentName, e.IM.Message,
                e.IM.RegionID, e.IM.Position);

            if (e.IM.Dialog == InstantMessageDialog.RequestTeleport)
            {
                Console.WriteLine("Accepting teleport lure.");
                Self.TeleportLureRespond(e.IM.FromAgentID, e.IM.IMSessionID, true);
            }
            else if (
                e.IM.Dialog == InstantMessageDialog.MessageFromAgent ||
                e.IM.Dialog == InstantMessageDialog.MessageFromObject)
            {
                ClientManager.Instance.DoCommandAll(e.IM.Message, e.IM.FromAgentID);
            }
        }
        else
        {
            // Received an IM from someone that is not the bot's master, ignore
            Console.WriteLine("<{0} ({1})> {2} (not master): {3} (@{4}:{5})", e.IM.GroupIM ? "GroupIM" : "IM", e.IM.Dialog, e.IM.FromAgentName, e.IM.Message,
                e.IM.RegionID, e.IM.Position);
            return;
        }
    }

    /// <summary>
    /// Initialize everything that needs to be initialized once we're logged in.
    /// </summary>
    /// <param name="sender">Event source.</param>
    /// <param name="e">Login progress data; acted on only for LoginStatus.Success.</param>
    public void LoginHandler(object sender, LoginProgressEventArgs e)
    {
        if (e.Status == LoginStatus.Success)
        {
            // Start in the inventory root folder.
            CurrentDirectory = Inventory.Store.RootFolder;
        }
    }

    /// <summary>
    /// Instantiates and registers every Command subclass in the assembly using
    /// its (TestClient) constructor; construction failures are logged and skipped.
    /// </summary>
    public void RegisterAllCommands(Assembly assembly)
    {
        foreach (Type t in assembly.GetTypes())
        {
            try
            {
                if (t.IsSubclassOf(typeof(Command)))
                {
                    ConstructorInfo info = t.GetConstructor(new Type[] { typeof(TestClient) });
                    Command command = (Command)info.Invoke(new object[] { this });
                    RegisterCommand(command);
                }
            }
            catch (Exception e)
            {
                Console.WriteLine(e.ToString());
            }
        }
    }

    /// <summary>Adds a command under its lower-cased name; first registration wins.</summary>
    public void RegisterCommand(Command command)
    {
        command.Client = this;
        if (!Commands.ContainsKey(command.Name.ToLower()))
        {
            Commands.Add(command.Name.ToLower(), command);
        }
    }

    /// <summary>
    /// Synchronously refreshes GroupsCache: requests current groups and blocks
    /// up to 10 seconds for the reply.
    /// </summary>
    public void ReloadGroupsCache()
    {
        Groups.CurrentGroups += Groups_CurrentGroups;
        Groups.RequestCurrentGroups();
        GroupsEvent.WaitOne(10000, false);
        Groups.CurrentGroups -= Groups_CurrentGroups;
        GroupsEvent.Reset();
    }

    // NOTE(review): the first assignment is done without holding the lock, and
    // replacing the reference inside `lock (GroupsCache)` locks the old instance;
    // preserved as-is — confirm intended synchronization before changing.
    void Groups_CurrentGroups(object sender, CurrentGroupsEventArgs e)
    {
        if (null == GroupsCache)
            GroupsCache = e.Groups;
        else
            lock (GroupsCache) { GroupsCache = e.Groups; }
        GroupsEvent.Set();
    }

    /// <summary>
    /// Resolves a group name (or UUID string) to a group UUID using the cache,
    /// reloading it once if empty. Returns UUID.Zero when not found.
    /// </summary>
    public UUID GroupName2UUID(String groupName)
    {
        UUID tryUUID;
        if (UUID.TryParse(groupName,out tryUUID))
            return tryUUID;
        if (null == GroupsCache) {
            ReloadGroupsCache();
            if (null == GroupsCache)
                return UUID.Zero;
        }
        lock(GroupsCache) {
            if (GroupsCache.Count > 0) {
                foreach (Group currentGroup in GroupsCache.Values)
                    if (currentGroup.Name.ToLower() == groupName.ToLower())
                        return currentGroup.ID;
            }
        }
        return UUID.Zero;
    }

    // Ticks every active command's Think() on the 500 ms timer.
    private void updateTimer_Elapsed(object sender, System.Timers.ElapsedEventArgs e)
    {
        foreach (Command c in Commands.Values)
            if (c.Active)
                c.Think();
    }

    // Tracks the active group and kicks off a member-list request for it.
    private void AgentDataUpdateHandler(object sender, PacketReceivedEventArgs e)
    {
        AgentDataUpdatePacket p = (AgentDataUpdatePacket)e.Packet;
        if (p.AgentData.AgentID == e.Simulator.Client.Self.AgentID)
        {
            GroupID = p.AgentData.ActiveGroupID;

            GroupMembersRequestID = e.Simulator.Client.Groups.RequestGroupMembers(GroupID);
        }
    }

    // Stores the member list, but only for the request we issued ourselves.
    private void GroupMembersHandler(object sender, GroupMembersReplyEventArgs e)
    {
        if (e.RequestID != GroupMembersRequestID) return;

        GroupMembers = e.Members;
    }

    // Caches the most recent appearance packet per sending avatar.
    private void AvatarAppearanceHandler(object sender, PacketReceivedEventArgs e)
    {
        Packet packet = e.Packet;

        AvatarAppearancePacket appearance = (AvatarAppearancePacket)packet;

        lock (Appearances) Appearances[appearance.Sender.ID] = appearance;
    }

    // Logs simulator alert messages.
    private void AlertMessageHandler(object sender, PacketReceivedEventArgs e)
    {
        Packet packet = e.Packet;

        AlertMessagePacket message = (AlertMessagePacket)packet;

        Logger.Log("[AlertMessage] " + Utils.BytesToString(message.AlertData.Message), Helpers.LogLevel.Info, this);
    }

    // Accepts inventory offers only from the master (when set) or, otherwise,
    // from known members of the active group.
    private void Inventory_OnInventoryObjectReceived(object sender, InventoryObjectOfferedEventArgs e)
    {
        if (MasterKey != UUID.Zero)
        {
            if (e.Offer.FromAgentID != MasterKey)
                return;
        }
        else if (GroupMembers != null && !GroupMembers.ContainsKey(e.Offer.FromAgentID))
        {
            return;
        }

        e.Accept = true;
        return;
    }
}
}
| |
//
// Copyright (c) 2004-2011 Jaroslaw Kowalski <[email protected]>
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
#if !SILVERLIGHT
namespace NLog.Internal.FileAppenders
{
using System;
using System.IO;
using System.Security;
using System.Security.AccessControl;
using System.Security.Cryptography;
using System.Security.Principal;
using System.Text;
using System.Threading;
using NLog.Common;
/// <summary>
/// Provides a multiprocess-safe atomic file appends while
/// keeping the files open.
/// </summary>
/// <remarks>
/// On Unix you can get all the appends to be atomic, even when multiple
/// processes are trying to write to the same file, because setting the file
/// pointer to the end of the file and appending can be made one operation.
/// On Win32 we need to maintain some synchronization between processes
/// (global named mutex is used for this)
/// </remarks>
[SecuritySafeCritical]
internal class MutexMultiProcessFileAppender : BaseFileAppender
{
public static readonly IFileAppenderFactory TheFactory = new Factory();
private FileStream file;
private Mutex mutex;
/// <summary>
/// Initializes a new instance of the <see cref="MutexMultiProcessFileAppender" /> class.
/// </summary>
/// <param name="fileName">Name of the file.</param>
/// <param name="parameters">The parameters.</param>
public MutexMultiProcessFileAppender(string fileName, ICreateFileParameters parameters) : base(fileName, parameters)
{
try
{
this.mutex = CreateSharableMutex(GetMutexName(fileName));
this.file = CreateFileStream(true);
}
catch
{
if (this.mutex != null)
{
this.mutex.Close();
this.mutex = null;
}
if (this.file != null)
{
this.file.Close();
this.file = null;
}
throw;
}
}
/// <summary>
/// Writes the specified bytes.
/// </summary>
/// <param name="bytes">The bytes to be written.</param>
public override void Write(byte[] bytes)
{
if (this.mutex == null)
{
return;
}
try
{
this.mutex.WaitOne();
}
catch (AbandonedMutexException)
{
// ignore the exception, another process was killed without properly releasing the mutex
// the mutex has been acquired, so proceed to writing
// See: http://msdn.microsoft.com/en-us/library/system.threading.abandonedmutexexception.aspx
}
try
{
this.file.Seek(0, SeekOrigin.End);
this.file.Write(bytes, 0, bytes.Length);
this.file.Flush();
FileTouched();
}
finally
{
this.mutex.ReleaseMutex();
}
}
/// <summary>
/// Closes this instance.
/// </summary>
public override void Close()
{
InternalLogger.Trace("Closing '{0}'", FileName);
if (this.mutex != null)
{
this.mutex.Close();
}
if (this.file != null)
{
this.file.Close();
}
this.mutex = null;
this.file = null;
FileTouched();
}
/// <summary>
/// Flushes this instance.
/// </summary>
public override void Flush()
{
// do nothing, the stream is always flushed
}
/// <summary>
/// Gets the file info.
/// </summary>
/// <param name="lastWriteTime">The last file write time. The value must be of UTC kind.</param>
/// <param name="fileLength">Length of the file.</param>
/// <returns>
/// True if the operation succeeded, false otherwise.
/// </returns>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Reliability", "CA2001:AvoidCallingProblematicMethods", MessageId = "System.Runtime.InteropServices.SafeHandle.DangerousGetHandle", Justification = "Optimization")]
public override bool GetFileInfo(out DateTime lastWriteTime, out long fileLength)
{
return FileInfoHelper.Helper.GetFileInfo(FileName, this.file.SafeFileHandle.DangerousGetHandle(), out lastWriteTime, out fileLength);
}
private static Mutex CreateSharableMutex(string name)
{
// Creates a mutex sharable by more than one process
var mutexSecurity = new MutexSecurity();
var everyoneSid = new SecurityIdentifier(WellKnownSidType.WorldSid, null);
mutexSecurity.AddAccessRule(new MutexAccessRule(everyoneSid, MutexRights.FullControl, AccessControlType.Allow));
// The constructor will either create new mutex or open
// an existing one, in a thread-safe manner
bool createdNew;
return new Mutex(false, name, out createdNew, mutexSecurity);
}
private static string GetMutexName(string fileName)
{
// The global kernel object namespace is used so the mutex
// can be shared among processes in all sessions
const string mutexNamePrefix = @"Global\NLog-FileLock-";
const int maxMutexNameLength = 260;
string canonicalName = Path.GetFullPath(fileName).ToLowerInvariant();
// Mutex names must not contain a backslash, it's the namespace separator,
// but all other are OK
canonicalName = canonicalName.Replace('\\', '/');
// A mutex name must not exceed MAX_PATH (260) characters
if (mutexNamePrefix.Length + canonicalName.Length <= maxMutexNameLength)
{
return mutexNamePrefix + canonicalName;
}
// The unusual case of the path being too long; let's hash the canonical name,
// so it can be safely shortened and still remain unique
string hash;
using (MD5 md5 = MD5.Create())
{
byte[] bytes = md5.ComputeHash(Encoding.UTF8.GetBytes(canonicalName));
hash = Convert.ToBase64String(bytes);
}
// The hash makes the name unique, but also add the end of the path,
// so the end of the name tells us which file it is (for debugging)
int cutOffIndex = canonicalName.Length - (maxMutexNameLength - mutexNamePrefix.Length - hash.Length);
return mutexNamePrefix + hash + canonicalName.Substring(cutOffIndex);
}
/// <summary>
/// Factory class.
/// </summary>
private class Factory : IFileAppenderFactory
{
/// <summary>
/// Opens the appender for given file name and parameters.
/// </summary>
/// <param name="fileName">Name of the file.</param>
/// <param name="parameters">Creation parameters.</param>
/// <returns>
/// Instance of <see cref="BaseFileAppender"/> which can be used to write to the file.
/// </returns>
BaseFileAppender IFileAppenderFactory.Open(string fileName, ICreateFileParameters parameters)
{
return new MutexMultiProcessFileAppender(fileName, parameters);
}
}
}
}
#endif
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Reflection;
using System.Threading;
using System.Threading.Tasks;
using Xunit;
namespace System.Collections.Tests
{
public static class HashtableTests
{
[Fact]
public static void Ctor_Empty()
{
    // A default-constructed hashtable is empty and uses default comparers.
    ComparableHashtable hash = new ComparableHashtable();
    VerifyHashtable(hash, null, null);
}
[Fact]
public static void Ctor_IDictionary()
{
    // Copy-constructing from empty tables — even deeply nested copies of
    // copies — yields an empty table without throwing.
    var empty = new ComparableHashtable(new Hashtable());
    Assert.Equal(0, empty.Count);

    var nested = new ComparableHashtable(new Hashtable(new Hashtable(new Hashtable(new Hashtable(new Hashtable())))));
    Assert.Equal(0, nested.Count);

    // Copy-constructing from a populated table preserves all entries.
    Hashtable source = Helpers.CreateIntHashtable(100);
    var copy = new ComparableHashtable(source);
    VerifyHashtable(copy, source, null);
}
[Fact]
public static void Ctor_IDictionary_NullDictionary_ThrowsArgumentNullException()
{
    // A null source dictionary must be rejected with the parameter name "d".
    IDictionary nullDictionary = null;
    Assert.Throws<ArgumentNullException>("d", () => new Hashtable(nullDictionary));
}
[Fact]
public static void Ctor_IEqualityComparer()
{
    // A null comparer falls back to default hashing/equality.
    VerifyHashtable(new ComparableHashtable((IEqualityComparer)null), null, null);

    // A custom comparer is stored and reported back by the hashtable.
    Helpers.PerformActionOnCustomCulture(() =>
    {
        IEqualityComparer cultureComparer = StringComparer.CurrentCulture;
        VerifyHashtable(new ComparableHashtable(cultureComparer), null, cultureComparer);
    });
}
[Theory]
[InlineData(0)]
[InlineData(10)]
[InlineData(100)]
public static void Ctor_Int(int capacity)
{
    // Capacity only preallocates; the table is still empty with default comparers.
    VerifyHashtable(new ComparableHashtable(capacity), null, null);
}
[Fact]
public static void Ctor_Int_Invalid()
{
    // Negative capacities are rejected outright.
    Assert.Throws<ArgumentOutOfRangeException>("capacity", () => new Hashtable(-1));

    // A capacity whose capacity/load-factor product exceeds Int32.MaxValue is invalid.
    Assert.Throws<ArgumentException>("capacity", () => new Hashtable(int.MaxValue));
}
[Fact]
public static void Ctor_IDictionary_Int()
{
    // Empty sources (including nested copies of copies) produce empty tables
    // at a 1.0 load factor without throwing.
    var empty = new ComparableHashtable(new Hashtable(), 1f);
    Assert.Equal(0, empty.Count);

    var nested = new ComparableHashtable(new Hashtable(new Hashtable(new Hashtable(new Hashtable(new Hashtable(), 1f), 1f), 1f), 1f), 1f);
    Assert.Equal(0, nested.Count);

    // A populated source is copied element-for-element.
    Hashtable source = Helpers.CreateIntHashtable(100);
    VerifyHashtable(new ComparableHashtable(source, 1f), source, null);
}
[Fact]
public static void Ctor_IDictionary_Int_Invalid()
{
Assert.Throws<ArgumentNullException>("d", () => new Hashtable(null, 1f)); // Dictionary is null
Assert.Throws<ArgumentOutOfRangeException>("loadFactor", () => new Hashtable(new Hashtable(), 0.09f)); // Load factor < 0.1f
Assert.Throws<ArgumentOutOfRangeException>("loadFactor", () => new Hashtable(new Hashtable(), 1.01f)); // Load factor > 1f
Assert.Throws<ArgumentOutOfRangeException>("loadFactor", () => new Hashtable(new Hashtable(), float.NaN)); // Load factor is NaN
Assert.Throws<ArgumentOutOfRangeException>("loadFactor", () => new Hashtable(new Hashtable(), float.PositiveInfinity)); // Load factor is infinity
Assert.Throws<ArgumentOutOfRangeException>("loadFactor", () => new Hashtable(new Hashtable(), float.NegativeInfinity)); // Load factor is infinity
}
[Fact]
public static void Ctor_IDictionary_IEqualityComparer()
{
// No exception
var hash1 = new ComparableHashtable(new Hashtable(), null);
Assert.Equal(0, hash1.Count);
hash1 = new ComparableHashtable(new Hashtable(new Hashtable(new Hashtable(new Hashtable(new Hashtable(), null), null), null), null), null);
Assert.Equal(0, hash1.Count);
// Null comparer
Hashtable hash2 = Helpers.CreateIntHashtable(100);
hash1 = new ComparableHashtable(hash2, null);
VerifyHashtable(hash1, hash2, null);
// Custom comparer
hash2 = Helpers.CreateIntHashtable(100);
Helpers.PerformActionOnCustomCulture(() =>
{
IEqualityComparer comparer = StringComparer.CurrentCulture;
hash1 = new ComparableHashtable(hash2, comparer);
VerifyHashtable(hash1, hash2, comparer);
});
}
[Fact]
public static void Ctor_IDictionary_IEqualityComparer_NullDictionary_ThrowsArgumentNullException()
{
Assert.Throws<ArgumentNullException>("d", () => new Hashtable(null, null)); // Dictionary is null
}
[Theory]
[InlineData(0, 0.1)]
[InlineData(10, 0.2)]
[InlineData(100, 0.3)]
[InlineData(1000, 1)]
public static void Ctor_Int_Int(int capacity, float loadFactor)
{
var hash = new ComparableHashtable(capacity, loadFactor);
VerifyHashtable(hash, null, null);
}
[Fact]
public static void Ctor_Int_Int_GenerateNewPrime()
{
// The ctor for Hashtable performs the following calculation:
// rawSize = capacity / (loadFactor * 0.72)
// If rawSize is > 3, then it calls HashHelpers.GetPrime(rawSize) to generate a prime.
// Then, if the rawSize > 7,199,369 (the largest number in a list of known primes), we have to generate a prime programatically
// This test makes sure this works.
int capacity = 8000000;
float loadFactor = 0.1f / 0.72f;
try
{
var hash = new ComparableHashtable(capacity, loadFactor);
}
catch (OutOfMemoryException)
{
// On memory constrained devices, we can get an OutOfMemoryException, which we can safely ignore.
}
}
[Fact]
public static void Ctor_Int_Int_Invalid()
{
Assert.Throws<ArgumentOutOfRangeException>("capacity", () => new Hashtable(-1, 1f)); // Capacity < 0
Assert.Throws<ArgumentException>("capacity", () => new Hashtable(int.MaxValue, 0.1f)); // Capacity / load factor > int.MaxValue
Assert.Throws<ArgumentOutOfRangeException>("loadFactor", () => new Hashtable(100, 0.09f)); // Load factor < 0.1f
Assert.Throws<ArgumentOutOfRangeException>("loadFactor", () => new Hashtable(100, 1.01f)); // Load factor > 1f
Assert.Throws<ArgumentOutOfRangeException>("loadFactor", () => new Hashtable(100, float.NaN)); // Load factor is NaN
Assert.Throws<ArgumentOutOfRangeException>("loadFactor", () => new Hashtable(100, float.PositiveInfinity)); // Load factor is infinity
Assert.Throws<ArgumentOutOfRangeException>("loadFactor", () => new Hashtable(100, float.NegativeInfinity)); // Load factor is infinity
}
[Theory]
[InlineData(0)]
[InlineData(10)]
[InlineData(100)]
[InlineData(1000)]
public static void Ctor_Int_IEqualityComparer(int capacity)
{
// Null comparer
var hash = new ComparableHashtable(capacity, null);
VerifyHashtable(hash, null, null);
// Custom comparer
Helpers.PerformActionOnCustomCulture(() =>
{
IEqualityComparer comparer = StringComparer.CurrentCulture;
hash = new ComparableHashtable(capacity, comparer);
VerifyHashtable(hash, null, comparer);
});
}
[Fact]
public static void Ctor_Int_IEqualityComparer_Invalid()
{
Assert.Throws<ArgumentOutOfRangeException>("capacity", () => new Hashtable(-1, null)); // Capacity < 0
Assert.Throws<ArgumentException>("capacity", () => new Hashtable(int.MaxValue, null)); // Capacity / load factor > int.MaxValue
}
[Fact]
public static void Ctor_IDictionary_Int_IEqualityComparer()
{
// No exception
var hash1 = new ComparableHashtable(new Hashtable(), 1f, null);
Assert.Equal(0, hash1.Count);
hash1 = new ComparableHashtable(new Hashtable(new Hashtable(
new Hashtable(new Hashtable(new Hashtable(), 1f, null), 1f, null), 1f, null), 1f, null), 1f, null);
Assert.Equal(0, hash1.Count);
// Null comparer
Hashtable hash2 = Helpers.CreateIntHashtable(100);
hash1 = new ComparableHashtable(hash2, 1f, null);
VerifyHashtable(hash1, hash2, null);
hash2 = Helpers.CreateIntHashtable(100);
// Custom comparer
Helpers.PerformActionOnCustomCulture(() =>
{
IEqualityComparer comparer = StringComparer.CurrentCulture;
hash1 = new ComparableHashtable(hash2, 1f, comparer);
VerifyHashtable(hash1, hash2, comparer);
});
}
[Fact]
public static void Ctor_IDictionary_LoadFactor_IEqualityComparer_Invalid()
{
Assert.Throws<ArgumentNullException>("d", () => new Hashtable(null, 1f, null)); // Dictionary is null
Assert.Throws<ArgumentOutOfRangeException>("loadFactor", () => new Hashtable(new Hashtable(), 0.09f, null)); // Load factor < 0.1f
Assert.Throws<ArgumentOutOfRangeException>("loadFactor", () => new Hashtable(new Hashtable(), 1.01f, null)); // Load factor > 1f
Assert.Throws<ArgumentOutOfRangeException>("loadFactor", () => new Hashtable(new Hashtable(), float.NaN, null)); // Load factor is NaN
Assert.Throws<ArgumentOutOfRangeException>("loadFactor", () => new Hashtable(new Hashtable(), float.PositiveInfinity, null)); // Load factor is infinity
Assert.Throws<ArgumentOutOfRangeException>("loadFactor", () => new Hashtable(new Hashtable(), float.NegativeInfinity, null)); // Load factor is infinity
}
[Theory]
[InlineData(0, 0.1)]
[InlineData(10, 0.2)]
[InlineData(100, 0.3)]
[InlineData(1000, 1)]
public static void Ctor_Int_Int_IEqualityComparer(int capacity, float loadFactor)
{
// Null comparer
var hash = new ComparableHashtable(capacity, loadFactor, null);
VerifyHashtable(hash, null, null);
Assert.Null(hash.EqualityComparer);
// Custom comparer
Helpers.PerformActionOnCustomCulture(() =>
{
IEqualityComparer comparer = StringComparer.CurrentCulture;
hash = new ComparableHashtable(capacity, loadFactor, comparer);
VerifyHashtable(hash, null, comparer);
});
}
[Fact]
public static void Ctor_Capacity_LoadFactor_IEqualityComparer_Invalid()
{
Assert.Throws<ArgumentOutOfRangeException>("capacity", () => new Hashtable(-1, 1f, null)); // Capacity < 0
Assert.Throws<ArgumentException>("capacity", () => new Hashtable(int.MaxValue, 0.1f, null)); // Capacity / load factor > int.MaxValue
Assert.Throws<ArgumentOutOfRangeException>("loadFactor", () => new Hashtable(100, 0.09f, null)); // Load factor < 0.1f
Assert.Throws<ArgumentOutOfRangeException>("loadFactor", () => new Hashtable(100, 1.01f, null)); // Load factor > 1f
Assert.Throws<ArgumentOutOfRangeException>("loadFactor", () => new Hashtable(100, float.NaN, null)); // Load factor is NaN
Assert.Throws<ArgumentOutOfRangeException>("loadFactor", () => new Hashtable(100, float.PositiveInfinity, null)); // Load factor is infinity
Assert.Throws<ArgumentOutOfRangeException>("loadFactor", () => new Hashtable(100, float.NegativeInfinity, null)); // Load factor is infinity
}
[Fact]
public static void DebuggerAttribute()
{
DebuggerAttributes.ValidateDebuggerDisplayReferences(new Hashtable());
var hash = new Hashtable() { { "a", 1 }, { "b", 2 } };
DebuggerAttributes.ValidateDebuggerTypeProxyProperties(hash);
DebuggerAttributes.ValidateDebuggerTypeProxyProperties(typeof(Hashtable), Hashtable.Synchronized(hash));
bool threwNull = false;
try
{
DebuggerAttributes.ValidateDebuggerTypeProxyProperties(typeof(Hashtable), null);
}
catch (TargetInvocationException ex)
{
threwNull = ex.InnerException is ArgumentNullException;
}
Assert.True(threwNull);
}
[Fact]
public static void Add_ReferenceType()
{
var hash1 = new Hashtable();
Helpers.PerformActionOnAllHashtableWrappers(hash1, hash2 =>
{
// Value is a reference
var foo = new Foo();
hash2.Add("Key", foo);
Assert.Equal("Hello World", ((Foo)hash2["Key"]).StringValue);
// Changing original object should change the object stored in the Hashtable
foo.StringValue = "Goodbye";
Assert.Equal("Goodbye", ((Foo)hash2["Key"]).StringValue);
});
}
[Fact]
public static void Add_ClearRepeatedly()
{
const int Iterations = 2;
const int Count = 2;
var hash = new Hashtable();
for (int i = 0; i < Iterations; i++)
{
for (int j = 0; j < Count; j++)
{
string key = "Key: i=" + i + ", j=" + j;
string value = "Value: i=" + i + ", j=" + j;
hash.Add(key, value);
}
Assert.Equal(Count, hash.Count);
hash.Clear();
}
}
[Fact]
[OuterLoop]
public static void AddRemove_LargeAmountNumbers()
{
    // Stress test: add, look up and remove 100,000 entries.
    // Generate the test data with a fixed seed so the run is reproducible.
    var inputData = new int[100000];
    var random = new Random(341553);
    for (int i = 0; i < inputData.Length; i++)
    {
        inputData[i] = random.Next(7500000, int.MaxValue);
    }
    var hash = new Hashtable();
    int count = 0;
    // Iterating with `long` widens each int, so the keys are boxed longs.
    foreach (long number in inputData)
    {
        hash.Add(number, count++);
    }
    count = 0;
    foreach (long number in inputData)
    {
        // xUnit convention: expected value first, actual second.
        Assert.Equal(count, hash[number]);
        Assert.True(hash.ContainsKey(number));
        count++;
    }
    foreach (long number in inputData)
    {
        hash.Remove(number);
    }
    Assert.Equal(0, hash.Count);
}
[Fact]
public static void DuplicatedKeysWithInitialCapacity()
{
    // Force a rehash: too many items with always-colliding hashcodes are
    // repeatedly added and removed, then ordinary string keys are inserted
    // to verify the table still behaves correctly afterwards.
    var hash = new Hashtable(200);
    const int Iterations = 1600;
    for (int i = 0; i < Iterations; i += 2)
    {
        hash.Add(new BadHashCode(i), i.ToString());
        hash.Add(new BadHashCode(i + 1), (i + 1).ToString());
        hash.Remove(new BadHashCode(i));
        hash.Remove(new BadHashCode(i + 1));
    }
    // After the collision churn, normal adds and lookups must still work.
    for (int i = 0; i < Iterations; i++)
    {
        hash.Add(i.ToString(), i);
    }
    for (int i = 0; i < Iterations; i++)
    {
        Assert.Equal(i, hash[i.ToString()]);
    }
}
[Fact]
public static void DuplicatedKeysWithDefaultCapacity()
{
    // Same as DuplicatedKeysWithInitialCapacity, but starting from the default
    // capacity: too many items with always-colliding hashcodes force a rehash.
    var hash = new Hashtable();
    const int Iterations = 1600;
    for (int i = 0; i < Iterations; i += 2)
    {
        hash.Add(new BadHashCode(i), i.ToString());
        hash.Add(new BadHashCode(i + 1), (i + 1).ToString());
        hash.Remove(new BadHashCode(i));
        hash.Remove(new BadHashCode(i + 1));
    }
    // After the collision churn, normal adds and lookups must still work.
    for (int i = 0; i < Iterations; i++)
    {
        hash.Add(i.ToString(), i);
    }
    for (int i = 0; i < Iterations; i++)
    {
        Assert.Equal(i, hash[i.ToString()]);
    }
}
[Theory]
[InlineData(0)]
[InlineData(100)]
public static void Clone(int count)
{
Hashtable hash1 = Helpers.CreateStringHashtable(count);
Helpers.PerformActionOnAllHashtableWrappers(hash1, hash2 =>
{
Hashtable clone = (Hashtable)hash2.Clone();
Assert.Equal(hash2.Count, clone.Count);
Assert.Equal(hash2.IsSynchronized, clone.IsSynchronized);
Assert.Equal(hash2.IsFixedSize, clone.IsFixedSize);
Assert.Equal(hash2.IsReadOnly, clone.IsReadOnly);
for (int i = 0; i < clone.Count; i++)
{
string key = "Key_" + i;
string value = "Value_" + i;
Assert.True(clone.ContainsKey(key));
Assert.True(clone.ContainsValue(value));
Assert.Equal(value, clone[key]);
}
});
}
[Fact]
public static void Clone_IsShallowCopy()
{
var hash = new Hashtable();
for (int i = 0; i < 10; i++)
{
hash.Add(i, new Foo());
}
Hashtable clone = (Hashtable)hash.Clone();
for (int i = 0; i < clone.Count; i++)
{
Assert.Equal("Hello World", ((Foo)clone[i]).StringValue);
Assert.Same(hash[i], clone[i]);
}
// Change object in original hashtable
((Foo)hash[1]).StringValue = "Goodbye";
Assert.Equal("Goodbye", ((Foo)clone[1]).StringValue);
// Removing an object from the original hashtable doesn't change the clone
hash.Remove(0);
Assert.True(clone.Contains(0));
}
[Fact]
public static void Clone_HashtableCastedToInterfaces()
{
// Try to cast the returned object from Clone() to different types
Hashtable hash = Helpers.CreateIntHashtable(100);
ICollection collection = (ICollection)hash.Clone();
Assert.Equal(hash.Count, collection.Count);
IDictionary dictionary = (IDictionary)hash.Clone();
Assert.Equal(hash.Count, dictionary.Count);
}
[Fact]
public static void ContainsKey()
{
    // Verifies key lookup via both ContainsKey and Contains on every wrapper
    // (plain, synchronized, cloned) produced by the helper.
    Hashtable hash1 = Helpers.CreateStringHashtable(100);
    Helpers.PerformActionOnAllHashtableWrappers(hash1, hash2 =>
    {
        for (int i = 0; i < hash2.Count; i++)
        {
            string key = "Key_" + i;
            Assert.True(hash2.ContainsKey(key));
            Assert.True(hash2.Contains(key));
        }
        // Keys that were never added are not found, regardless of key type.
        Assert.False(hash2.ContainsKey("Non Existent Key"));
        Assert.False(hash2.Contains("Non Existent Key"));
        Assert.False(hash2.ContainsKey(101));
        // Was a copy/paste duplicate of the string check; mirror ContainsKey(101).
        Assert.False(hash2.Contains(101));
        // A removed key is no longer found.
        string removedKey = "Key_1";
        hash2.Remove(removedKey);
        Assert.False(hash2.ContainsKey(removedKey));
        Assert.False(hash2.Contains(removedKey));
    });
}
[Fact]
public static void ContainsKey_EqualObjects()
{
var hash1 = new Hashtable();
Helpers.PerformActionOnAllHashtableWrappers(hash1, hash2 =>
{
var foo1 = new Foo() { StringValue = "Goodbye" };
var foo2 = new Foo() { StringValue = "Goodbye" };
hash2.Add(foo1, 101);
Assert.True(hash2.ContainsKey(foo2));
Assert.True(hash2.Contains(foo2));
int i1 = 0x10;
int i2 = 0x100;
long l1 = (((long)i1) << 32) + i2; // Create two longs with same hashcode
long l2 = (((long)i2) << 32) + i1;
hash2.Add(l1, 101);
hash2.Add(l2, 101); // This will cause collision bit of the first entry to be set
Assert.True(hash2.ContainsKey(l1));
Assert.True(hash2.Contains(l1));
hash2.Remove(l1); // Remove the first item
Assert.False(hash2.ContainsKey(l1));
Assert.False(hash2.Contains(l1));
Assert.True(hash2.ContainsKey(l2));
Assert.True(hash2.Contains(l2));
});
}
[Fact]
public static void ContainsKey_NullKey_ThrowsArgumentNullException()
{
var hash1 = new Hashtable();
Helpers.PerformActionOnAllHashtableWrappers(hash1, hash2 =>
{
Assert.Throws<ArgumentNullException>("key", () => hash2.ContainsKey(null)); // Key is null
Assert.Throws<ArgumentNullException>("key", () => hash2.Contains(null)); // Key is null
});
}
[Fact]
public static void ContainsValue()
{
Hashtable hash1 = Helpers.CreateStringHashtable(100);
Helpers.PerformActionOnAllHashtableWrappers(hash1, hash2 =>
{
for (int i = 0; i < hash2.Count; i++)
{
string value = "Value_" + i;
Assert.True(hash2.ContainsValue(value));
}
Assert.False(hash2.ContainsValue("Non Existent Value"));
Assert.False(hash2.ContainsValue(101));
Assert.False(hash2.ContainsValue(null));
hash2.Add("Key_101", null);
Assert.True(hash2.ContainsValue(null));
string removedKey = "Key_1";
string removedValue = "Value_1";
hash2.Remove(removedKey);
Assert.False(hash2.ContainsValue(removedValue));
});
}
[Fact]
public static void ContainsValue_EqualObjects()
{
var hash1 = new Hashtable();
Helpers.PerformActionOnAllHashtableWrappers(hash1, hash2 =>
{
var foo1 = new Foo() { StringValue = "Goodbye" };
var foo2 = new Foo() { StringValue = "Goodbye" };
hash2.Add(101, foo1);
Assert.True(hash2.ContainsValue(foo2));
});
}
[Fact]
public static void Keys_ModifyingHashtable_ModifiesCollection()
{
    // The Keys collection is a live view: removing an entry from the hashtable
    // must also remove its key from a previously obtained Keys collection.
    Hashtable hash = Helpers.CreateStringHashtable(100);
    ICollection keys = hash.Keys;
    hash.Remove("Key_0");
    foreach (object key in keys)
    {
        Assert.NotEqual("Key_0", key);
    }
}
[Fact]
public static void Remove_SameHashcode()
{
    // We want to add and delete items (with the same hashcode) to the hashtable in such a way that the
    // hashtable does not expand but has to tread through collision-bit-set positions to insert the new
    // elements. A default hashtable has size 11 (with the default load factor of 0.72), which should mean
    // the hashtable does not expand as long as we have at most 7 elements at any given time.
    var hash = new Hashtable();
    var arrList = new ArrayList();
    for (int i = 0; i < 7; i++)
    {
        var hashConfuse = new BadHashCode(i);
        arrList.Add(hashConfuse);
        hash.Add(hashConfuse, i);
    }
    var rand = new Random(-55);
    int iCount = 7;
    for (int i = 0; i < 100; i++)
    {
        // Every tracked key must still map to its value (xUnit: expected first).
        for (int j = 0; j < 7; j++)
        {
            Assert.Equal(((BadHashCode)arrList[j]).Value, hash[arrList[j]]);
        }
        // Delete 3 elements from the hashtable and replace them with fresh colliding keys.
        for (int j = 0; j < 3; j++)
        {
            int iElement = rand.Next(6);
            hash.Remove(arrList[iElement]);
            Assert.False(hash.ContainsValue(null));
            arrList.RemoveAt(iElement);
            int testInt = iCount++;
            var hashConfuse = new BadHashCode(testInt);
            arrList.Add(hashConfuse);
            hash.Add(hashConfuse, testInt);
        }
    }
}
[Fact]
public static void SynchronizedProperties()
{
// Ensure Synchronized correctly reflects a wrapped hashtable
var hash1 = Helpers.CreateStringHashtable(100);
var hash2 = Hashtable.Synchronized(hash1);
Assert.Equal(hash1.Count, hash2.Count);
Assert.Equal(hash1.IsReadOnly, hash2.IsReadOnly);
Assert.Equal(hash1.IsFixedSize, hash2.IsFixedSize);
Assert.True(hash2.IsSynchronized);
Assert.Equal(hash1.SyncRoot, hash2.SyncRoot);
for (int i = 0; i < hash2.Count; i++)
{
Assert.Equal("Value_" + i, hash2["Key_" + i]);
}
}
[Fact]
public static void Synchronized_NullTable_ThrowsArgumentNullException()
{
Assert.Throws<ArgumentNullException>("table", () => Hashtable.Synchronized(null)); // Table is null
}
[Fact]
public static void Values_ModifyingHashtable_ModifiesCollection()
{
    // The Values collection is a live view: removing an entry from the hashtable
    // must also remove its value from a previously obtained Values collection.
    Hashtable hash = Helpers.CreateStringHashtable(100);
    ICollection values = hash.Values;
    hash.Remove("Key_0");
    foreach (object value in values)
    {
        Assert.NotEqual("Value_0", value);
    }
}
// Shared verification helper: checks hash1 against the (optional) source
// dictionary hash2 it was constructed from and the (optional) comparer ikc.
// Assumes int keys/values 0..99 when hash2 is supplied (Helpers.CreateIntHashtable).
private static void VerifyHashtable(ComparableHashtable hash1, Hashtable hash2, IEqualityComparer ikc)
{
    if (hash2 == null)
    {
        Assert.Equal(0, hash1.Count);
    }
    else
    {
        // Make sure that the constructor imports all keys and values
        Assert.Equal(hash2.Count, hash1.Count);
        for (int i = 0; i < 100; i++)
        {
            Assert.True(hash1.ContainsKey(i));
            Assert.True(hash1.ContainsValue(i));
        }
        // Make sure the new and old hashtables are not linked
        hash2.Clear();
        for (int i = 0; i < 100; i++)
        {
            Assert.True(hash1.ContainsKey(i));
            Assert.True(hash1.ContainsValue(i));
        }
    }
    Assert.Equal(ikc, hash1.EqualityComparer);
    Assert.False(hash1.IsFixedSize);
    Assert.False(hash1.IsReadOnly);
    Assert.False(hash1.IsSynchronized);
    // Make sure we can add to the hashtable
    int count = hash1.Count;
    for (int i = count; i < count + 100; i++)
    {
        hash1.Add(i, i);
        Assert.True(hash1.ContainsKey(i));
        Assert.True(hash1.ContainsValue(i));
    }
}
// Hashtable subclass that mirrors every public constructor and exposes the
// protected EqualityComparer property, so tests can verify which comparer a
// given constructor overload actually stored.
private class ComparableHashtable : Hashtable
{
    public ComparableHashtable() : base() { }
    public ComparableHashtable(int capacity) : base(capacity) { }
    public ComparableHashtable(int capacity, float loadFactor) : base(capacity, loadFactor) { }
    public ComparableHashtable(int capacity, IEqualityComparer ikc) : base(capacity, ikc) { }
    public ComparableHashtable(IEqualityComparer ikc) : base(ikc) { }
    public ComparableHashtable(IDictionary d) : base(d) { }
    public ComparableHashtable(IDictionary d, float loadFactor) : base(d, loadFactor) { }
    public ComparableHashtable(IDictionary d, IEqualityComparer ikc) : base(d, ikc) { }
    public ComparableHashtable(IDictionary d, float loadFactor, IEqualityComparer ikc) : base(d, loadFactor, ikc) { }
    public ComparableHashtable(int capacity, float loadFactor, IEqualityComparer ikc) : base(capacity, loadFactor, ikc) { }
    // Re-expose the protected comparer for assertions.
    public new IEqualityComparer EqualityComparer => base.EqualityComparer;
}
// Key type whose hashcode is always 0, forcing every instance into the same
// bucket; equality is by the wrapped value. Used to exercise collision paths.
private class BadHashCode
{
    public BadHashCode(int value)
    {
        Value = value;
    }
    public int Value { get; private set; }
    public override bool Equals(object o)
    {
        var other = o as BadHashCode;
        if (other == null)
        {
            throw new ArgumentException("is not BadHashCode type actual " + o.GetType(), nameof(o));
        }
        return Value.Equals(other.Value);
    }
    // Every instance hashes to the same bucket on purpose.
    public override int GetHashCode()
    {
        return 0;
    }
    public override string ToString()
    {
        return Value.ToString();
    }
}
// Reference type with value-based equality on its string payload. Used to
// test both reference aliasing (shallow copies) and equal-but-distinct keys.
private class Foo
{
    public string StringValue { get; set; } = "Hello World";
    public override bool Equals(object obj)
    {
        var other = obj as Foo;
        if (other == null)
        {
            return false;
        }
        return StringValue == other.StringValue;
    }
    public override int GetHashCode()
    {
        return StringValue.GetHashCode();
    }
}
}
/// <summary>
/// A hashtable can have a race condition:
/// A read operation on hashtable has three steps:
/// (1) calculate the hash and find the slot number.
/// (2) compare the hashcode, if equal, go to step 3. Otherwise end.
/// (3) compare the key, if equal, go to step 4. Otherwise end.
/// (4) return the value contained in the bucket.
/// The problem is that after step 3 and before step 4 a writer can kick in, remove the old item and add a new one
/// in the same bucket. To make this happen easily, two longs with the same hashcode are used.
/// </summary>
public class Hashtable_ItemThreadSafetyTests
{
    private object _key1;
    private object _key2;
    private object _value1 = "value1";
    private object _value2 = "value2";
    private Hashtable _hash;
    // NOTE(review): nothing ever sets _errorOccurred to true; reader failures
    // actually surface as exceptions from Task.WaitAll/Wait below. The flag is
    // kept so the loop conditions (and behavior) are unchanged.
    private bool _errorOccurred = false;
    private bool _timeExpired = false;
    private const int MAX_TEST_TIME_MS = 10000; // 10 seconds
    [Fact]
    [OuterLoop]
    public void GetItem_ThreadSafety()
    {
        int i1 = 0x10;
        int i2 = 0x100;
        // Set up key1 and key2 so they are different values but have the same hashcode.
        // long.GetHashCode() XORs the first 32 bits with the last 32 bits.
        long l1 = (((long)i1) << 32) + i2;
        long l2 = (((long)i2) << 32) + i1;
        _key1 = l1;
        _key2 = l2;
        _hash = new Hashtable(3); // Just one item will be in the hashtable at a time
        int taskCount = 3;
        var readers1 = new Task[taskCount];
        var readers2 = new Task[taskCount];
        Stopwatch stopwatch = Stopwatch.StartNew();
        for (int i = 0; i < readers1.Length; i++)
        {
            readers1[i] = Task.Run(new Action(ReaderFunction1));
        }
        for (int i = 0; i < readers2.Length; i++)
        {
            readers2[i] = Task.Run(new Action(ReaderFunction2));
        }
        Task writer = Task.Run(new Action(WriterFunction));
        var spin = new SpinWait();
        // Spin until the time budget is spent (or an error flag would be raised).
        while (!_errorOccurred && !_timeExpired)
        {
            if (MAX_TEST_TIME_MS < stopwatch.ElapsedMilliseconds)
            {
                _timeExpired = true;
            }
            spin.SpinOnce();
        }
        Task.WaitAll(readers1);
        Task.WaitAll(readers2);
        writer.Wait();
        Assert.False(_errorOccurred);
    }
    // Reads key1 continuously; must never observe the value stored under key2.
    private void ReaderFunction1()
    {
        while (!_timeExpired)
        {
            object value = _hash[_key1];
            if (value != null)
            {
                // xUnit convention: expected value first.
                Assert.NotEqual(_value2, value);
            }
        }
    }
    // Reads key2 continuously; must never observe the value stored under key1.
    private void ReaderFunction2()
    {
        while (!_errorOccurred && !_timeExpired)
        {
            object value = _hash[_key2];
            if (value != null)
            {
                Assert.NotEqual(_value1, value);
            }
        }
    }
    // Alternates adding/removing the two colliding keys to open the race window
    // described in the class comment.
    private void WriterFunction()
    {
        while (!_errorOccurred && !_timeExpired)
        {
            _hash.Add(_key1, _value1);
            _hash.Remove(_key1);
            _hash.Add(_key2, _value2);
            _hash.Remove(_key2);
        }
    }
}
public class Hashtable_SynchronizedTests
{
    private Hashtable _hash2;
    private int _iNumberOfElements = 20;
    [Fact]
    [OuterLoop]
    public void SynchronizedThreadSafety()
    {
        const int NumberOfWorkers = 3;
        // Hashtable.Synchronized returns a thread-safe wrapper. Exercise it by
        // having several tasks add items concurrently, then remove them all.
        var hash1 = new Hashtable();
        _hash2 = Hashtable.Synchronized(hash1);
        var workers = new Task[NumberOfWorkers];
        for (int i = 0; i < workers.Length; i++)
        {
            var name = "Thread worker " + i;
            var task = new Action(() => AddElements(name));
            workers[i] = Task.Run(task);
        }
        Task.WaitAll(workers);
        // Every worker must have added all of its elements exactly once.
        // (xUnit convention: expected value first, actual second.)
        Assert.Equal(_iNumberOfElements * NumberOfWorkers, _hash2.Count);
        for (int i = 0; i < NumberOfWorkers; i++)
        {
            for (int j = 0; j < _iNumberOfElements; j++)
            {
                string strValue = "Thread worker " + i + "_" + j;
                Assert.True(_hash2.Contains(strValue));
            }
        }
        // We cannot make an assumption on the order of these items, but
        // now we are going to remove all of them concurrently.
        workers = new Task[NumberOfWorkers];
        for (int i = 0; i < workers.Length; i++)
        {
            string name = "Thread worker " + i;
            var task = new Action(() => RemoveElements(name));
            workers[i] = Task.Run(task);
        }
        Task.WaitAll(workers);
        Assert.Equal(0, _hash2.Count);
    }
    // Adds this worker's elements; keys are "<worker name>_<index>".
    private void AddElements(string strName)
    {
        for (int i = 0; i < _iNumberOfElements; i++)
        {
            _hash2.Add(strName + "_" + i, "string_" + i);
        }
    }
    // Removes the elements previously added under the given worker name.
    private void RemoveElements(string strName)
    {
        for (int i = 0; i < _iNumberOfElements; i++)
        {
            _hash2.Remove(strName + "_" + i);
        }
    }
}
public class Hashtable_SyncRootTests
{
    private Hashtable _hashDaughter;
    private Hashtable _hashGrandDaughter;
    private const int NumberOfElements = 100;
    [Fact]
    public void SyncRoot()
    {
        // Different hashtables have different SyncRoots
        var hash1 = new Hashtable();
        var hash2 = new Hashtable();
        Assert.NotEqual(hash1.SyncRoot, hash2.SyncRoot);
        // xUnit convention: expected value first, actual second.
        Assert.Equal(typeof(object), hash1.SyncRoot.GetType());
        // Cloned hashtables have different SyncRoots
        hash1 = new Hashtable();
        hash2 = Hashtable.Synchronized(hash1);
        Hashtable hash3 = (Hashtable)hash2.Clone();
        Assert.NotEqual(hash2.SyncRoot, hash3.SyncRoot);
        Assert.NotEqual(hash1.SyncRoot, hash3.SyncRoot);
        // Testing SyncRoot is not as simple as its implementation looks. This is
        // the working scenario we have in mind:
        // 1) Create your down-to-earth mother Hashtable
        // 2) Get a synchronized wrapper from it
        // 3) Get a synchronized wrapper from 2)
        // 4) Get a synchronized wrapper of the mother from 1)
        // 5) All of these should share the mother's SyncRoot
        var hashMother = new Hashtable();
        for (int i = 0; i < NumberOfElements; i++)
        {
            hashMother.Add("Key_" + i, "Value_" + i);
        }
        Hashtable hashSon = Hashtable.Synchronized(hashMother);
        _hashGrandDaughter = Hashtable.Synchronized(hashSon);
        _hashDaughter = Hashtable.Synchronized(hashMother);
        // (This assertion was previously duplicated three times; once is enough.)
        Assert.Equal(hashSon.SyncRoot, hashMother.SyncRoot);
        Assert.Equal(_hashGrandDaughter.SyncRoot, hashMother.SyncRoot);
        Assert.Equal(_hashDaughter.SyncRoot, hashMother.SyncRoot);
        // We are going to rumble with the Hashtables with some threads
        int iNumberOfWorkers = 30;
        var workers = new Task[iNumberOfWorkers];
        var ts2 = new Action(RemoveElements);
        for (int iThreads = 0; iThreads < iNumberOfWorkers; iThreads += 2)
        {
            var name = "Thread_worker_" + iThreads;
            var ts1 = new Action(() => AddMoreElements(name));
            workers[iThreads] = Task.Run(ts1);
            workers[iThreads + 1] = Task.Run(ts2);
        }
        Task.WaitAll(workers);
        // Check:
        // Either there should be some elements (the new ones we added and/or the
        // original ones) or none. Build the superset of every possible entry.
        var hshPossibleValues = new Hashtable();
        for (int i = 0; i < NumberOfElements; i++)
        {
            hshPossibleValues.Add("Key_" + i, "Value_" + i);
        }
        for (int i = 0; i < iNumberOfWorkers; i++)
        {
            hshPossibleValues.Add("Key_Thread_worker_" + i, "Thread_worker_" + i);
        }
        IDictionaryEnumerator idic = hashMother.GetEnumerator();
        while (idic.MoveNext())
        {
            Assert.True(hshPossibleValues.ContainsKey(idic.Key));
            Assert.True(hshPossibleValues.ContainsValue(idic.Value));
        }
    }
    // Adds a single element through the doubly-wrapped (granddaughter) view.
    private void AddMoreElements(string threadName)
    {
        _hashGrandDaughter.Add("Key_" + threadName, threadName);
    }
    // Clears everything through the singly-wrapped (daughter) view.
    private void RemoveElements()
    {
        _hashDaughter.Clear();
    }
}
}
| |
//
// Klak - Utilities for creative coding with Unity
//
// Copyright (C) 2016 Keijiro Takahashi
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
using UnityEngine;
using UnityEditor;
using System.Collections.Generic;
using System;
using System.Linq;
using Graphs = UnityEditor.Graphs;
namespace Klak.Wiring.Patcher
{
// Specialized editor GUI class
public class GraphGUI : Graphs.GraphGUI
{
#region Node selection stack
// This is needed to keep selections over graph reconstruction (SyncWithPatch).
// We don't care about edge selection.
Stack<string> _selectionStack;
// Saves the names of the currently selected nodes so the selection can be
// restored after the graph is reconstructed (see SyncWithPatch).
public void PushSelection()
{
    if (_selectionStack != null)
        _selectionStack.Clear();
    else
        _selectionStack = new Stack<string>();
    foreach (Node node in selection)
    {
        if (node == null) continue;
        _selectionStack.Push(node.name);
    }
}
// Restores the selection saved by PushSelection, looking nodes up by name in
// the (possibly rebuilt) graph; names that no longer exist are dropped.
// NOTE(review): assumes PushSelection was called first (_selectionStack non-null).
public void PopSelection()
{
    selection.Clear();
    while (_selectionStack.Count > 0)
    {
        var found = graph.GetNodeByName(_selectionStack.Pop());
        if (found != null) selection.Add(found);
    }
    _selectionStack.Clear();
    // Propagate the restored selection to the Unity editor selection.
    UpdateUnitySelection();
}
#endregion
#region Pasteboard support
// This is a pretty hackish implementation. It exploits the standard
// pasteboard APIs (unsupported) in a weird way.
int _pasteOffset;
// Copy: copies the selected nodes by selecting their runtime GameObjects in
// the scene hierarchy and invoking Unity's (unsupported) pasteboard API.
protected override void CopyNodesToPasteboard()
{
    // Do nothing if nothing is selected.
    if (selection.Count == 0) return;
    // Select the nodes in the scene hierarchy.
    Selection.objects = selection.Select(n => ((Node)n).runtimeInstance.gameObject).ToArray();
    // Copy them to the pasteboard.
    Unsupported.CopyGameObjectsToPasteboard();
    // Recover the selection.
    UpdateUnitySelection();
    // Reset the pasting offset counter (used by PasteNodesFromPasteboard).
    _pasteOffset = 1;
}
// Paste: pastes GameObjects from the pasteboard into the patch via two
// temporary "paste point" objects, re-syncs the graph, then selects the
// pasted nodes with a growing positional offset so repeated pastes don't
// stack on top of each other.
protected override void PasteNodesFromPasteboard()
{
    var g = (Graph)graph;
    // Create a paste point. It has two level depth.
    var point1 = new GameObject("<PastePoint1>");
    var point2 = new GameObject("<PastePoint2>");
    point1.transform.parent = g.patch.transform;
    point2.transform.parent = point1.transform;
    // Select the paste point in the scene hierarchy.
    Selection.activeGameObject = point2;
    // Paste from the pasteboard.
    Unsupported.PasteGameObjectsFromPasteboard();
    // Point2 (placeholder) is not needed anymore.
    DestroyImmediate(point2);
    // Move pasted objects to the right position.
    var instances = point1.GetComponentsInChildren<Wiring.NodeBase>();
    foreach (var i in instances)
        i.transform.parent = g.patch.transform;
    // Point1 (group) is not needed anymore.
    DestroyImmediate(point1);
    // Resync the graph.
    g.Invalidate();
    g.SyncWithPatch();
    // Select and offset the pasted nodes.
    // NOTE(review): node names appear to be instance IDs (see the lookup below);
    // confirm against Graph.SyncWithPatch.
    ClearSelection();
    foreach (var i in instances)
    {
        var node = graph[i.GetInstanceID().ToString()];
        node.position.position += Vector2.one * (_pasteOffset * kNodeGridSize * 2);
        node.Dirty();
        selection.Add(node);
    }
    _pasteOffset++;
}
// Duplicate: implemented as a copy immediately followed by a paste, reusing
// the pasteboard-based implementations above.
protected override void DuplicateNodesThroughPasteboard()
{
    // Nothing selected, nothing to duplicate.
    if (selection.Count == 0) return;
    CopyNodesToPasteboard();
    PasteNodesFromPasteboard();
}
#endregion
#region Customized GUI
// Custom edge renderer, created lazily on first access.
public override Graphs.IEdgeGUI edgeGUI {
    get {
        return m_EdgeGUI ?? (m_EdgeGUI = new EdgeGUI { host = this });
    }
}
// Draws the contents of a single node subwindow: input pins down the
// left, the node's own controls, then right-aligned output pins.
// Also handles node selection and dragging.
public override void NodeGUI(Graphs.Node node)
{
SelectNode(node);
// Input slots (pins on the left edge).
foreach (var slot in node.inputSlots)
LayoutSlot(slot, slot.title, false, true, true, Styles.pinIn);
// Node-specific UI provided by the node itself.
node.NodeUI(this);
// Output slots, pushed to the right edge by a flexible spacer.
foreach (var slot in node.outputSlots)
{
EditorGUILayout.BeginHorizontal();
GUILayout.FlexibleSpace();
LayoutSlot(slot, slot.title, true, false, true, Styles.pinOut);
EditorGUILayout.EndHorizontal();
}
DragNodes();
}
// Draws the entire graph view: one GUILayout window per node, then the
// edges, rubber-band selection and the custom context menu. Must be
// called from the host editor window's OnGUI.
public override void OnGraphGUI()
{
// Show node subwindows.
m_Host.BeginWindows();
foreach (var node in graph.nodes)
{
// Recapture the variable for the delegate.
var node2 = node;
// Subwindow style (active/nonactive)
var isActive = selection.Contains(node);
var style = Graphs.Styles.GetNodeStyle(node.style, node.color, isActive);
// Show the subwindow of this node.
// The instance ID doubles as a stable window ID.
node.position = GUILayout.Window(
node.GetInstanceID(), node.position,
delegate { NodeGUI(node2); },
node.title, style, GUILayout.Width(150)
);
}
// Workaround: If there is no node in the graph, put an empty
// window to avoid corruption due to a bug.
if (graph.nodes.Count == 0)
GUILayout.Window(0, new Rect(0, 0, 1, 1), delegate {}, "", "MiniLabel");
m_Host.EndWindows();
// Graph edges
edgeGUI.DoEdges();
edgeGUI.DoDraggedEdge();
// Mouse drag
DragSelection(new Rect(-5000, -5000, 10000, 10000));
// Context menu
ShowCustomContextMenu();
HandleMenuEvents();
}
#endregion
#region Customized context menu
// Builds and shows the right-click context menu. Its contents depend
// on what is under selection: nodes, edges, or empty canvas.
void ShowCustomContextMenu()
{
// Only cares about single right click.
if (Event.current.type != EventType.MouseDown) return;
if (Event.current.button != 1) return;
if (Event.current.clickCount != 1) return;
// Consume this mouse event.
Event.current.Use();
// Record the current mouse position
// (later used as the spawn position for nodes created from the menu).
m_contextMenuMouseDownPosition = Event.current.mousePosition;
// Build a context menu.
var menu = new GenericMenu();
if (selection.Count != 0)
{
// Node operations
menu.AddItem(new GUIContent("Cut"), false, ContextMenuCallback, "Cut");
menu.AddItem(new GUIContent("Copy"), false, ContextMenuCallback, "Copy");
menu.AddItem(new GUIContent("Duplicate"), false, ContextMenuCallback, "Duplicate");
menu.AddSeparator("");
menu.AddItem(new GUIContent("Delete"), false, ContextMenuCallback, "Delete");
}
else if (edgeGUI.edgeSelection.Count != 0)
{
// Edge operations
menu.AddItem(new GUIContent("Delete"), false, ContextMenuCallback, "Delete");
}
else
{
// Clicked on empty space.
menu.AddItem(new GUIContent("Paste"), false, ContextMenuCallback, "Paste");
}
// "Create" menu
menu.AddSeparator("");
NodeFactory.AddNodeItemsToMenu(menu, CreateMenuItemCallback);
menu.ShowAsContext();
}
// Replays a context-menu choice ("Cut", "Copy", "Paste", ...) as an
// editor command event on the host window, so the standard command
// handling picks it up.
void ContextMenuCallback(object data)
{
    var command = (string)data;
    m_Host.SendEvent(EditorGUIUtility.CommandEvent(command));
}
// Invoked from the "Create" submenu with the node component type to
// instantiate. Creates the runtime GameObject under the patch, wraps
// it in a graph node at the recorded right-click position, selects it
// and registers an undo step.
void CreateMenuItemCallback(object data)
{
var type = data as Type;
// Create a game object.
var name = ObjectNames.NicifyVariableName(type.Name);
var gameObject = new GameObject(name);
var nodeRuntime = (Wiring.NodeBase)gameObject.AddComponent(type);
gameObject.transform.parent = ((Graph)graph).patch.transform;
// Add it to the graph.
var node = Node.Create(nodeRuntime);
node.position = new Rect((Vector2)m_contextMenuMouseDownPosition, Vector2.zero);
node.Dirty();
graph.AddNode(node);
// Select the new node.
ClearSelection();
selection.Add(node);
UpdateUnitySelection();
// Make it undo-able.
Undo.RegisterCreatedObjectUndo(gameObject, "New Node");
}
#endregion
}
}
| |
//
// WaitQueues.cs
//
// Author(s):
// Alessio Parma <[email protected]>
//
// Copyright (c) 2012-2016 Alessio Parma <[email protected]>
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
namespace DIBRIS.Dessert.Resources
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using Core;
using Troschuetz.Random;
using Troschuetz.Random.Generators;
using Finsa.CodeServices.Common.Collections;
/// <summary>
/// A queue of items waiting on a simulation resource. Extends
/// ICollection with priority-aware insertion and head removal; what
/// "first" means depends on the queue's wait policy.
/// </summary>
interface IWaitQueue<T> : ICollection<T>
{
/// <summary>The item currently at the head of the queue.</summary>
T First { get; }
/// <summary>The wait policy (FIFO, LIFO, Priority, Random) implemented by this queue.</summary>
WaitPolicy Policy { get; }
/// <summary>Adds an item with the given priority; non-priority policies ignore it.</summary>
void Add(T item, double priority);
/// <summary>Removes and returns the head item.</summary>
T RemoveFirst();
}
/// <summary>
/// Common base for the wait queue implementations. ICollection members
/// that make no sense for a wait queue (IsReadOnly, Clear, CopyTo)
/// throw a DessertException: reaching them indicates an internal error,
/// since the simulator is not expected to call them.
/// </summary>
abstract class WaitQueueBase<T> : IWaitQueue<T>
{
#region ICollection Members
public bool IsReadOnly
{
// Not part of the wait-queue contract.
get { throw new DessertException(ErrorMessages.InternalError); }
}
// Plain ICollection.Add enqueues with the default priority.
public void Add(T item)
{
Add(item, Default.Priority);
}
public void Clear()
{
// Not supported on wait queues.
throw new DessertException(ErrorMessages.InternalError);
}
public void CopyTo(T[] array, int arrayIndex)
{
// Not supported on wait queues.
throw new DessertException(ErrorMessages.InternalError);
}
IEnumerator IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
#endregion
public abstract int Count { get; }
public abstract T First { get; }
public abstract WaitPolicy Policy { get; }
public abstract void Add(T item, double priority);
public abstract bool Contains(T item);
public abstract IEnumerator<T> GetEnumerator();
public abstract bool Remove(T item);
public abstract T RemoveFirst();
}
/// <summary>
/// Factory helpers for wait queues and for the (item, priority) pairs
/// they store internally.
/// </summary>
static class WaitQueue
{
    /// <summary>
    /// Creates the wait queue implementation matching <paramref name="waitPolicy"/>.
    /// </summary>
    /// <exception cref="ArgumentException">The policy is not a known enum member.</exception>
    public static IWaitQueue<T> New<T>(WaitPolicy waitPolicy, SimEnvironment env)
    {
        switch (waitPolicy) {
            case WaitPolicy.FIFO:
                return new FifoWaitQueue<T>();
            case WaitPolicy.LIFO:
                return new LifoWaitQueue<T>();
            case WaitPolicy.Priority:
                return new PriorityWaitQueue<T>();
            case WaitPolicy.Random:
                // The random queue draws per-item priorities from the environment's RNG.
                return new RandomWaitQueue<T>(env.Random);
            default:
                var msg = ErrorMessages.InvalidEnum<WaitPolicy>();
                throw new ArgumentException(msg);
        }
    }

    /// <summary>Creates an (item, priority) pair; ordering uses the second component only.</summary>
    public static Pair<T1, T2> NewPair<T1, T2>(T1 item1, T2 item2) where T2 : struct, IComparable<T2>
    {
        return new Pair<T1, T2>(item1, item2);
    }

    /// <summary>
    /// A pair that sorts by its second component (the priority) while its
    /// equality deliberately considers only the first component: queues can
    /// then remove an item via NewPair(item, 0) without knowing the priority
    /// it was enqueued with.
    /// </summary>
    public sealed class Pair<T1, T2> : IComparable<Pair<T1, T2>>, IEquatable<Pair<T1, T2>>
        where T2 : struct, IComparable<T2>
    {
        public readonly T1 Item1;
        readonly T2 _item2;

        public Pair(T1 item1, T2 item2)
        {
            Item1 = item1;
            _item2 = item2;
        }

        // Orders pairs by priority only.
        public int CompareTo(Pair<T1, T2> other)
        {
            return _item2.CompareTo(other._item2);
        }

        // Equality ignores the priority on purpose (see class summary).
        // A null argument must compare unequal per the IEquatable<T>
        // contract; the previous implementation dereferenced it and threw
        // a NullReferenceException instead.
        public bool Equals(Pair<T1, T2> other)
        {
            return !ReferenceEquals(null, other)
                && EqualityComparer<T1>.Default.Equals(Item1, other.Item1);
        }

        public override bool Equals(object obj)
        {
            if (ReferenceEquals(null, obj)) {
                return false;
            }
            return obj is Pair<T1, T2> && Equals((Pair<T1, T2>) obj);
        }

        // Consistent with Equals: hash only the first component.
        public override int GetHashCode()
        {
            return EqualityComparer<T1>.Default.GetHashCode(Item1);
        }
    }
}
/// <summary>
/// First-in first-out wait queue; the priority argument is ignored.
/// </summary>
sealed class FifoWaitQueue<T> : WaitQueueBase<T>
{
    // Backing list; the list head is the oldest (next-to-leave) element.
    readonly SinglyLinkedList<T> _list = new SinglyLinkedList<T>();

    public override int Count
    {
        get { return _list.Count; }
    }

    public override T First
    {
        get { return _list.First; }
    }

    public override WaitPolicy Policy
    {
        get { return WaitPolicy.FIFO; }
    }

    // New arrivals always join at the back; priority plays no role here.
    public override void Add(T item, double priority)
    {
        _list.AddLast(item);
    }

    public override bool Contains(T item)
    {
        return _list.Contains(item);
    }

    public override IEnumerator<T> GetEnumerator()
    {
        return _list.GetEnumerator();
    }

    public override bool Remove(T item)
    {
        Debug.Assert(_list.Contains(item));
        return _list.Remove(item);
    }

    public override T RemoveFirst()
    {
        var head = _list.First;
        _list.RemoveFirst();
        return head;
    }
}
/// <summary>
/// Last-in first-out (stack-like) wait queue; the priority argument is
/// ignored.
/// </summary>
sealed class LifoWaitQueue<T> : WaitQueueBase<T>
{
    // Backing list used as a stack; the list head is the newest element.
    readonly ThinLinkedList<T> _stack = new ThinLinkedList<T>();

    public override int Count
    {
        get { return _stack.Count; }
    }

    public override T First
    {
        get { return _stack.First; }
    }

    public override WaitPolicy Policy
    {
        get { return WaitPolicy.LIFO; }
    }

    // New arrivals are pushed on the front; priority plays no role here.
    public override void Add(T item, double priority)
    {
        _stack.AddFirst(item);
    }

    public override bool Contains(T item)
    {
        return _stack.Contains(item);
    }

    public override IEnumerator<T> GetEnumerator()
    {
        return _stack.GetEnumerator();
    }

    public override bool Remove(T item)
    {
        Debug.Assert(_stack.Contains(item));
        return _stack.Remove(item);
    }

    public override T RemoveFirst()
    {
        var top = _stack.First;
        _stack.RemoveFirst();
        return top;
    }
}
/// <summary>
/// Wait queue ordered by the priority given at insertion time; the head
/// is the element at index 0 of the ordered backing collection.
/// </summary>
sealed class PriorityWaitQueue<T> : WaitQueueBase<T>
{
// Pairs sort by their priority component; Pair equality compares the
// item only, which is what lets Contains/Remove below use a dummy
// priority of 0.0.
readonly OrderedCollection<WaitQueue.Pair<T, double>> _items;
public PriorityWaitQueue()
{
_items = OrderedCollection.New<WaitQueue.Pair<T, double>>(true);
}
public override int Count
{
get { return _items.Count; }
}
public override T First
{
get
{
Debug.Assert(_items.Count > 0);
return _items[0].Item1;
}
}
public override WaitPolicy Policy
{
get { return WaitPolicy.Priority; }
}
public override void Add(T item, double priority)
{
_items.Add(WaitQueue.NewPair(item, priority));
}
public override bool Contains(T item)
{
// The priority is irrelevant: Pair equality ignores it.
return _items.Contains(WaitQueue.NewPair(item, 0.0));
}
public override IEnumerator<T> GetEnumerator()
{
// Yields items in priority order, discarding the priorities.
// ReSharper disable LoopCanBeConvertedToQuery
foreach (var item in _items) {
yield return item.Item1;
}
// ReSharper restore LoopCanBeConvertedToQuery
}
public override bool Remove(T item)
{
// Again, the dummy priority works because Pair equality is item-only.
var tmpPair = WaitQueue.NewPair(item, 0.0);
Debug.Assert(_items.Contains(tmpPair));
return _items.Remove(tmpPair);
}
public override T RemoveFirst()
{
Debug.Assert(_items.Count > 0);
var first = _items[0].Item1;
_items.RemoveAt(0);
return first;
}
}
/// <summary>
/// Wait queue that serves items in a random order: each item is stored
/// with a random integer priority drawn at insertion time, and the
/// ordered backing collection sorts by that number.
/// </summary>
sealed class RandomWaitQueue<T> : WaitQueueBase<T>
{
// Pair equality compares the item only, which lets Contains/Remove
// below use a dummy priority of 0.
readonly OrderedCollection<WaitQueue.Pair<T, int>> _items;
readonly TRandom _random;
public RandomWaitQueue(TRandom random)
{
_items = OrderedCollection.New<WaitQueue.Pair<T, int>>(true);
_random = random;
}
public override int Count
{
get { return _items.Count; }
}
public override T First
{
get
{
Debug.Assert(_items.Count > 0);
return _items[0].Item1;
}
}
public override WaitPolicy Policy
{
get { return WaitPolicy.Random; }
}
public override void Add(T item, double priority)
{
// The caller's priority is ignored; a random rank decides the order.
_items.Add(WaitQueue.NewPair(item, _random.Next()));
}
public override bool Contains(T item)
{
return _items.Contains(WaitQueue.NewPair(item, 0));
}
public override IEnumerator<T> GetEnumerator()
{
// ReSharper disable LoopCanBeConvertedToQuery
foreach (var item in _items) {
yield return item.Item1;
}
// ReSharper restore LoopCanBeConvertedToQuery
}
public override bool Remove(T item)
{
// The dummy priority works because Pair equality is item-only.
var tmpPair = WaitQueue.NewPair(item, 0);
Debug.Assert(_items.Contains(tmpPair));
return _items.Remove(tmpPair);
}
public override T RemoveFirst()
{
Debug.Assert(_items.Count > 0);
var first = _items[0].Item1;
_items.RemoveAt(0);
return first;
}
}
}
| |
//-----------------------------------------------------------------------------
// Copyright (c) 2012 GarageGames, LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
// Creates (but does not submit) a scripted undo action of the given
// class, chained to BaseMaterialEdAction, with %desc as the name shown
// in the undo UI. Wrapped in push/popInstantGroup so the action object
// is not added to the current editor group.
function LabMat::createUndo(%this, %class, %desc) {
   pushInstantGroup();
   %undo = new UndoScriptAction() {
      class = %class;
      superClass = BaseMaterialEdAction;
      actionName = %desc;
   };
   popInstantGroup();
   return %undo;
}
// Registers an undo action with the editor's undo manager, unless undo
// recording is temporarily suppressed via LabMat.preventUndo (used
// while redo/undo handlers sync the GUI).
function LabMat::submitUndo(%this, %action) {
   if(%this.preventUndo)
      return;

   %action.addToManager(Editor.getUndoManager());
}
// Base redo handler for material editor undo actions. Intentionally a
// no-op, matching BaseMaterialEdAction::undo: concrete actions provide
// their own redo in their class namespace. The previous body called
// %this.redo(), which re-dispatches through the object's namespace and
// lands back in this very function whenever no subclass override
// exists, recursing until the script stack overflows.
function BaseMaterialEdAction::redo(%this) {
}
// Base undo handler; a deliberate no-op that concrete actions override.
function BaseMaterialEdAction::undo(%this) {
}
// Generic updateActiveMaterial redo/undo
// Redo: reapply a single material field change (%this.field set to
// %this.newValue). When the preview window is showing this material
// the change is applied to the preview copy, and to the real material
// only if live preview is on; otherwise the real material is updated
// directly. Field name/value are only known as strings, hence eval.
function ActionUpdateActiveMaterial::redo(%this) {
if( MaterialEditorPreviewWindow.isVisible() && LabMat.currentMaterial == %this.material ) {
/*
if( LabMat.currentMaterial != %this.material )
{
LabMat.currentObject = %this.object;
LabMat.setMode();
LabMat.setActiveMaterial(%this.material);
}
*/
eval("materialEd_previewMaterial." @ %this.field @ " = " @ %this.newValue @ ";");
materialEd_previewMaterial.flush();
materialEd_previewMaterial.reload();
if (LabMat.livePreview == true) {
eval("%this.material." @ %this.field @ " = " @ %this.newValue @ ";");
LabMat.currentMaterial.flush();
LabMat.currentMaterial.reload();
}
// Sync the GUI without recording another undo step.
LabMat.preventUndo = true;
LabMat.guiSync( materialEd_previewMaterial );
LabMat.setMaterialDirty();
LabMat.preventUndo = false;
} else {
eval("%this.material." @ %this.field @ " = " @ %this.newValue @ ";");
%this.material.flush();
%this.material.reload();
}
}
// Undo: mirror of redo above, restoring %this.oldValue instead of
// applying %this.newValue. Undo recording is suppressed for the whole
// handler so the GUI sync does not generate a fresh undo action.
function ActionUpdateActiveMaterial::undo(%this) {
LabMat.preventUndo = true;
if( MaterialEditorPreviewWindow.isVisible() && LabMat.currentMaterial == %this.material ) {
/*
if( LabMat.currentMaterial != %this.material )
{
LabMat.currentObject = %this.object;
LabMat.setMode();
LabMat.setActiveMaterial(%this.material);
}
*/
eval("materialEd_previewMaterial." @ %this.field @ " = " @ %this.oldValue @ ";");
materialEd_previewMaterial.flush();
materialEd_previewMaterial.reload();
if (LabMat.livePreview == true) {
eval("%this.material." @ %this.field @ " = " @ %this.oldValue @ ";");
LabMat.currentMaterial.flush();
LabMat.currentMaterial.reload();
}
LabMat.guiSync( materialEd_previewMaterial );
LabMat.setMaterialDirty();
} else {
eval("%this.material." @ %this.field @ " = " @ %this.oldValue @ ";");
%this.material.flush();
%this.material.reload();
}
LabMat.preventUndo = false;
}
// Special case updateActiveMaterial redo/undo
// Redo: reapply an animation-flags change for one texture layer
// (animFlags[%this.layer] set to %this.newValue). Same preview/live
// preview routing as the generic field action above, but indexing the
// per-layer animFlags array.
function ActionUpdateActiveMaterialAnimationFlags::redo(%this) {
if( MaterialEditorPreviewWindow.isVisible() && LabMat.currentMaterial == %this.material ) {
/*
if( LabMat.currentMaterial != %this.material )
{
LabMat.currentObject = %this.object;
LabMat.setMode();
LabMat.setActiveMaterial(%this.material);
}
*/
eval("materialEd_previewMaterial.animFlags[" @ %this.layer @ "] = " @ %this.newValue @ ";");
materialEd_previewMaterial.flush();
materialEd_previewMaterial.reload();
if (LabMat.livePreview == true) {
eval("%this.material.animFlags[" @ %this.layer @ "] = " @ %this.newValue @ ";");
LabMat.currentMaterial.flush();
LabMat.currentMaterial.reload();
}
LabMat.guiSync( materialEd_previewMaterial );
LabMat.setMaterialDirty();
} else {
eval("%this.material.animFlags[" @ %this.layer @ "] = " @ %this.newValue @ ";");
%this.material.flush();
%this.material.reload();
}
}
// Undo: mirror of the redo above, restoring %this.oldValue for the
// layer's animation flags.
function ActionUpdateActiveMaterialAnimationFlags::undo(%this) {
if( MaterialEditorPreviewWindow.isVisible() && LabMat.currentMaterial == %this.material ) {
eval("materialEd_previewMaterial.animFlags[" @ %this.layer @ "] = " @ %this.oldValue @ ";");
materialEd_previewMaterial.flush();
materialEd_previewMaterial.reload();
if (LabMat.livePreview == true) {
eval("%this.material.animFlags[" @ %this.layer @ "] = " @ %this.oldValue @ ";");
LabMat.currentMaterial.flush();
LabMat.currentMaterial.reload();
}
LabMat.guiSync( materialEd_previewMaterial );
LabMat.setMaterialDirty();
} else {
eval("%this.material.animFlags[" @ %this.layer @ "] = " @ %this.oldValue @ ";");
%this.material.flush();
%this.material.reload();
}
}
// Redo: rename the material to %this.newName and patch every reference
// in the mission; refresh the editor GUI if this material is showing.
function ActionUpdateActiveMaterialName::redo(%this) {
%this.material.setName(%this.newName);
LabMat.updateMaterialReferences( MissionGroup, %this.oldName, %this.newName );
if( MaterialEditorPreviewWindow.isVisible() && LabMat.currentMaterial == %this.material ) {
LabMat.guiSync( materialEd_previewMaterial );
LabMat.setMaterialDirty();
}
}
// Undo: restore the previous name and patch mission references back.
function ActionUpdateActiveMaterialName::undo(%this) {
%this.material.setName(%this.oldName);
LabMat.updateMaterialReferences( MissionGroup, %this.newName, %this.oldName );
if( MaterialEditorPreviewWindow.isVisible() && LabMat.currentMaterial == %this.material ) {
LabMat.guiSync( materialEd_previewMaterial );
LabMat.setMaterialDirty();
}
}
// Redo: reapply a refresh: copy %this.newMaterial's properties (and
// name) onto the target material, updating the preview copy first when
// the preview window is showing it. Marks the material NOT dirty —
// a refresh re-reads saved state, so there is nothing to persist.
function ActionRefreshMaterial::redo(%this) {
if( MaterialEditorPreviewWindow.isVisible() && LabMat.currentMaterial == %this.material ) {
%this.material.setName( %this.newName );
LabMat.copyMaterials( %this.newMaterial, materialEd_previewMaterial );
materialEd_previewMaterial.flush();
materialEd_previewMaterial.reload();
if (LabMat.livePreview == true) {
LabMat.copyMaterials( %this.newMaterial , %this.material );
%this.material.flush();
%this.material.reload();
}
LabMat.guiSync( materialEd_previewMaterial );
LabMat.setMaterialNotDirty();
} else {
LabMat.copyMaterials( %this.newMaterial, %this.material );
%this.material.flush();
%this.material.reload();
}
}
// Undo: restore the pre-refresh snapshot (%this.oldMaterial /
// %this.oldName). Unlike redo this marks the material dirty, since the
// restored state diverges from what is saved on disk.
function ActionRefreshMaterial::undo(%this) {
if( MaterialEditorPreviewWindow.isVisible() && LabMat.currentMaterial == %this.material ) {
%this.material.setName( %this.oldName );
LabMat.copyMaterials( %this.oldMaterial, materialEd_previewMaterial );
materialEd_previewMaterial.flush();
materialEd_previewMaterial.reload();
if (LabMat.livePreview == true) {
LabMat.copyMaterials( %this.oldMaterial, %this.material );
%this.material.flush();
%this.material.reload();
}
LabMat.guiSync( materialEd_previewMaterial );
LabMat.setMaterialDirty();
} else {
LabMat.copyMaterials( %this.oldMaterial, %this.material );
%this.material.flush();
%this.material.reload();
}
}
// Redo: reapply a "clear" by copying %this.newMaterial (the cleared
// state) onto the target material, routing through the preview copy
// when the preview window shows it.
function ActionClearMaterial::redo(%this) {
if( MaterialEditorPreviewWindow.isVisible() && LabMat.currentMaterial == %this.material ) {
LabMat.copyMaterials( %this.newMaterial, materialEd_previewMaterial );
materialEd_previewMaterial.flush();
materialEd_previewMaterial.reload();
if (LabMat.livePreview == true) {
LabMat.copyMaterials( %this.newMaterial, %this.material );
%this.material.flush();
%this.material.reload();
}
LabMat.guiSync( materialEd_previewMaterial );
LabMat.setMaterialDirty();
} else {
LabMat.copyMaterials( %this.newMaterial, %this.material );
%this.material.flush();
%this.material.reload();
}
}
// Undo: restore the pre-clear snapshot from %this.oldMaterial.
function ActionClearMaterial::undo(%this) {
if( MaterialEditorPreviewWindow.isVisible() && LabMat.currentMaterial == %this.material ) {
LabMat.copyMaterials( %this.oldMaterial, materialEd_previewMaterial );
materialEd_previewMaterial.flush();
materialEd_previewMaterial.reload();
if (LabMat.livePreview == true) {
LabMat.copyMaterials( %this.oldMaterial, %this.material );
%this.material.flush();
%this.material.reload();
}
LabMat.guiSync( materialEd_previewMaterial );
LabMat.setMaterialDirty();
} else {
LabMat.copyMaterials( %this.oldMaterial, %this.material );
%this.material.flush();
%this.material.reload();
}
}
// Redo: swap %this.fromMaterial for %this.toMaterial on the target.
// In "model" mode the object's material slot is remapped and the change
// is persisted; otherwise the material is assigned to a plain field on
// the object and postApply() commits it.
function ActionChangeMaterial::redo(%this) {
   if( %this.mode $= "model" ) {
      %this.object.changeMaterial( %this.materialTarget, %this.fromMaterial.getName(), %this.toMaterial.getName() );
      LabMat.currentObject = %this.object;

      // Only strip the material from its current file when it is not stored in
      // one of the editor's own files. The original test chained "!$=" with
      // "||", which is always true (no filename equals both strings at once),
      // so the removal ran unconditionally; "&&" expresses the intended guard.
      if( %this.toMaterial.getFilename() !$= "tlab/gui/oldmatSelector.ed.gui" &&
            %this.toMaterial.getFilename() !$= "tlab/materialEditor/scripts/materialEditor.ed.cs") {
         matEd_PersistMan.removeObjectFromFile(%this.toMaterial);
      }

      matEd_PersistMan.setDirty(%this.fromMaterial);
      matEd_PersistMan.setDirty(%this.toMaterial, %this.toMaterialNewFname);
      matEd_PersistMan.saveDirty();
      matEd_PersistMan.removeDirty(%this.fromMaterial);
      matEd_PersistMan.removeDirty(%this.toMaterial);
   } else {
      // Non-model targets expose the material as a plain field on the object.
      eval("%this.object." @ %this.materialTarget @ " = " @ %this.toMaterial.getName() @ ";");
      LabMat.currentObject.postApply();
   }

   if( MaterialEditorPreviewWindow.isVisible() )
      LabMat.setActiveMaterial( %this.toMaterial );
}
// Undo: swap %this.toMaterial back to %this.fromMaterial, mirroring
// redo with the two materials' roles reversed and restoring the old
// filename association (%this.toMaterialOldFname).
function ActionChangeMaterial::undo(%this) {
   if( %this.mode $= "model" ) {
      %this.object.changeMaterial( %this.materialTarget, %this.toMaterial.getName(), %this.fromMaterial.getName() );
      LabMat.currentObject = %this.object;

      // Only strip the material from its current file when it is not stored in
      // one of the editor's own files. The original test chained "!$=" with
      // "||", which is always true (no filename equals both strings at once),
      // so the removal ran unconditionally; "&&" expresses the intended guard.
      if( %this.toMaterial.getFilename() !$= "tlab/gui/oldmatSelector.ed.gui" &&
            %this.toMaterial.getFilename() !$= "tlab/materialEditor/scripts/materialEditor.ed.cs") {
         matEd_PersistMan.removeObjectFromFile(%this.toMaterial);
      }

      matEd_PersistMan.setDirty(%this.fromMaterial);
      matEd_PersistMan.setDirty(%this.toMaterial, %this.toMaterialOldFname);
      matEd_PersistMan.saveDirty();
      matEd_PersistMan.removeDirty(%this.fromMaterial);
      matEd_PersistMan.removeDirty(%this.toMaterial);
   } else {
      // Non-model targets expose the material as a plain field on the object.
      eval("%this.object." @ %this.materialTarget @ " = " @ %this.fromMaterial.getName() @ ";");
      LabMat.currentObject.postApply();
   }

   if( MaterialEditorPreviewWindow.isVisible() )
      LabMat.setActiveMaterial( %this.fromMaterial );
}
// Redo: re-create the material: point the editor at %this.newMaterial
// (when the preview is open), mirror it into the preview material, and
// take its name off the unlisted list so it shows in the browser again.
function ActionCreateNewMaterial::redo(%this) {
if( MaterialEditorPreviewWindow.isVisible() ) {
if( LabMat.currentMaterial != %this.newMaterial ) {
LabMat.currentObject = "";
LabMat.setMode();
LabMat.setActiveMaterial(%this.newMaterial);
}
LabMat.copyMaterials( %this.newMaterial, materialEd_previewMaterial );
materialEd_previewMaterial.flush();
materialEd_previewMaterial.reload();
LabMat.guiSync( materialEd_previewMaterial );
}
%idx = UnlistedMaterials.getIndexFromValue( %this.newMaterial.getName() );
UnlistedMaterials.erase( %idx );
}
// Undo: revert to %this.oldMaterial in the editor and hide the created
// material by putting its name back on the unlisted-materials list.
function ActionCreateNewMaterial::undo(%this) {
if( MaterialEditorPreviewWindow.isVisible() ) {
if( LabMat.currentMaterial != %this.oldMaterial ) {
LabMat.currentObject = "";
LabMat.setMode();
LabMat.setActiveMaterial(%this.oldMaterial);
}
LabMat.copyMaterials( %this.oldMaterial, materialEd_previewMaterial );
materialEd_previewMaterial.flush();
materialEd_previewMaterial.reload();
LabMat.guiSync( materialEd_previewMaterial );
}
UnlistedMaterials.add( "unlistedMaterials", %this.newMaterial.getName() );
}
// Redo: delete the material: switch the editor to the replacement
// material (when the preview is open), strip the deleted material from
// its file, and hide it via the unlisted-materials list.
function ActionDeleteMaterial::redo(%this) {
   if( MaterialEditorPreviewWindow.isVisible() ) {
      if( LabMat.currentMaterial != %this.newMaterial ) {
         LabMat.currentObject = "";
         LabMat.setMode();
         LabMat.setActiveMaterial(%this.newMaterial);
      }

      LabMat.copyMaterials( %this.newMaterial, materialEd_previewMaterial );
      materialEd_previewMaterial.flush();
      materialEd_previewMaterial.reload();
      LabMat.guiSync( materialEd_previewMaterial );
   }

   // Only remove the material from its file when it is not stored in one of
   // the editor's own files. The original test chained "!$=" with "||", which
   // is always true (no filename equals both strings at once), so the removal
   // ran unconditionally; "&&" expresses the intended guard.
   if( %this.oldMaterial.getFilename() !$= "tlab/gui/oldmatSelector.ed.gui" &&
         %this.oldMaterial.getFilename() !$= "tlab/materialEditor/scripts/materialEditor.ed.cs") {
      matEd_PersistMan.removeObjectFromFile(%this.oldMaterial);
   }

   UnlistedMaterials.add( "unlistedMaterials", %this.oldMaterial.getName() );
}
// Undo: restore the deleted material: show it again (when the preview
// is open), persist it back to its original file, and take it off the
// unlisted-materials list so it reappears in the browser.
function ActionDeleteMaterial::undo(%this) {
if( MaterialEditorPreviewWindow.isVisible() ) {
if( LabMat.currentMaterial != %this.oldMaterial ) {
LabMat.currentObject = "";
LabMat.setMode();
LabMat.setActiveMaterial(%this.oldMaterial);
}
LabMat.copyMaterials( %this.oldMaterial, materialEd_previewMaterial );
materialEd_previewMaterial.flush();
materialEd_previewMaterial.reload();
LabMat.guiSync( materialEd_previewMaterial );
}
matEd_PersistMan.setDirty(%this.oldMaterial, %this.oldMaterialFname);
matEd_PersistMan.saveDirty();
matEd_PersistMan.removeDirty(%this.oldMaterial);
%idx = UnlistedMaterials.getIndexFromValue( %this.oldMaterial.getName() );
UnlistedMaterials.erase( %idx );
}
| |
using System;
using System.Collections.Generic;
using System.Globalization;
using AllReady.Areas.Admin.Models.Validators;
using AllReady.Controllers;
using AllReady.DataAccess;
using AllReady.Models;
using AllReady.Security;
using AllReady.Services;
using Autofac;
using Autofac.Extensions.DependencyInjection;
using Autofac.Features.Variance;
using MediatR;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Identity.EntityFrameworkCore;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.PlatformAbstractions;
using AllReady.Security.Middleware;
using Newtonsoft.Json.Serialization;
using Geocoding;
using Geocoding.Google;
namespace AllReady
{
/// <summary>
/// ASP.NET Core startup type for the AllReady application: builds
/// configuration, registers services (with Autofac providing the final
/// container), and configures the HTTP request pipeline.
/// </summary>
public class Startup
{
    public Startup(IHostingEnvironment env)
    {
        // Setup configuration sources.
        var builder = new ConfigurationBuilder()
            .SetBasePath(env.ContentRootPath)
            .AddJsonFile("version.json")
            .AddJsonFile("config.json")
            .AddJsonFile($"config.{env.EnvironmentName}.json", optional: true)
            .AddEnvironmentVariables();
        if (env.IsDevelopment())
        {
            // This reads the configuration keys from the secret store.
            // For more details on using the user secret store see http://go.microsoft.com/fwlink/?LinkID=532709
            builder.AddUserSecrets();
            // This will push telemetry data through Application Insights pipeline faster, allowing you to view results immediately.
            builder.AddApplicationInsightsSettings(developerMode: true);
        }
        Configuration = builder.Build();
        Configuration["version"] = new ApplicationEnvironment().ApplicationVersion; // version in project.json
    }

    /// <summary>Root configuration: config.json, environment-specific overrides, env vars.</summary>
    public IConfiguration Configuration { get; set; }

    // This method gets called by the runtime. Use this method to add services to the container.
    public IServiceProvider ConfigureServices(IServiceCollection services)
    {
        // Add Application Insights data collection services to the services container.
        services.AddApplicationInsightsTelemetry(Configuration);
        // Add Entity Framework services to the services container.
        services.AddDbContext<AllReadyContext>(options => options.UseSqlServer(Configuration["Data:DefaultConnection:ConnectionString"]));
        // Bind strongly-typed settings sections.
        services.Configure<AzureStorageSettings>(Configuration.GetSection("Data:Storage"));
        services.Configure<DatabaseSettings>(Configuration.GetSection("Data:DefaultConnection"));
        services.Configure<EmailSettings>(Configuration.GetSection("Email"));
        services.Configure<SampleDataSettings>(Configuration.GetSection("SampleData"));
        services.Configure<GeneralSettings>(Configuration.GetSection("General"));
        // Add CORS support.
        // NOTE(review): AllowAnyOrigin combined with AllowCredentials is
        // forbidden by the CORS specification (browsers reject "*" with
        // credentials) — confirm the intended origin list.
        services.AddCors(options =>
        {
            options.AddPolicy("allReady",
                builder => builder.AllowAnyOrigin()
                    .AllowAnyHeader()
                    .AllowAnyMethod()
                    .AllowCredentials()
            );
        });
        // Add Identity services to the services container.
        services.AddIdentity<ApplicationUser, IdentityRole>(options =>
        {
            options.Password.RequiredLength = 10;
            options.Password.RequireNonAlphanumeric = false;
            options.Password.RequireDigit = true;
            options.Password.RequireUppercase = false;
            options.Cookies.ApplicationCookie.AccessDeniedPath = new PathString("/Home/AccessDenied");
        })
        .AddEntityFrameworkStores<AllReadyContext>()
        .AddDefaultTokenProviders();
        // Add Authorization rules for the app.
        // "SiteAdmin" claims satisfy the "OrgAdmin" policy as well.
        services.AddAuthorization(options =>
        {
            options.AddPolicy("OrgAdmin", b => b.RequireClaim(Security.ClaimTypes.UserType, "OrgAdmin", "SiteAdmin"));
            options.AddPolicy("SiteAdmin", b => b.RequireClaim(Security.ClaimTypes.UserType, "SiteAdmin"));
        });
        // Add MVC services to the services container.
        // DefaultContractResolver keeps JSON property names as declared
        // (no camel-casing).
        services.AddMvc().AddJsonOptions(options =>
            options.SerializerSettings.ContractResolver = new DefaultContractResolver());
        //register MediatR
        //https://lostechies.com/jimmybogard/2016/07/19/mediatr-extensions-for-microsoft-dependency-injection-released/
        services.AddMediatR(typeof(Startup));
        // configure IoC support
        var container = CreateIoCContainer(services);
        return container.Resolve<IServiceProvider>();
    }

    // Builds the Autofac container from the framework service collection
    // plus the application's own registrations.
    private IContainer CreateIoCContainer(IServiceCollection services)
    {
        // todo: move these to a proper autofac module
        // Register application services.
        services.AddSingleton((x) => Configuration);
        services.AddTransient<IEmailSender, AuthMessageSender>();
        services.AddTransient<ISmsSender, AuthMessageSender>();
        services.AddTransient<IAllReadyDataAccess, AllReadyDataAccessEF7>();
        services.AddTransient<IDetermineIfATaskIsEditable, DetermineIfATaskIsEditable>();
        services.AddTransient<IValidateEventDetailModels, EventEditModelValidator>();
        services.AddTransient<ITaskSummaryModelValidator, TaskSummaryModelValidator>();
        services.AddTransient<IItineraryEditModelValidator, ItineraryEditModelValidator>();
        services.AddTransient<IOrganizationEditModelValidator, OrganizationEditModelValidator>();
        services.AddSingleton<IImageService, ImageService>();
        services.AddSingleton<IGeocoder, GoogleGeocoder>();
        services.AddTransient<SampleDataGenerator>();
        if (Configuration["Data:Storage:EnableAzureQueueService"] == "true")
        {
            // This setting is false by default. To enable queue processing you will
            // need to override the setting in your user secrets or env vars.
            services.AddTransient<IQueueStorageService, QueueStorageService>();
        }
        else
        {
            // this writer service will just write to the default logger
            services.AddTransient<IQueueStorageService, FakeQueueWriterService>();
        }
        var containerBuilder = new ContainerBuilder();
        // Contravariance support lets MediatR resolve handlers for base
        // notification types.
        containerBuilder.RegisterSource(new ContravariantRegistrationSource());
        containerBuilder.RegisterAssemblyTypes(typeof(Startup).Assembly).AsImplementedInterfaces();
        //Populate the container with services that were previously registered
        containerBuilder.Populate(services);
        var container = containerBuilder.Build();
        return container;
    }

    // Configure is called after ConfigureServices is called.
    // NOTE: this was previously declared "async void" so the trailing
    // CreateAdminUser() call could be awaited. An async void method cannot
    // be awaited or observed by the host, so startup could complete (and
    // requests be served) before the admin user existed, and any exception
    // from the awaited call would escape unobserved. The Startup.Configure
    // convention requires a void return, so the task is waited synchronously
    // instead (safe here: there is no synchronization context at startup).
    public void Configure(IApplicationBuilder app, IHostingEnvironment env, ILoggerFactory loggerFactory, SampleDataGenerator sampleData, AllReadyContext context,
        IConfiguration configuration)
    {
        // todo: in RC update we can read from a logging.json config file
        loggerFactory.AddConsole((category, level) =>
        {
            // Quieten framework categories below Information.
            if (category.StartsWith("Microsoft."))
            {
                return level >= LogLevel.Information;
            }
            return true;
        });
        if (env.IsDevelopment())
        {
            // this will go to the VS output window
            loggerFactory.AddDebug((category, level) =>
            {
                if (category.StartsWith("Microsoft."))
                {
                    return level >= LogLevel.Information;
                }
                return true;
            });
        }
        // CORS support
        app.UseCors("allReady");
        // Configure the HTTP request pipeline.
        var usCultureInfo = new CultureInfo("en-US");
        app.UseRequestLocalization(new RequestLocalizationOptions
        {
            SupportedCultures = new List<CultureInfo>(new[] { usCultureInfo }),
            SupportedUICultures = new List<CultureInfo>(new[] { usCultureInfo })
        });
        // Add Application Insights to the request pipeline to track HTTP request telemetry data.
        app.UseApplicationInsightsRequestTelemetry();
        // Add the following to the request pipeline only in development environment.
        if (env.IsDevelopment())
        {
            app.UseBrowserLink();
            app.UseDeveloperExceptionPage();
            app.UseDatabaseErrorPage();
        }
        else if (env.IsStaging())
        {
            app.UseDeveloperExceptionPage();
            app.UseDatabaseErrorPage();
        }
        else
        {
            // Add Error handling middleware which catches all application specific errors and
            // sends the request to the following path or controller action.
            app.UseExceptionHandler("/Home/Error");
        }
        // Track data about exceptions from the application. Should be configured after all error handling middleware in the request pipeline.
        app.UseApplicationInsightsExceptionTelemetry();
        // Add static files to the request pipeline.
        app.UseStaticFiles();
        // Add cookie-based authentication to the request pipeline.
        app.UseIdentity();
        // Add token-based protection to the request inject pipeline
        app.UseTokenProtection(new TokenProtectedResourceOptions
        {
            Path = "/api/request",
            PolicyName = "api-request-injest"
        });
        // Add authentication middleware to the request pipeline. You can configure options such as Id and Secret in the ConfigureServices method.
        // For more information see http://go.microsoft.com/fwlink/?LinkID=532715
        if (Configuration["Authentication:Facebook:AppId"] != null)
        {
            var options = new FacebookOptions
            {
                AppId = Configuration["Authentication:Facebook:AppId"],
                AppSecret = Configuration["Authentication:Facebook:AppSecret"],
                BackchannelHttpHandler = new FacebookBackChannelHandler(),
                UserInformationEndpoint = "https://graph.facebook.com/v2.5/me?fields=id,name,email,first_name,last_name"
            };
            options.Scope.Add("email");
            app.UseFacebookAuthentication(options);
        }
        if (Configuration["Authentication:MicrosoftAccount:ClientId"] != null)
        {
            var options = new MicrosoftAccountOptions
            {
                ClientId = Configuration["Authentication:MicrosoftAccount:ClientId"],
                ClientSecret = Configuration["Authentication:MicrosoftAccount:ClientSecret"]
            };
            app.UseMicrosoftAccountAuthentication(options);
        }
        if (Configuration["Authentication:Twitter:ConsumerKey"] != null)
        {
            var options = new TwitterOptions
            {
                ConsumerKey = Configuration["Authentication:Twitter:ConsumerKey"],
                ConsumerSecret = Configuration["Authentication:Twitter:ConsumerSecret"]
            };
            app.UseTwitterAuthentication(options);
        }
        if (Configuration["Authentication:Google:ClientId"] != null)
        {
            var options = new GoogleOptions
            {
                ClientId = Configuration["Authentication:Google:ClientId"],
                ClientSecret = Configuration["Authentication:Google:ClientSecret"]
            };
            app.UseGoogleAuthentication(options);
        }
        // Add MVC to the request pipeline.
        app.UseMvc(routes =>
        {
            routes.MapRoute(name: "areaRoute", template: "{area:exists}/{controller}/{action=Index}/{id?}");
            routes.MapRoute(name: "default", template: "{controller=Home}/{action=Index}/{id?}");
        });
        // Add sample data and test admin accounts if specified in Config.Json.
        // for production applications, this should either be set to false or deleted.
        if (env.IsDevelopment() || env.IsEnvironment("Staging"))
        {
            context.Database.Migrate();
        }
        if (Configuration["SampleData:InsertSampleData"] == "true")
        {
            sampleData.InsertTestData();
        }
        if (Configuration["SampleData:InsertTestUsers"] == "true")
        {
            // Block until the admin user exists; startup must not finish
            // before it does, and Configure cannot return a Task.
            sampleData.CreateAdminUser().GetAwaiter().GetResult();
        }
    }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Android.Content;
using Android.Views;
using Android.Views.Animations;
using ARelativeLayout = Android.Widget.RelativeLayout;
namespace Xamarin.Forms.Platform.Android.AppCompat
{
internal class Platform : BindableObject, IPlatform, IPlatformLayout, INavigation, IDisposable
{
	readonly Context _context;
	readonly PlatformRenderer _renderer;
	bool _disposed;
	bool _navAnimationInProgress;
	NavigationModel _navModel = new NavigationModel();

	/// <summary>
	/// Creates the platform host for a Forms application and subscribes to the
	/// activity-level back-button event. Dispose() must be called to unsubscribe.
	/// </summary>
	public Platform(Context context)
	{
		_context = context;
		_renderer = new PlatformRenderer(context, this);
		FormsAppCompatActivity.BackPressed += HandleBackPressed;
	}

	/// <summary>
	/// True while a push/pop animation is running. Setting it to true broadcasts
	/// a signal so any open context actions are closed before the transition.
	/// Back-button presses are swallowed while this is true (see HandleBackPressed).
	/// </summary>
	internal bool NavAnimationInProgress
	{
		get { return _navAnimationInProgress; }
		set
		{
			if (_navAnimationInProgress == value)
				return;
			_navAnimationInProgress = value;
			if (value)
				MessagingCenter.Send(this, CloseContextActionsSignalName);
		}
	}

	// The current root page, set via SetPage.
	Page Page { get; set; }

	public void Dispose()
	{
		if (_disposed)
			return;
		_disposed = true;
		// Tears down all root renderers and detaches the back-button handler.
		SetPage(null);
		FormsAppCompatActivity.BackPressed -= HandleBackPressed;
	}

	void INavigation.InsertPageBefore(Page page, Page before)
	{
		throw new InvalidOperationException("InsertPageBefore is not supported globally on Android, please use a NavigationPage.");
	}

	IReadOnlyList<Page> INavigation.ModalStack => _navModel.Modals.ToList();

	// Non-modal navigation is only supported through a NavigationPage, so the
	// global navigation stack is always empty.
	IReadOnlyList<Page> INavigation.NavigationStack => new List<Page>();

	Task<Page> INavigation.PopAsync()
	{
		return ((INavigation)this).PopAsync(true);
	}

	Task<Page> INavigation.PopAsync(bool animated)
	{
		throw new InvalidOperationException("PopAsync is not supported globally on Android, please use a NavigationPage.");
	}

	Task<Page> INavigation.PopModalAsync()
	{
		return ((INavigation)this).PopModalAsync(true);
	}

	/// <summary>
	/// Pops the topmost modal page, optionally animating it off-screen, and
	/// completes the returned task once the modal's container has been removed
	/// and disposed.
	/// </summary>
	Task<Page> INavigation.PopModalAsync(bool animated)
	{
		Page modal = _navModel.PopModal();
		modal.SendDisappearing();
		var source = new TaskCompletionSource<Page>();
		IVisualElementRenderer modalRenderer = Android.Platform.GetRenderer(modal);
		if (modalRenderer != null)
		{
			var modalContainer = modalRenderer.ViewGroup.Parent as ModalContainer;
			if (animated)
			{
				// Slide the container down off-screen, then clean up in OnEnd.
				modalContainer.Animate().TranslationY(_renderer.Height).SetInterpolator(new AccelerateInterpolator(1)).SetDuration(300).SetListener(new GenericAnimatorListener
				{
					OnEnd = a =>
					{
						modalContainer.RemoveFromParent();
						modalContainer.Dispose();
						source.TrySetResult(modal);
						_navModel.CurrentPage?.SendAppearing();
						// Release the capture so the listener doesn't keep the view alive.
						modalContainer = null;
					}
				});
			}
			else
			{
				modalContainer.RemoveFromParent();
				modalContainer.Dispose();
				source.TrySetResult(modal);
				_navModel.CurrentPage?.SendAppearing();
			}
		}
		else
		{
			// No renderer exists for the modal (it was never realized). Complete
			// the task anyway so awaiting callers do not hang forever.
			source.TrySetResult(modal);
		}
		return source.Task;
	}

	Task INavigation.PopToRootAsync()
	{
		return ((INavigation)this).PopToRootAsync(true);
	}

	Task INavigation.PopToRootAsync(bool animated)
	{
		throw new InvalidOperationException("PopToRootAsync is not supported globally on Android, please use a NavigationPage.");
	}

	Task INavigation.PushAsync(Page root)
	{
		return ((INavigation)this).PushAsync(root, true);
	}

	Task INavigation.PushAsync(Page root, bool animated)
	{
		throw new InvalidOperationException("PushAsync is not supported globally on Android, please use a NavigationPage.");
	}

	Task INavigation.PushModalAsync(Page modal)
	{
		return ((INavigation)this).PushModalAsync(modal, true);
	}

	/// <summary>
	/// Pushes a modal page onto the modal stack and presents it. SendAppearing
	/// is only raised if the modal is still the current page after presentation
	/// (it may have been popped while the animation was running).
	/// </summary>
	async Task INavigation.PushModalAsync(Page modal, bool animated)
	{
		_navModel.CurrentPage?.SendDisappearing();
		_navModel.PushModal(modal);
		modal.Platform = this;
		Task presentModal = PresentModal(modal, animated);
		await presentModal;
		// Verify that the modal is still on the stack
		if (_navModel.CurrentPage == modal)
			modal.SendAppearing();
	}

	void INavigation.RemovePage(Page page)
	{
		throw new InvalidOperationException("RemovePage is not supported globally on Android, please use a NavigationPage.");
	}

	/// <summary>
	/// Measures a visual element's desired size, converting the device-independent
	/// constraints to pixels for measurement and the result back again.
	/// Constraints &lt;= -1 mean "unconstrained" and map to an Unspecified spec.
	/// </summary>
	SizeRequest IPlatform.GetNativeSize(VisualElement view, double widthConstraint, double heightConstraint)
	{
		Performance.Start();
		// FIXME: potential crash — GetRenderer may return null for an unrealized view.
		IVisualElementRenderer viewRenderer = Android.Platform.GetRenderer(view);
		// negative numbers have special meanings to android they don't to us
		widthConstraint = widthConstraint <= -1 ? double.PositiveInfinity : _context.ToPixels(widthConstraint);
		heightConstraint = heightConstraint <= -1 ? double.PositiveInfinity : _context.ToPixels(heightConstraint);
		int width = !double.IsPositiveInfinity(widthConstraint)
			? MeasureSpecFactory.MakeMeasureSpec((int)widthConstraint, MeasureSpecMode.AtMost)
			: MeasureSpecFactory.MakeMeasureSpec(0, MeasureSpecMode.Unspecified);
		int height = !double.IsPositiveInfinity(heightConstraint)
			? MeasureSpecFactory.MakeMeasureSpec((int)heightConstraint, MeasureSpecMode.AtMost)
			: MeasureSpecFactory.MakeMeasureSpec(0, MeasureSpecMode.Unspecified);
		SizeRequest rawResult = viewRenderer.GetDesiredSize(width, height);
		if (rawResult.Minimum == Size.Zero)
			rawResult.Minimum = rawResult.Request;
		var result = new SizeRequest(new Size(_context.FromPixels(rawResult.Request.Width), _context.FromPixels(rawResult.Request.Height)),
			new Size(_context.FromPixels(rawResult.Minimum.Width), _context.FromPixels(rawResult.Minimum.Height)));
		Performance.Stop();
		return result;
	}

	/// <summary>
	/// Lays out the root page and re-measures/lays out any modal containers to
	/// fill the platform renderer's bounds (l/t/r/b are in pixels).
	/// </summary>
	void IPlatformLayout.OnLayout(bool changed, int l, int t, int r, int b)
	{
		if (changed)
			LayoutRootPage(Page, r - l, b - t);
		Android.Platform.GetRenderer(Page).UpdateLayout();
		for (var i = 0; i < _renderer.ChildCount; i++)
		{
			global::Android.Views.View child = _renderer.GetChildAt(i);
			if (child is ModalContainer)
			{
				// BUGFIX: height spec must be b - t (bottom minus top); the original
				// passed t - b, which is a negative height in Android coordinates.
				child.Measure(MeasureSpecFactory.MakeMeasureSpec(r - l, MeasureSpecMode.Exactly), MeasureSpecFactory.MakeMeasureSpec(b - t, MeasureSpecMode.Exactly));
				child.Layout(l, t, r, b);
			}
		}
	}

	protected override void OnBindingContextChanged()
	{
		// Propagate the platform's binding context down to the root page.
		SetInheritedBindingContext(Page, BindingContext);
		base.OnBindingContextChanged();
	}

	/// <summary>
	/// Replaces the root page. Any existing roots (including modals) have their
	/// renderers disposed and the navigation model is reset. Passing null simply
	/// tears everything down (used by Dispose).
	/// </summary>
	internal void SetPage(Page newRoot)
	{
		var layout = false;
		if (Page != null)
		{
			_renderer.RemoveAllViews();
			foreach (IVisualElementRenderer rootRenderer in _navModel.Roots.Select(Android.Platform.GetRenderer))
				rootRenderer.Dispose();
			_navModel = new NavigationModel();
			// The renderer already has a size, so the new root must be laid out eagerly.
			layout = true;
		}
		if (newRoot == null)
			return;
		_navModel.Push(newRoot, null);
		Page = newRoot;
		Page.Platform = this;
		AddChild(Page, layout);
		// Route the application's navigation calls through this platform.
		((Application)Page.RealParent).NavigationProxy.Inner = this;
	}

	// Creates and attaches a renderer for the page if it doesn't already have one.
	void AddChild(Page page, bool layout = false)
	{
		if (Android.Platform.GetRenderer(page) != null)
			return;
		Android.Platform.SetPageContext(page, _context);
		IVisualElementRenderer renderView = Android.Platform.CreateRenderer(page);
		Android.Platform.SetRenderer(page, renderView);
		if (layout)
			LayoutRootPage(page, _renderer.Width, _renderer.Height);
		_renderer.AddView(renderView.ViewGroup);
	}

	// Returns true when the back press was handled (or swallowed mid-animation).
	bool HandleBackPressed(object sender, EventArgs e)
	{
		if (NavAnimationInProgress)
			return true;
		Page root = _navModel.Roots.Last();
		bool handled = root.SendBackButtonPressed();
		return handled;
	}

	// Lays out a root page in device-independent units, reserving space for the
	// status bar on Lollipop+ (MasterDetailPage draws behind the status bar itself).
	void LayoutRootPage(Page page, int width, int height)
	{
		var activity = (FormsAppCompatActivity)_context;
		int statusBarHeight = Forms.IsLollipopOrNewer ? activity.GetStatusBarHeight() : 0;
		if (page is MasterDetailPage)
			page.Layout(new Rectangle(0, 0, _context.FromPixels(width), _context.FromPixels(height)));
		else
		{
			page.Layout(new Rectangle(0, _context.FromPixels(statusBarHeight), _context.FromPixels(width), _context.FromPixels(height - statusBarHeight)));
		}
	}

	/// <summary>
	/// Hosts the modal in a ModalContainer and, when animated, slides it up from
	/// the bottom of the screen. NavAnimationInProgress is held true for the
	/// duration of the animation.
	/// </summary>
	Task PresentModal(Page modal, bool animated)
	{
		var modalContainer = new ModalContainer(_context, modal);
		_renderer.AddView(modalContainer);
		var source = new TaskCompletionSource<bool>();
		NavAnimationInProgress = true;
		if (animated)
		{
			// Start fully off-screen, then animate to its final position.
			modalContainer.TranslationY = _renderer.Height;
			modalContainer.Animate().TranslationY(0).SetInterpolator(new DecelerateInterpolator(1)).SetDuration(300).SetListener(new GenericAnimatorListener
			{
				OnEnd = a =>
				{
					source.TrySetResult(false);
					NavAnimationInProgress = false;
					modalContainer = null;
				},
				OnCancel = a =>
				{
					source.TrySetResult(true);
					NavAnimationInProgress = false;
					modalContainer = null;
				}
			});
		}
		else
		{
			NavAnimationInProgress = false;
			source.TrySetResult(true);
		}
		return source.Task;
	}

	/// <summary>
	/// ViewGroup that hosts a modal page's renderer above a window-background
	/// view, laying the page out below the status bar (except MasterDetailPage).
	/// </summary>
	sealed class ModalContainer : ViewGroup
	{
		global::Android.Views.View _backgroundView;
		bool _disposed;
		Page _modal;
		IVisualElementRenderer _renderer;

		public ModalContainer(Context context, Page modal) : base(context)
		{
			_modal = modal;
			// Opaque backdrop so the page underneath doesn't show through.
			_backgroundView = new global::Android.Views.View(context);
			_backgroundView.SetWindowBackground();
			AddView(_backgroundView);
			Android.Platform.SetPageContext(modal, context);
			_renderer = Android.Platform.CreateRenderer(modal);
			Android.Platform.SetRenderer(modal, _renderer);
			AddView(_renderer.ViewGroup);
		}

		protected override void Dispose(bool disposing)
		{
			if (disposing && !_disposed)
			{
				_disposed = true;
				RemoveAllViews();
				if (_renderer != null)
				{
					_renderer.Dispose();
					_renderer = null;
					// Detach the renderer from the page so it can be re-rendered later.
					_modal.ClearValue(Android.Platform.RendererProperty);
					_modal = null;
				}
				if (_backgroundView != null)
				{
					_backgroundView.Dispose();
					_backgroundView = null;
				}
			}
			base.Dispose(disposing);
		}

		protected override void OnLayout(bool changed, int l, int t, int r, int b)
		{
			var activity = (FormsAppCompatActivity)Context;
			int statusBarHeight = Forms.IsLollipopOrNewer ? activity.GetStatusBarHeight() : 0;
			if (changed)
			{
				if (_modal is MasterDetailPage)
					_modal.Layout(new Rectangle(0, 0, activity.FromPixels(r - l), activity.FromPixels(b - t)));
				else
				{
					_modal.Layout(new Rectangle(0, activity.FromPixels(statusBarHeight), activity.FromPixels(r - l), activity.FromPixels(b - t - statusBarHeight)));
				}
				_backgroundView.Layout(0, statusBarHeight, r - l, b - t);
			}
			_renderer.UpdateLayout();
		}
	}

	#region Statics

	// Allows the platform to be used directly wherever a ViewGroup is expected.
	public static implicit operator ViewGroup(Platform canvas)
	{
		return canvas._renderer;
	}

	internal const string CloseContextActionsSignalName = "Xamarin.CloseContextActions";

	#endregion
}
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.