using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Windows;
using System.Windows.Controls;
using System.Windows.Media;
using System.Windows.Shapes;
using Microsoft.Msagl.Core.DataStructures;
using Microsoft.Msagl.Core.Geometry.Curves;
using Microsoft.Msagl.Core.Layout;
using Microsoft.Msagl.DebugHelpers;
using Microsoft.Msagl.Drawing;
using Microsoft.Msagl.Layout.LargeGraphLayout;
using Color = System.Windows.Media.Color;
using Edge = Microsoft.Msagl.Drawing.Edge;
using Ellipse = Microsoft.Msagl.Core.Geometry.Curves.Ellipse;
using LineSegment = Microsoft.Msagl.Core.Geometry.Curves.LineSegment;
using Node = Microsoft.Msagl.Drawing.Node;
using Point = Microsoft.Msagl.Core.Geometry.Point;
using Polyline = Microsoft.Msagl.Core.Geometry.Curves.Polyline;
using Shape = Microsoft.Msagl.Drawing.Shape;
using Size = System.Windows.Size;
using WpfLineSegment = System.Windows.Media.LineSegment;
namespace Microsoft.Msagl.GraphmapsWpfControl {
public class GraphmapsNode : IViewerNode, IInvalidatable {
readonly LgLayoutSettings lgSettings;
public Path BoundaryPath;
internal FrameworkElement FrameworkElementOfNodeForLabel;
readonly Func<Edge, GraphmapsEdge> funcFromDrawingEdgeToVEdge;
internal LgNodeInfo LgNodeInfo;
Subgraph subgraph;
Node node;
Border collapseButtonBorder;
Rectangle topMarginRect;
Path collapseSymbolPath;
Brush collapseSymbolPathInactive = Brushes.Silver;
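// ZIndex is the cluster nesting depth: the number of ancestor clusters above this
// node's geometry node, so more deeply nested nodes are drawn on top of their parents.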
internal int ZIndex {
get {
var geomNode = Node.GeometryNode;
if (geomNode == null)
return 0;
int ret = 0;
do {
if (geomNode.ClusterParents == null)
return ret;
geomNode = geomNode.ClusterParents.FirstOrDefault();
if (geomNode != null)
ret++;
else
return ret;
} while (true);
}
}
public Node Node {
get { return node; }
set {
node = value;
subgraph = node as Subgraph;
}
}
internal GraphmapsNode(Node node, LgNodeInfo lgNodeInfo, FrameworkElement frameworkElementOfNodeForLabelOfLabel,
Func<Edge, GraphmapsEdge> funcFromDrawingEdgeToVEdge, Func<double> pathStrokeThicknessFunc, LgLayoutSettings lgSettings)
{
this.lgSettings = lgSettings;
PathStrokeThicknessFunc = pathStrokeThicknessFunc;
LgNodeInfo = lgNodeInfo;
Node = node;
FrameworkElementOfNodeForLabel = frameworkElementOfNodeForLabelOfLabel;
this.funcFromDrawingEdgeToVEdge = funcFromDrawingEdgeToVEdge;
CreateNodeBoundaryPath();
if (FrameworkElementOfNodeForLabel != null)
{
FrameworkElementOfNodeForLabel.Tag = this; //get a backpointer to the VNode
Common.PositionFrameworkElement(FrameworkElementOfNodeForLabel, node.GeometryNode.Center, 1);
Panel.SetZIndex(FrameworkElementOfNodeForLabel, Panel.GetZIndex(BoundaryPath) + 1);
}
SetupSubgraphDrawing();
Node.GeometryNode.BeforeLayoutChangeEvent += GeometryNodeBeforeLayoutChangeEvent;
Node.Attr.VisualsChanged += (a, b) => Invalidate();
}
internal GraphmapsNode(Node node, LgNodeInfo lgNodeInfo, FrameworkElement frameworkElementOfNodeForLabelOfLabel,
Func<Edge, GraphmapsEdge> funcFromDrawingEdgeToVEdge, Func<double> pathStrokeThicknessFunc)
{
PathStrokeThicknessFunc = pathStrokeThicknessFunc;
LgNodeInfo = lgNodeInfo;
Node = node;
FrameworkElementOfNodeForLabel = frameworkElementOfNodeForLabelOfLabel;
this.funcFromDrawingEdgeToVEdge = funcFromDrawingEdgeToVEdge;
CreateNodeBoundaryPath();
if (FrameworkElementOfNodeForLabel != null) {
FrameworkElementOfNodeForLabel.Tag = this; //get a backpointer to the VNode
Common.PositionFrameworkElement(FrameworkElementOfNodeForLabel, node.GeometryNode.Center, 1);
Panel.SetZIndex(FrameworkElementOfNodeForLabel, Panel.GetZIndex(BoundaryPath) + 1);
}
SetupSubgraphDrawing();
Node.GeometryNode.BeforeLayoutChangeEvent += GeometryNodeBeforeLayoutChangeEvent;
Node.Attr.VisualsChanged += (a, b) => Invalidate();
}
internal IEnumerable<FrameworkElement> FrameworkElements {
get {
if (FrameworkElementOfNodeForLabel != null) yield return FrameworkElementOfNodeForLabel;
if (BoundaryPath != null) yield return BoundaryPath;
if (collapseButtonBorder != null) {
yield return collapseButtonBorder;
yield return topMarginRect;
yield return collapseSymbolPath;
}
}
}
void SetupSubgraphDrawing() {
if (subgraph == null) return;
SetupTopMarginBorder();
SetupCollapseSymbol();
}
void SetupTopMarginBorder() {
var cluster = (Cluster) subgraph.GeometryObject;
topMarginRect = new Rectangle {
Fill = Brushes.Transparent,
Width = Node.Width,
Height = cluster.RectangularBoundary.TopMargin
};
PositionTopMarginBorder(cluster);
SetZIndexAndMouseInteractionsForTopMarginRect();
}
void PositionTopMarginBorder(Cluster cluster) {
var box = cluster.BoundaryCurve.BoundingBox;
Common.PositionFrameworkElement(topMarginRect,
box.LeftTop + new Point(topMarginRect.Width/2, -topMarginRect.Height/2), 1);
}
void SetZIndexAndMouseInteractionsForTopMarginRect() {
topMarginRect.MouseEnter +=
(
(a, b) => {
collapseButtonBorder.Background = Common.BrushFromMsaglColor(subgraph.CollapseButtonColorActive);
collapseSymbolPath.Stroke = Brushes.Black;
}
);
topMarginRect.MouseLeave +=
(a, b) => {
collapseButtonBorder.Background = Common.BrushFromMsaglColor(subgraph.CollapseButtonColorInactive);
collapseSymbolPath.Stroke = Brushes.Silver;
};
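// Keep the transparent top-margin rectangle above everything else so it reliably
// receives the mouse events that highlight the collapse button.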
Panel.SetZIndex(topMarginRect, int.MaxValue);
}
void SetupCollapseSymbol() {
var collapseBorderSize = GetCollapseBorderSymbolSize();
Debug.Assert(collapseBorderSize > 0);
collapseButtonBorder = new Border {
Background = Common.BrushFromMsaglColor(subgraph.CollapseButtonColorInactive),
Width = collapseBorderSize,
Height = collapseBorderSize,
CornerRadius = new CornerRadius(collapseBorderSize/2)
};
Panel.SetZIndex(collapseButtonBorder, Panel.GetZIndex(BoundaryPath) + 1);
var collapseButtonCenter = GetCollapseButtonCenter(collapseBorderSize);
Common.PositionFrameworkElement(collapseButtonBorder, collapseButtonCenter, 1);
double w = collapseBorderSize*0.4;
collapseSymbolPath = new Path {
Data = CreateCollapseSymbolPath(collapseButtonCenter + new Point(0, -w/2), w),
Stroke = collapseSymbolPathInactive,
StrokeThickness = 1
};
Panel.SetZIndex(collapseSymbolPath, Panel.GetZIndex(collapseButtonBorder) + 1);
topMarginRect.MouseLeftButtonDown += TopMarginRectMouseLeftButtonDown;
}
/// <summary>
/// </summary>
public event Action<IViewerNode> IsCollapsedChanged;
void InvokeIsCollapsedChanged()
{
if (IsCollapsedChanged != null)
IsCollapsedChanged(this);
}
void TopMarginRectMouseLeftButtonDown(object sender, System.Windows.Input.MouseButtonEventArgs e) {
var pos = e.GetPosition(collapseButtonBorder);
if (pos.X <= collapseButtonBorder.Width && pos.Y <= collapseButtonBorder.Height && pos.X >= 0 && pos.Y >= 0) {
e.Handled = true;
var cluster = (Cluster)subgraph.GeometryNode;
cluster.IsCollapsed = !cluster.IsCollapsed;
InvokeIsCollapsedChanged();
}
}
double GetCollapseBorderSymbolSize() {
return ((Cluster) subgraph.GeometryNode).RectangularBoundary.TopMargin -
PathStrokeThickness/2 - 0.5;
}
Point GetCollapseButtonCenter(double collapseBorderSize) {
var box = subgraph.GeometryNode.BoundaryCurve.BoundingBox;
//cannot trust subgraph.GeometryNode.BoundingBox for a cluster
double offsetFromBoundaryPath = PathStrokeThickness/2 + 0.5;
var collapseButtonCenter = box.LeftTop + new Point(collapseBorderSize/2 + offsetFromBoundaryPath,
-collapseBorderSize/2 - offsetFromBoundaryPath);
return collapseButtonCenter;
}
/*
void FlipCollapsePath() {
var size = GetCollapseBorderSymbolSize();
var center = GetCollapseButtonCenter(size);
if (collapsePathFlipped) {
collapsePathFlipped = false;
collapseSymbolPath.RenderTransform = null;
}
else {
collapsePathFlipped = true;
collapseSymbolPath.RenderTransform = new RotateTransform(180, center.X, center.Y);
}
}
*/
Geometry CreateCollapseSymbolPath(Point center, double width) {
var pathGeometry = new PathGeometry();
var pathFigure = new PathFigure {StartPoint = Common.WpfPoint(center + new Point(-width, width))};
pathFigure.Segments.Add(new System.Windows.Media.LineSegment(Common.WpfPoint(center), true));
pathFigure.Segments.Add(
new System.Windows.Media.LineSegment(Common.WpfPoint(center + new Point(width, width)), true));
pathGeometry.Figures.Add(pathFigure);
return pathGeometry;
}
void GeometryNodeBeforeLayoutChangeEvent(object sender, LayoutChangeEventArgs e) {
var newBoundaryCurve = e.DataAfterChange as ICurve;
if (newBoundaryCurve != null) {
//just compare the bounding boxes for the time being
var nb = newBoundaryCurve.BoundingBox;
var box = Node.BoundingBox;
if (Math.Abs(nb.Width - box.Width) > 0.00001 || Math.Abs(nb.Height - box.Height) > 0.00001)
BoundaryCurveIsDirty = true;
}
else
BoundaryCurveIsDirty = true;
}
internal void CreateNodeBoundaryPath() {
BoundaryPath = new Path {Tag = this};
Panel.SetZIndex(BoundaryPath, ZIndex);
SetFillAndStroke();
if (Node.Label != null)
BoundaryPath.ToolTip = new ToolTip {Content = new TextBlock {Text = Node.LabelText}};
}
internal Func<double> PathStrokeThicknessFunc;
public double PathStrokeThickness
{
get
{
return this.Node.Attr.LineWidth;
//return PathStrokeThicknessFunc != null ? PathStrokeThicknessFunc() : this.Node.Attr.LineWidth;
}
}
void SetFillAndStroke()
{
BoundaryPath.Stroke = Common.BrushFromMsaglColor(Drawing.Color.Black);
//jyoti changed node color
//BoundaryPath.Stroke = Common.BrushFromMsaglColor(node.Attr.Color);
SetBoundaryFill();
//BoundaryPath.StrokeThickness = PathStrokeThickness;
//jyoti changed strokethickness
BoundaryPath.StrokeThickness = PathStrokeThickness / 2;
if (LgNodeInfo != null && LgNodeInfo.PartiteSet == 1)
BoundaryPath.StrokeThickness = (PathStrokeThickness*1.5);
var textBlock = FrameworkElementOfNodeForLabel as TextBlock;
if (textBlock != null)
{
textBlock.Foreground = Common.BrushFromMsaglColor(Drawing.Color.Black);
//jyoti changed node color
//var col = Node.Label.FontColor;
//textBlock.Foreground = Common.BrushFromMsaglColor(new Drawing.Color(col.A, col.R, col.G, col.B));
}
}
void SetBoundaryFill() {
//jyoti changed all node colors
BoundaryPath.Fill = Brushes.DarkGray;
if (LgNodeInfo != null && LgNodeInfo.Selected)
BoundaryPath.Fill = LgNodeInfo.Color;//Brushes.Red;
else if (LgNodeInfo != null && LgNodeInfo.SelectedNeighbor>0)
{
BoundaryPath.Fill = Brushes.Yellow;
}
return; // the legacy fill logic below is intentionally unreachable and kept for reference
if (LgNodeInfo == null) {
BoundaryPath.Fill = Brushes.Blue;
return;
}
var colBlack = new Drawing.Color(0, 0, 0);
if (!Node.Attr.Color.Equals(colBlack))
{
BoundaryPath.Fill = LgNodeInfo.Selected
? GetSelBrushColor()
: Common.BrushFromMsaglColor(Node.Attr.Color);
return;
}
BoundaryPath.Fill = LgNodeInfo.Selected
? GetSelBrushColor()
: (LgNodeInfo != null && LgNodeInfo.SlidingZoomLevel == 0
? Brushes.Aqua
: Common.BrushFromMsaglColor(Node.Attr.FillColor));
if (LgNodeInfo != null && !LgNodeInfo.Selected) {
BoundaryPath.Fill = (LgNodeInfo.ZoomLevel < 2
? Brushes.LightGreen
: (LgNodeInfo.ZoomLevel < 4 ? Brushes.LightBlue : Brushes.LightYellow));
}
}
public static void DrawFigure(StreamGeometryContext ctx, PathFigure figure)
{
ctx.BeginFigure(figure.StartPoint, figure.IsFilled, figure.IsClosed);
foreach (var segment in figure.Segments)
{
var lineSegment = segment as WpfLineSegment;
if (lineSegment != null) { ctx.LineTo(lineSegment.Point, lineSegment.IsStroked, lineSegment.IsSmoothJoin); continue; }
var bezierSegment = segment as BezierSegment;
if (bezierSegment != null) { ctx.BezierTo(bezierSegment.Point1, bezierSegment.Point2, bezierSegment.Point3, bezierSegment.IsStroked, bezierSegment.IsSmoothJoin); continue; }
var quadraticSegment = segment as QuadraticBezierSegment;
if (quadraticSegment != null) { ctx.QuadraticBezierTo(quadraticSegment.Point1, quadraticSegment.Point2, quadraticSegment.IsStroked, quadraticSegment.IsSmoothJoin); continue; }
var polyLineSegment = segment as PolyLineSegment;
if (polyLineSegment != null) { ctx.PolyLineTo(polyLineSegment.Points, polyLineSegment.IsStroked, polyLineSegment.IsSmoothJoin); continue; }
var polyBezierSegment = segment as PolyBezierSegment;
if (polyBezierSegment != null) { ctx.PolyBezierTo(polyBezierSegment.Points, polyBezierSegment.IsStroked, polyBezierSegment.IsSmoothJoin); continue; }
var polyQuadraticSegment = segment as PolyQuadraticBezierSegment;
if (polyQuadraticSegment != null) { ctx.PolyQuadraticBezierTo(polyQuadraticSegment.Points, polyQuadraticSegment.IsStroked, polyQuadraticSegment.IsSmoothJoin); continue; }
var arcSegment = segment as ArcSegment;
if (arcSegment != null) { ctx.ArcTo(arcSegment.Point, arcSegment.Size, arcSegment.RotationAngle, arcSegment.IsLargeArc, arcSegment.SweepDirection, arcSegment.IsStroked, arcSegment.IsSmoothJoin); continue; }
}
}
Geometry GetNodeDotEllipseGeometry(double nodeDotWidth) {
return new EllipseGeometry(Common.WpfPoint(Node.BoundingBox.Center), nodeDotWidth / 2,
nodeDotWidth / 2);
}
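// A StreamGeometry is cheaper for WPF to render than a PathGeometry, and freezing it
// makes it immutable and shareable, which is why the ellipse is re-emitted below as a
// frozen stream geometry.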
StreamGeometry GetNodeDotEllipseStreamGeometry(double nodeDotWidth) {
var geometry = new StreamGeometry();
using (var ctx = geometry.Open()) {
var ellipse = GetNodeDotEllipseGeometry(nodeDotWidth);
var figure = PathGeometry.CreateFromGeometry(ellipse).Figures[0];
DrawFigure(ctx, figure);
}
geometry.Freeze();
return geometry;
}
#region Implementation of IViewerObject
public DrawingObject DrawingObject {
get { return Node; }
}
public bool MarkedForDragging { get; set; }
#pragma warning disable 0067
public event EventHandler MarkedForDraggingEvent;
public event EventHandler UnmarkedForDraggingEvent;
#pragma warning restore 0067
#endregion
public IEnumerable<IViewerEdge> InEdges {
get { foreach (var e in Node.InEdges) yield return funcFromDrawingEdgeToVEdge(e); }
}
public IEnumerable<IViewerEdge> OutEdges {
get { foreach (var e in Node.OutEdges) yield return funcFromDrawingEdgeToVEdge(e); }
}
public IEnumerable<IViewerEdge> SelfEdges {
get { foreach (var e in Node.SelfEdges) yield return funcFromDrawingEdgeToVEdge(e); }
}
public void InvalidateNodeDot(double nodeDotWidth)
{
if (!Node.IsVisible)
{
foreach (var fe in FrameworkElements)
fe.Visibility = Visibility.Hidden;
return;
}
BoundaryPath.Data = GetNodeDotEllipseStreamGeometry(nodeDotWidth);
BoundaryCurveIsDirty = false;
}
public void HideNodeLabel()
{
FrameworkElementOfNodeForLabel.Visibility = Visibility.Hidden;
}
public void InvalidateNodeLabel(double labelHeight, double labelWidth, Point offset)
{
if (LgNodeInfo == null) return;
FrameworkElementOfNodeForLabel.Height = labelHeight;
FrameworkElementOfNodeForLabel.Width = labelWidth;
Common.PositionFrameworkElement(FrameworkElementOfNodeForLabel, Node.BoundingBox.Center+offset, 1);
if (Node.IsVisible) {
FrameworkElementOfNodeForLabel.Visibility = Visibility.Visible;
}
}
public void Invalidate() {
SetFillAndStroke();
}
public override string ToString() {
return Node.Id;
}
protected bool BoundaryCurveIsDirty { get; set; }
internal void DetouchFromCanvas(Canvas graphCanvas) {
if (BoundaryPath != null)
graphCanvas.Children.Remove(BoundaryPath);
if (FrameworkElementOfNodeForLabel != null)
graphCanvas.Children.Remove(FrameworkElementOfNodeForLabel);
}
byte Low(byte b)
{
return (byte)(b/3);
}
private Brush GetSelBrushColor()
{
if (lgSettings != null)
{
var col = lgSettings.GetNodeSelColor();
var brush = (SolidColorBrush)(new BrushConverter().ConvertFrom(col));
return brush;
}
else
{
return Brushes.Red;
}
}
internal void SetLowTransparency()
{
if (BoundaryPath != null) {
var col = Node.Attr.Color;
BoundaryPath.Stroke =
Common.BrushFromMsaglColor(new Drawing.Color(Low(col.A), Low(col.R), Low(col.G), Low(col.B)));
var fill = BoundaryPath.Fill as SolidColorBrush;
if (fill != null)
BoundaryPath.Fill =
new SolidColorBrush(Color.FromArgb(200, fill.Color.R, fill.Color.G, fill.Color.B));
}
var textBlock = FrameworkElementOfNodeForLabel as TextBlock;
if (textBlock != null)
{
var col = Node.Label.FontColor;
textBlock.Foreground = Common.BrushFromMsaglColor(new Drawing.Color(Low(col.A), Low(col.R), Low(col.G), Low(col.B)));
}
}
}
}
// ==================== next source file ====================
#region Namespaces
using System;
using System.Windows.Forms;
#endregion
namespace Epi.Windows.Controls
{
/// <summary>
/// A draggable label used in MakeView's questionnaire designer
/// </summary>
public class DragableLabel : PairedLabel, IDragable, IFieldControl
{
#region Private Members
private int x;
private int y;
private bool hasMoved = false;
private int fieldId;
private int horizontalDistanceToControl;
private int verticalDistanceToControl;
private bool isMouseDown = false;
private Epi.Fields.Field field;
private ControlTracker controlTracker;
private Enums.TrackerStatus trackerStatus;
#endregion
#region Constructors
/// <summary>
/// Constructor for the class
/// </summary>
public DragableLabel()
{
InitializeComponent();
}
#endregion
#region Override Methods
/// <summary>
/// Overrides the label's OnPaint event
/// </summary>
/// <param name="e">Parameters for the paint event</param>
protected override void OnPaint(System.Windows.Forms.PaintEventArgs e)
{
if (Text == string.Empty)
{
ControlPaint.DrawBorder(e.Graphics, this.ClientRectangle, System.Drawing.Color.LightGray, ButtonBorderStyle.Solid);
}
else
{
base.OnPaint (e);
}
}
#endregion
#region Private Methods
/// <summary>
/// Initializes the dragable label
/// </summary>
private void InitializeComponent()
{
base.DragOver += new System.Windows.Forms.DragEventHandler(this.DragableLabel_DragOver);
base.MouseMove += new System.Windows.Forms.MouseEventHandler(this.DragableLabel_MouseMove);
base.MouseLeave += new System.EventHandler(this.DragableLabel_MouseLeave);
base.MouseDown += new System.Windows.Forms.MouseEventHandler(this.DragableLabel_MouseDown);
}
#endregion
#region Public Properties
/// <summary>
/// Gets or sets the horizontal distance of the mouse from the edge of the label
/// </summary>
public int XOffset
{
get
{
return this.x;
}
set
{
this.x = value;
}
}
/// <summary>
/// Gets or sets the vertical distance of the mouse from the edge of the control
/// </summary>
public int YOffset
{
get
{
return this.y;
}
set
{
this.y = value;
}
}
/// <summary>
/// Gets or sets the ID of the MakeView field referenced by the control
/// </summary>
public int FieldId
{
get
{
return fieldId;
}
set
{
fieldId = value;
}
}
/// <summary>
/// Gets and sets the field this control is associated with.
/// </summary>
public Epi.Fields.Field Field
{
get
{
return field;
}
set
{
field = value;
}
}
/// <summary>
/// Gets and sets the ControlTracker this control is associated with.
/// </summary>
public ControlTracker ControlTracker
{
get
{
return controlTracker;
}
set
{
controlTracker = value;
}
}
/// <summary>
/// Gets or sets whether or not the dynamic control has moved
/// </summary>
public bool HasMoved
{
get
{
return hasMoved;
}
set
{
hasMoved = value;
}
}
/// <summary>
/// Gets the horizontal distance from the label to the control
/// </summary>
public int HorizontalDistanceToControl
{
get
{
return this.horizontalDistanceToControl;
}
}
/// <summary>
/// Gets the vertical distance from the label to the control
/// </summary>
public int VerticalDistanceToControl
{
get
{
return this.verticalDistanceToControl;
}
}
#endregion
#region Event Handlers
/// <summary>
/// Handles the mouse-down event of the label
/// </summary>
/// <param name="sender">.NET supplied object</param>
/// <param name="e">.NET supplied event parameters</param>
private void DragableLabel_MouseDown(object sender, System.Windows.Forms.MouseEventArgs e)
{
isMouseDown = true;
x = e.X;
y = e.Y;
if (this.LabelFor != null)
{
horizontalDistanceToControl = this.LabelFor.Left - this.Left;
verticalDistanceToControl = this.LabelFor.Top - this.Top;
}
}
/// <summary>
/// Handles the mouse-move event over the label
/// </summary>
/// <param name="sender">.NET supplied object</param>
/// <param name="e">.NET supplied event parameters</param>
private void DragableLabel_MouseMove(object sender, System.Windows.Forms.MouseEventArgs e)
{
if (isMouseDown)
{
DataObject data = new DataObject("DragControl",this);
this.DoDragDrop(data, DragDropEffects.Move);
isMouseDown = false;
this.hasMoved = true;
}
}
/// <summary>
/// Handles the mouse-leave event of the label
/// </summary>
/// <param name="sender">.NET supplied object</param>
/// <param name="e">.NET supplied event parameters</param>
private void DragableLabel_MouseLeave(object sender, System.EventArgs e)
{
isMouseDown = false;
}
/// <summary>
/// Handles the drag-over event of the label
/// </summary>
/// <param name="sender">.NET supplied object</param>
/// <param name="e">.NET supplied event parameters</param>
private void DragableLabel_DragOver(object sender, System.Windows.Forms.DragEventArgs e)
{
e.Effect = DragDropEffects.Move;
}
#endregion
#region IFieldControl Members
/// <summary>
/// IFieldControl implementation
/// </summary>
public bool IsFieldGroup
{
get
{
return false;
}
set
{
// do nothing
}
}
/// <summary>
/// IFieldControl implementation
/// </summary>
public Epi.Fields.GroupField GroupField
{
get
{
return null;
}
set
{
// do nothing
}
}
public ControlTracker Tracker
{
get { return controlTracker; }
set { controlTracker = value; }
}
public Enums.TrackerStatus TrackerStatus
{
get { return trackerStatus; }
set
{
controlTracker.TrackerStatus = value;
this.trackerStatus = value;
}
}
#endregion
}
}
// ==================== next source file ====================
//
// Combatant.cs
//
// Author:
// Trent McPheron <[email protected]>
//
// Copyright (c) 2010-2011 Trent McPheron
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
using System;
using Gtk;
using Glade;
namespace Arbiter {
public class Combatant : Bin {
///////////////////
// Fields
///////////////////
private Sport sport;
private float hp;
private short mp;
private int lastPrimary;
private int lastSecondary;
private bool primaryChosen;
private bool secondaryChosen;
private bool targetChosen;
private bool finalTwo;
[Widget] private HBox combatantWidget;
[Widget] private Label nameLabel;
[Widget] private Label mpLabel;
[Widget] private Entry hpEntry;
[Widget] private Entry mpEntry;
[Widget] private CheckButton priFancyCheck;
[Widget] private CheckButton priFeintCheck;
[Widget] private CheckButton secFancyCheck;
[Widget] private CheckButton secFeintCheck;
[Widget] private CheckButton sdCheck;
[Widget] private CheckButton eliminateCheck;
[Widget] private ComboBox primaryCombo;
[Widget] private ComboBox targetCombo;
[Widget] private ComboBox secondaryCombo;
[Widget] private Label targetLabel;
[Widget] private Label secondaryLabel;
///////////////////
// Properties
///////////////////
public string CName { get; private set; }
// Various aspects of resolution state.
public bool Acted { get; set; }
public bool Defended { get; set; }
public bool FullFancy { get; set; }
// Automatically handles entry updating.
public float HP {
get {
return hp;
} set {
hp = value;
hpEntry.Text = hp.ToString(sport.ScoreFormat);
}
}
public short MP {
get {
return mp;
} set {
mp = value;
mpEntry.Text = mp.ToString();
}
}
// Checks to see if the moves are valid.
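// A primary move is valid if it is "Disengage", or if it repeats neither of last
// round's moves and does not duplicate the chosen secondary (unless the secondary
// itself is "Disengage"). The secondary move is checked the same way, except it only
// needs to differ from last round's secondary.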
private bool PrimaryValid {
get {
return (primaryCombo.ActiveText == "Disengage") ||
(Primary != lastPrimary && Primary != lastSecondary &&
((Primary != Secondary) || (secondaryCombo.ActiveText ==
"Disengage")));
}
}
private bool SecondaryValid {
get {
return (secondaryCombo.ActiveText == "Disengage") ||
(Secondary != lastSecondary && ((Secondary != Primary) ||
(primaryCombo.ActiveText == "Disengage")));
}
}
// Checks to see if the combatant's entire selection is valid.
public bool Valid {
get {
return primaryChosen && ((secondaryChosen && targetChosen) ||
FinalTwo);
} set {
primaryChosen = secondaryChosen = targetChosen = value;
}
}
// These translate the selected items in the comboboxes into publicly
// accessible ints.
public int Primary {
get {
return primaryCombo.Active;
}
}
public int Secondary {
get {
return secondaryCombo.Active;
}
}
public int Target {
get {
return targetCombo.Active;
} set {
targetCombo.Active = value;
}
}
// These translate the active states of the checkboxes into publicly
// accessible bools.
public bool PriFancy {
get {
return priFancyCheck.Active;
}
}
public bool PriFeint {
get {
return priFeintCheck.Active;
}
}
public bool SecFancy {
get {
return secFancyCheck.Active;
}
}
public bool SecFeint {
get {
return secFeintCheck.Active;
}
}
public bool SD {
get {
return sdCheck.Active;
}
}
public bool Eliminate {
get {
return eliminateCheck.Active;
}
}
// This will be activated when there are only two combatants remaining in
// the brawl. It desensitizes the target and secondary move widgets.
public bool FinalTwo
{
get {
return finalTwo;
} set {
finalTwo = value;
lastPrimary = -1;
lastSecondary = -1;
secondaryCombo.Active = -1;
targetLabel.Sensitive = !value;
targetCombo.Sensitive = !value;
sdCheck.Sensitive = !value;
secondaryLabel.Sensitive = !value;
secondaryCombo.Sensitive = !value;
secFancyCheck.Sensitive = !value;
secFeintCheck.Sensitive = !value;
}
}
///////////////////
// Constructor
///////////////////
public Combatant (string name, float hp, short mp, Sport sport, bool sd) :
base() {
// Load the widgets.
XML xml = new XML("Combatant.glade", "combatantWidget");
xml.Autoconnect(this);
this.Add(combatantWidget);
// Set initial values.
this.sport = sport;
CName = name;
nameLabel.Markup = "<b>" + name + "</b>";
HP = hp;
MP = mp;
// Allow manual editing of HP and MP.
hpEntry.FocusOutEvent += delegate {
HP = Single.Parse(hpEntry.Text);
};
mpEntry.FocusOutEvent += delegate {
MP = Int16.Parse(mpEntry.Text);
};
hpEntry.Activated += delegate {
HP = Single.Parse(hpEntry.Text);
};
mpEntry.Activated += delegate {
MP = Int16.Parse(mpEntry.Text);
};
// Determine widget visibility.
mpEntry.NoShowAll = !(mpEntry.Visible = (sport.Fancies ||
sport.Feints));
mpLabel.NoShowAll = !(mpLabel.Visible = (sport.Fancies ||
sport.Feints));
priFancyCheck.NoShowAll = !(priFancyCheck.Visible = sport.Fancies);
priFeintCheck.NoShowAll = !(priFeintCheck.Visible = sport.Feints);
secFancyCheck.NoShowAll = !(secFancyCheck.Visible = sport.Fancies);
secFeintCheck.NoShowAll = !(secFeintCheck.Visible = sport.Feints);
sdCheck.NoShowAll = !(sdCheck.Visible = sd);
// Assign combobox lists.
ListStore ls = new ListStore(typeof(string));
foreach (string s in sport.Moves) {
ls.AppendValues(s);
}
primaryCombo.Model = ls;
secondaryCombo.Model = ls;
targetCombo.Model = Brawl.Order;
// Set last move selections and bools.
lastPrimary = -1;
lastSecondary = -1;
primaryChosen = false;
secondaryChosen = false;
targetChosen = false;
finalTwo = false;
// Checkbutton event handlers.
priFancyCheck.Toggled += delegate(object sender, EventArgs args) {
priFeintCheck.Active = false; VerifyMod(priFancyCheck);
};
priFeintCheck.Toggled += delegate(object sender, EventArgs args) {
priFancyCheck.Active = false; VerifyMod(priFeintCheck);
};
secFancyCheck.Toggled += delegate(object sender, EventArgs args) {
secFeintCheck.Active = false; VerifyMod(secFancyCheck);
};
secFeintCheck.Toggled += delegate(object sender, EventArgs args) {
secFancyCheck.Active = false; VerifyMod(secFeintCheck);
};
// Combobox event handlers. Both active and popup-shown are used
// because the active event is only emitted when the chosen item
// is different from the last, and the popup-shown event is
// emitted before the active item actually changes.
primaryCombo.AddNotification("popup-shown", VerifyPrimary);
secondaryCombo.AddNotification("popup-shown", VerifySecondary);
targetCombo.AddNotification("popup-shown", VerifyTarget);
primaryCombo.AddNotification("active", VerifyPrimary);
secondaryCombo.AddNotification("active", VerifySecondary);
targetCombo.AddNotification("active", VerifyTarget);
// Participate in size negotiation.
SizeRequested += delegate (object sender, SizeRequestedArgs args) {
args.Requisition = combatantWidget.SizeRequest();
};
SizeAllocated += delegate (object sender, SizeAllocatedArgs args) {
combatantWidget.Allocation = args.Allocation;
};
}
///////////////////
// Methods
///////////////////
// Verifies move selection.
public void VerifyPrimary (object sender, GLib.NotifyArgs args) {
// Since there's no undoer, it's safer to just ensure that a move
// is chosen...
primaryChosen = true;
// But color the combobox text depending on validity.
primaryCombo.Child.ModifyText(StateType.Normal, (PrimaryValid ?
new Gdk.Color(0, 128, 0) : new Gdk.Color(192, 0, 0)));
primaryCombo.Child.ModifyText(StateType.Prelight, (PrimaryValid ?
new Gdk.Color(0, 128, 0) : new Gdk.Color(192, 0, 0)));
// Do the same for the other box if it's been chosen yet.
if (secondaryChosen) {
secondaryCombo.Child.ModifyText(StateType.Normal, (SecondaryValid ?
new Gdk.Color(0, 128, 0) : new Gdk.Color(192, 0, 0)));
secondaryCombo.Child.ModifyText(StateType.Prelight, (SecondaryValid
? new Gdk.Color(0, 128, 0) : new Gdk.Color(192, 0, 0)));
}
// Check to see if the resolver can be enabled.
Brawl.CheckResolve();
}
// Verifies secondary move selection.
public void VerifySecondary (object sender, GLib.NotifyArgs args) {
// Since there's no undoer, it's safer to just ensure that a move
// is chosen...
secondaryChosen = true;
// But color the combobox text depending on validity.
secondaryCombo.Child.ModifyText(StateType.Normal, (SecondaryValid ?
new Gdk.Color(0, 128, 0) : new Gdk.Color(192, 0, 0)));
secondaryCombo.Child.ModifyText(StateType.Prelight, (SecondaryValid ?
new Gdk.Color(0, 128, 0) : new Gdk.Color(192, 0, 0)));
// Do the same for the other box if it's been chosen yet.
if (primaryChosen) {
primaryCombo.Child.ModifyText(StateType.Normal, (PrimaryValid ?
new Gdk.Color(0, 128, 0) : new Gdk.Color(192, 0, 0)));
primaryCombo.Child.ModifyText(StateType.Prelight, (PrimaryValid ?
new Gdk.Color(0, 128, 0) : new Gdk.Color(192, 0, 0)));
}
// Check to see if the resolver can be enabled.
Brawl.CheckResolve();
}
// Verifies target selection. Really, the only point of this one is to
// inform the main brawler that a target has been selected.
public void VerifyTarget (object sender, GLib.NotifyArgs args) {
targetChosen = true;
// Inform the caller if the combatant is targeting self.
targetCombo.Child.ModifyText(StateType.Normal, (targetCombo.ActiveText
!= CName ? new Gdk.Color(0, 128, 0) : new Gdk.Color(192, 0, 0)));
targetCombo.Child.ModifyText(StateType.Prelight, (targetCombo.ActiveText
!= CName ? new Gdk.Color(0, 128, 0) : new Gdk.Color(192, 0, 0)));
// Check to see if the resolver can be enabled.
Brawl.CheckResolve();
}
// Ensures a mod is not activated when the combatant has insufficient MP.
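// Convert.ToInt16(bool) yields 1 for true and 0 for false, so the sum below counts
// how many fancy/feint boxes are checked; if that exceeds the available MP, the
// newly toggled box is unchecked again.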
public void VerifyMod (CheckButton check) {
if ((Convert.ToInt16(PriFancy) + Convert.ToInt16(PriFeint) +
Convert.ToInt16(SecFancy) + Convert.ToInt16(SecFeint) > MP)) {
check.Active = false;
}
}
// Automatically makes the combatant's moves valid when manual elimination
// is requested.
public void VerifyElimination (object sender, EventArgs args) {
if (Eliminate) {
Valid = true;
}
Brawl.CheckResolve();
}
// Resets all the duelist's attributes for the next round.
public void Reset () {
// Uncheck all the checkboxes.
priFancyCheck.Active = false;
priFeintCheck.Active = false;
secFancyCheck.Active = false;
secFeintCheck.Active = false;
sdCheck.Active = false;
eliminateCheck.Active = false;
// Change the attributes of the combo boxes.
primaryCombo.Child.ModifyText(StateType.Normal,
new Gdk.Color(128, 128, 128));
primaryCombo.Child.ModifyText(StateType.Prelight,
new Gdk.Color(128, 128, 128));
secondaryCombo.Child.ModifyText(StateType.Normal,
new Gdk.Color(128, 128, 128));
secondaryCombo.Child.ModifyText(StateType.Prelight,
new Gdk.Color(128, 128, 128));
targetCombo.Child.ModifyText(StateType.Normal,
new Gdk.Color(128, 128, 128));
targetCombo.Child.ModifyText(StateType.Prelight,
new Gdk.Color(128, 128, 128));
// Store the last moves.
lastPrimary = Primary;
lastSecondary = Secondary;
// Reset the other variables.
Acted = false;
Defended = false;
FullFancy = false;
Valid = false;
}
}
}
// ==================== next source file ====================
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Fixtures.AcceptanceTestsAzureBodyDurationAllSync
{
using System;
using System.Linq;
using System.Collections.Generic;
using System.Diagnostics;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Microsoft.Rest.Serialization;
using Newtonsoft.Json;
using Microsoft.Rest.Azure;
using Models;
/// <summary>
/// Test Infrastructure for AutoRest
/// </summary>
public partial class AutoRestDurationTestService : ServiceClient<AutoRestDurationTestService>, IAutoRestDurationTestService, IAzureClient
{
/// <summary>
/// The base URI of the service.
/// </summary>
public Uri BaseUri { get; set; }
/// <summary>
/// Gets or sets json serialization settings.
/// </summary>
public JsonSerializerSettings SerializationSettings { get; private set; }
/// <summary>
/// Gets or sets json deserialization settings.
/// </summary>
public JsonSerializerSettings DeserializationSettings { get; private set; }
/// <summary>
/// Credentials needed for the client to connect to Azure.
/// </summary>
public ServiceClientCredentials Credentials { get; private set; }
/// <summary>
/// Gets or sets the preferred language for the response.
/// </summary>
public string AcceptLanguage { get; set; }
/// <summary>
/// Gets or sets the retry timeout in seconds for Long Running Operations.
/// Default value is 30.
/// </summary>
public int? LongRunningOperationRetryTimeout { get; set; }
/// <summary>
/// When set to true a unique x-ms-client-request-id value is generated and
/// included in each request. Default is true.
/// </summary>
public bool? GenerateClientRequestId { get; set; }
/// <summary>
/// Gets the IDurationOperations.
/// </summary>
public virtual IDurationOperations Duration { get; private set; }
/// <summary>
/// Initializes a new instance of the AutoRestDurationTestService class.
/// </summary>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
protected AutoRestDurationTestService(params DelegatingHandler[] handlers) : base(handlers)
{
this.Initialize();
}
/// <summary>
/// Initializes a new instance of the AutoRestDurationTestService class.
/// </summary>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
protected AutoRestDurationTestService(HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : base(rootHandler, handlers)
{
this.Initialize();
}
/// <summary>
/// Initializes a new instance of the AutoRestDurationTestService class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
protected AutoRestDurationTestService(Uri baseUri, params DelegatingHandler[] handlers) : this(handlers)
{
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
this.BaseUri = baseUri;
}
/// <summary>
/// Initializes a new instance of the AutoRestDurationTestService class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
protected AutoRestDurationTestService(Uri baseUri, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
this.BaseUri = baseUri;
}
/// <summary>
/// Initializes a new instance of the AutoRestDurationTestService class.
/// </summary>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
public AutoRestDurationTestService(ServiceClientCredentials credentials, params DelegatingHandler[] handlers) : this(handlers)
{
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
this.Credentials = credentials;
if (this.Credentials != null)
{
this.Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// Initializes a new instance of the AutoRestDurationTestService class.
/// </summary>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
public AutoRestDurationTestService(ServiceClientCredentials credentials, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
this.Credentials = credentials;
if (this.Credentials != null)
{
this.Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// Initializes a new instance of the AutoRestDurationTestService class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
public AutoRestDurationTestService(Uri baseUri, ServiceClientCredentials credentials, params DelegatingHandler[] handlers) : this(handlers)
{
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
this.BaseUri = baseUri;
this.Credentials = credentials;
if (this.Credentials != null)
{
this.Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// Initializes a new instance of the AutoRestDurationTestService class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
public AutoRestDurationTestService(Uri baseUri, ServiceClientCredentials credentials, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
this.BaseUri = baseUri;
this.Credentials = credentials;
if (this.Credentials != null)
{
this.Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// An optional partial-method to perform custom initialization.
/// </summary>
partial void CustomInitialize();
/// <summary>
/// Initializes client properties.
/// </summary>
private void Initialize()
{
this.Duration = new DurationOperations(this);
this.BaseUri = new Uri("https://localhost");
this.AcceptLanguage = "en-US";
this.LongRunningOperationRetryTimeout = 30;
this.GenerateClientRequestId = true;
SerializationSettings = new JsonSerializerSettings
{
Formatting = Formatting.Indented,
DateFormatHandling = DateFormatHandling.IsoDateFormat,
DateTimeZoneHandling = DateTimeZoneHandling.Utc,
NullValueHandling = NullValueHandling.Ignore,
ReferenceLoopHandling = ReferenceLoopHandling.Serialize,
ContractResolver = new ReadOnlyJsonContractResolver(),
Converters = new List<JsonConverter>
{
new Iso8601TimeSpanConverter()
}
};
DeserializationSettings = new JsonSerializerSettings
{
DateFormatHandling = DateFormatHandling.IsoDateFormat,
DateTimeZoneHandling = DateTimeZoneHandling.Utc,
NullValueHandling = NullValueHandling.Ignore,
ReferenceLoopHandling = ReferenceLoopHandling.Serialize,
ContractResolver = new ReadOnlyJsonContractResolver(),
Converters = new List<JsonConverter>
{
new Iso8601TimeSpanConverter()
}
};
CustomInitialize();
DeserializationSettings.Converters.Add(new CloudErrorJsonConverter());
}
}
}
// ==================== next source file ====================
namespace BizHawk.Client.EmuHawk
{
partial class RamPoke
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing)
{
if (disposing && (components != null))
{
components.Dispose();
}
base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(RamPoke));
this.label1 = new System.Windows.Forms.Label();
this.OK = new System.Windows.Forms.Button();
this.Cancel = new System.Windows.Forms.Button();
this.OutputLabel = new System.Windows.Forms.Label();
this.ValeLabel = new System.Windows.Forms.Label();
this.ValueBox = new BizHawk.Client.EmuHawk.WatchValueBox();
this.ValueHexLabel = new System.Windows.Forms.Label();
this.DisplayTypeLabel = new System.Windows.Forms.Label();
this.SizeLabel = new System.Windows.Forms.Label();
this.BigEndianLabel = new System.Windows.Forms.Label();
this.AddressBox = new BizHawk.Client.EmuHawk.HexTextBox();
this.label2 = new System.Windows.Forms.Label();
this.label3 = new System.Windows.Forms.Label();
this.label4 = new System.Windows.Forms.Label();
this.label5 = new System.Windows.Forms.Label();
this.DomainLabel = new System.Windows.Forms.Label();
this.SuspendLayout();
//
// label1
//
this.label1.AutoSize = true;
this.label1.Location = new System.Drawing.Point(20, 33);
this.label1.Name = "label1";
this.label1.Size = new System.Drawing.Size(62, 13);
this.label1.TabIndex = 0;
this.label1.Text = "Address: 0x";
//
// OK
//
this.OK.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));
this.OK.Location = new System.Drawing.Point(12, 169);
this.OK.Name = "OK";
this.OK.Size = new System.Drawing.Size(65, 23);
this.OK.TabIndex = 35;
this.OK.Text = "&Poke";
this.OK.UseVisualStyleBackColor = true;
this.OK.Click += new System.EventHandler(this.Ok_Click);
//
// Cancel
//
this.Cancel.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right)));
this.Cancel.DialogResult = System.Windows.Forms.DialogResult.Cancel;
this.Cancel.Location = new System.Drawing.Point(136, 169);
this.Cancel.Name = "Cancel";
this.Cancel.Size = new System.Drawing.Size(65, 23);
this.Cancel.TabIndex = 40;
this.Cancel.Text = "&Close";
this.Cancel.UseVisualStyleBackColor = true;
this.Cancel.Click += new System.EventHandler(this.Cancel_Click);
//
// OutputLabel
//
this.OutputLabel.AutoSize = true;
this.OutputLabel.Location = new System.Drawing.Point(12, 7);
this.OutputLabel.Name = "OutputLabel";
this.OutputLabel.Size = new System.Drawing.Size(129, 13);
this.OutputLabel.TabIndex = 9;
this.OutputLabel.Text = "Enter an address to poke:";
//
// ValeLabel
//
this.ValeLabel.AutoSize = true;
this.ValeLabel.Location = new System.Drawing.Point(31, 59);
this.ValeLabel.Name = "ValeLabel";
this.ValeLabel.Size = new System.Drawing.Size(37, 13);
this.ValeLabel.TabIndex = 10;
this.ValeLabel.Text = "Value:";
//
// ValueBox
//
this.ValueBox.ByteSize = BizHawk.Client.Common.WatchSize.Byte;
this.ValueBox.CharacterCasing = System.Windows.Forms.CharacterCasing.Upper;
this.ValueBox.Location = new System.Drawing.Point(82, 57);
this.ValueBox.MaxLength = 2;
this.ValueBox.Name = "ValueBox";
this.ValueBox.Nullable = false;
this.ValueBox.Size = new System.Drawing.Size(116, 20);
this.ValueBox.TabIndex = 10;
this.ValueBox.Text = "00";
this.ValueBox.Type = BizHawk.Client.Common.DisplayType.Hex;
//
// ValueHexLabel
//
this.ValueHexLabel.AutoSize = true;
this.ValueHexLabel.Location = new System.Drawing.Point(64, 60);
this.ValueHexLabel.Name = "ValueHexLabel";
this.ValueHexLabel.Size = new System.Drawing.Size(18, 13);
this.ValueHexLabel.TabIndex = 11;
this.ValueHexLabel.Text = "0x";
//
// DisplayTypeLabel
//
this.DisplayTypeLabel.AutoSize = true;
this.DisplayTypeLabel.Location = new System.Drawing.Point(81, 118);
this.DisplayTypeLabel.Name = "DisplayTypeLabel";
this.DisplayTypeLabel.Size = new System.Drawing.Size(52, 13);
this.DisplayTypeLabel.TabIndex = 24;
this.DisplayTypeLabel.Text = "Unsigned";
//
// SizeLabel
//
this.SizeLabel.AutoSize = true;
this.SizeLabel.Location = new System.Drawing.Point(82, 101);
this.SizeLabel.Name = "SizeLabel";
this.SizeLabel.Size = new System.Drawing.Size(28, 13);
this.SizeLabel.TabIndex = 23;
this.SizeLabel.Text = "Byte";
//
// BigEndianLabel
//
this.BigEndianLabel.AutoSize = true;
this.BigEndianLabel.Location = new System.Drawing.Point(82, 135);
this.BigEndianLabel.Name = "BigEndianLabel";
this.BigEndianLabel.Size = new System.Drawing.Size(58, 13);
this.BigEndianLabel.TabIndex = 41;
this.BigEndianLabel.Text = "Big Endian";
//
// AddressBox
//
this.AddressBox.CharacterCasing = System.Windows.Forms.CharacterCasing.Upper;
this.AddressBox.Enabled = false;
this.AddressBox.Location = new System.Drawing.Point(82, 30);
this.AddressBox.MaxLength = 8;
this.AddressBox.Name = "AddressBox";
this.AddressBox.Nullable = false;
this.AddressBox.Size = new System.Drawing.Size(116, 20);
this.AddressBox.TabIndex = 5;
this.AddressBox.Text = "0000";
//
// label2
//
this.label2.AutoSize = true;
this.label2.Location = new System.Drawing.Point(39, 135);
this.label2.Name = "label2";
this.label2.Size = new System.Drawing.Size(43, 13);
this.label2.TabIndex = 44;
this.label2.Text = "Endian:";
//
// label3
//
this.label3.AutoSize = true;
this.label3.Location = new System.Drawing.Point(11, 118);
this.label3.Name = "label3";
this.label3.Size = new System.Drawing.Size(71, 13);
this.label3.TabIndex = 43;
this.label3.Text = "Display Type:";
//
// label4
//
this.label4.AutoSize = true;
this.label4.Location = new System.Drawing.Point(52, 101);
this.label4.Name = "label4";
this.label4.Size = new System.Drawing.Size(30, 13);
this.label4.TabIndex = 42;
this.label4.Text = "Size:";
//
// label5
//
this.label5.AutoSize = true;
this.label5.Location = new System.Drawing.Point(36, 84);
this.label5.Name = "label5";
this.label5.Size = new System.Drawing.Size(46, 13);
this.label5.TabIndex = 46;
this.label5.Text = "Domain:";
//
// DomainLabel
//
this.DomainLabel.AutoSize = true;
this.DomainLabel.Location = new System.Drawing.Point(82, 84);
this.DomainLabel.Name = "DomainLabel";
this.DomainLabel.Size = new System.Drawing.Size(70, 13);
this.DomainLabel.TabIndex = 45;
this.DomainLabel.Text = "Main Memory";
//
// RamPoke
//
this.AcceptButton = this.OK;
this.AutoScaleDimensions = new System.Drawing.SizeF(6F, 13F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.CancelButton = this.Cancel;
this.ClientSize = new System.Drawing.Size(213, 208);
this.Controls.Add(this.label5);
this.Controls.Add(this.DomainLabel);
this.Controls.Add(this.label2);
this.Controls.Add(this.label3);
this.Controls.Add(this.label4);
this.Controls.Add(this.BigEndianLabel);
this.Controls.Add(this.DisplayTypeLabel);
this.Controls.Add(this.SizeLabel);
this.Controls.Add(this.ValueHexLabel);
this.Controls.Add(this.ValueBox);
this.Controls.Add(this.ValeLabel);
this.Controls.Add(this.OutputLabel);
this.Controls.Add(this.Cancel);
this.Controls.Add(this.OK);
this.Controls.Add(this.AddressBox);
this.Controls.Add(this.label1);
this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
this.Icon = ((System.Drawing.Icon)(resources.GetObject("$this.Icon")));
this.MaximizeBox = false;
this.MinimizeBox = false;
this.Name = "RamPoke";
this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
this.Text = "RAM Poke";
this.Load += new System.EventHandler(this.RamPoke_Load);
this.ResumeLayout(false);
this.PerformLayout();
}
#endregion
private System.Windows.Forms.Label label1;
private HexTextBox AddressBox;
private System.Windows.Forms.Button OK;
private System.Windows.Forms.Button Cancel;
private System.Windows.Forms.Label OutputLabel;
private System.Windows.Forms.Label ValeLabel;
private WatchValueBox ValueBox;
private System.Windows.Forms.Label ValueHexLabel;
private System.Windows.Forms.Label DisplayTypeLabel;
private System.Windows.Forms.Label SizeLabel;
private System.Windows.Forms.Label BigEndianLabel;
private System.Windows.Forms.Label label2;
private System.Windows.Forms.Label label3;
private System.Windows.Forms.Label label4;
private System.Windows.Forms.Label label5;
private System.Windows.Forms.Label DomainLabel;
}
}
// ==================== next source file ====================
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading;
using Microsoft.Protocols.TestTools.StackSdk.RemoteDesktop.Rdpeudp;
using Microsoft.Protocols.TestTools.StackSdk.Security.Sspi;
using System.Security.Cryptography.X509Certificates;
using Microsoft.Protocols.TestTools.ExtendedLogging;
using Microsoft.Protocols.TestTools.StackSdk.RemoteDesktop.Rdpbcgr;
namespace Microsoft.Protocols.TestTools.StackSdk.RemoteDesktop.Rdpemt
{
public class RdpeudpDTLSChannel : ISecureChannel
{
#region Private variables
/// <summary>
/// Sleep interval, in milliseconds, used while waiting for data.
/// </summary>
const int waitInterval = 50;
/// <summary>
/// The time to wait for the next packets from the lower-layer transport.
/// </summary>
public TimeSpan timeout = new TimeSpan(0, 0, 20);
/// <summary>
/// Short wait time
/// </summary>
private TimeSpan shortWaitTime = new TimeSpan(0, 0, 0, 0, 100);
/// <summary>
/// A RdpeudpSocket which is the transport of this security channel
/// </summary>
private RdpeudpSocket rdpeudpSocket;
/// <summary>
/// buffer used for received data, which is received from RDPEUDP transport.
/// </summary>
private List<byte[]> receivedBuffer;
/// <summary>
/// buffer used for data to be sent, which will be sent to RDPEUDP transport
/// </summary>
private List<byte[]> toSendBuffer;
/// <summary>
/// Security context, which used by DTLS server.
/// Accept client token to get server token.
/// </summary>
private DtlsServerSecurityContext dtlsServerContext;
/// <summary>
/// SecurityContext used by DTLS client.
/// Supports DTLS 1.0
/// Invokes InitializeSecurityContext function of SSPI
/// </summary>
private DtlsClientSecurityContext dtlsClientContext;
private SecurityPackageContextStreamSizes dtlsStreamSizes;
private bool isAuthenticated;
#endregion Private variables
#region Properties
/// <summary>
/// Whether this channel has been authenticated
/// </summary>
public bool IsAuthenticated
{
get
{
return isAuthenticated;
}
}
#endregion Properties
#region Constructor
/// <summary>
/// Constructor
/// </summary>
/// <param name="eudpSocket">RDPEUDP Socket, which is transport</param>
public RdpeudpDTLSChannel(RdpeudpSocket eudpSocket)
{
if (eudpSocket == null || eudpSocket.TransMode == TransportMode.Reliable)
{
throw new NotSupportedException("RdpeudpSocket is null or it is a socket for reliable RDPEUDP connection.");
}
this.rdpeudpSocket = eudpSocket;
this.rdpeudpSocket.Received += ReceiveBytes;
isAuthenticated = false;
receivedBuffer = new List<byte[]>();
toSendBuffer = new List<byte[]>();
if(eudpSocket.AutoHandle)
{
// Check whether there is packets in unprocessed packet buffer
RdpeudpPacket packet = eudpSocket.ExpectPacket(shortWaitTime);
if (packet != null)
{
eudpSocket.ReceivePacket(packet);
}
}
}
#endregion Constructor
#region Public Methods
#region Implemented ISecurityChannel Interfaces
/// <summary>
/// Send bytes through this security channel
/// </summary>
/// <param name="data"></param>
/// <param name="timeout"></param>
public void Send(byte[] data)
{
if (data != null && data.Length > 0)
{
List<byte[]> toSentList = Encrypt(data);
if (toSentList != null)
{
foreach (byte[] toSentData in toSentList)
{
rdpeudpSocket.Send(toSentData);
}
}
// ETW Provider Dump Message
string messageName = "RDPEMT:SentPDU";
ExtendedLogger.DumpMessage(messageName, RdpbcgrUtility.DumpLevel_Layer2, "RDPEMT Sent PDU", data);
}
}
/// <summary>
/// Event be called when received data
/// </summary>
public event ReceiveData Received;
#endregion Implemented ISecurityChannel Interfaces
#region Authenticate methods
/// <summary>
/// Called by servers to authenticate the server and optionally the client in
/// a client-server connection using the specified certificate.
/// </summary>
/// <param name="cert">The certificate used to authenticate the server.</param>
public void AuthenticateAsServer(X509Certificate cert)
{
// Use a thread-pool thread to run the authentication handshake
ThreadPool.QueueUserWorkItem(AuthenticateAsServer, cert);
DateTime endTime = DateTime.Now + timeout;
if (rdpeudpSocket.AutoHandle)
{
while (!IsAuthenticated && DateTime.Now < endTime)
{
Thread.Sleep(waitInterval);
}
if (!IsAuthenticated)
{
throw new TimeoutException("Time out when Authenticate as Server!");
}
}
}
/// <summary>
/// Called by clients to authenticate the server and optionally the client in
/// a client-server connection.
/// </summary>
/// <param name="targetHost">The name of the server that shares this System.Net.Security.SslStream.</param>
/// <param name="certValCallback">A System.Net.Security.RemoteCertificateValidationCallback delegate responsible for validating the certificate supplied by the remote party.</param>
public void AuthenticateAsClient(string targetHost)
{
// Use a thread-pool thread to run the authentication handshake
ThreadPool.QueueUserWorkItem(AuthenticateAsClient, targetHost);
DateTime endTime = DateTime.Now + timeout;
if (rdpeudpSocket.AutoHandle)
{
while (!IsAuthenticated && DateTime.Now < endTime)
{
Thread.Sleep(waitInterval);
}
if (!IsAuthenticated)
{
throw new TimeoutException("Time out when Authenticate as Client!");
}
}
}
#endregion Authenticate methods
/// <summary>
/// Process bytes received from the lower-layer RDPEUDP transport.
/// </summary>
/// <param name="data">The received data.</param>
public void ReceiveBytes(byte[] data)
{
if (data != null && data.Length > 0)
{
if (isAuthenticated)
{
byte[] decryptedData = Decrypt(data);
// ETW Provider Dump Message
string messageName = "RDPEMT:ReceivedPDU";
ExtendedLogger.DumpMessage(messageName, RdpbcgrUtility.DumpLevel_Layer2, "RDPEMT Received PDU", decryptedData);
if (Received != null)
{
Received(decryptedData);
}
}
else
{
this.AddReceivedData(data);
}
}
}
/// <summary>
/// Encrypt source data with the DTLS security context so it can be sent to the remote endpoint.
/// </summary>
/// <param name="data">The data to encrypt.</param>
/// <returns>A list of encrypted records ready to be sent over the RDPEUDP transport.</returns>
public List<byte[]> Encrypt(byte[] data)
{
if (data == null) return null;
List<byte[]> encryptedDataList = new List<byte[]>();
int consumedLen = 0;
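// A single DTLS record can carry at most dtlsStreamSizes.MaximumMessage bytes of
// payload, so the input is split into chunks and each chunk is wrapped with the
// record header and trailer reported by the security context.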
while (data.Length - consumedLen > 0)
{
int toSendLen = (int)Math.Min(data.Length - consumedLen, dtlsStreamSizes.MaximumMessage);
byte[] dataToSend = new byte[toSendLen];
Array.Copy(data, consumedLen, dataToSend, 0, toSendLen);
SecurityBuffer streamHd = new SecurityBuffer(SecurityBufferType.StreamHeader, new byte[dtlsStreamSizes.Header]);
SecurityBuffer dataBuffer = new SecurityBuffer(SecurityBufferType.Data, dataToSend);
SecurityBuffer streamTl = new SecurityBuffer(SecurityBufferType.StreamTrailer, new byte[dtlsStreamSizes.Trailer]);
SecurityBuffer emptyBuffer = new SecurityBuffer(SecurityBufferType.Empty, null);
if (dtlsServerContext != null)
{
dtlsServerContext.Encrypt(streamHd, dataBuffer, streamTl, emptyBuffer);
}
else
{
dtlsClientContext.Encrypt(streamHd, dataBuffer, streamTl, emptyBuffer);
}
byte[] dtlsEncryptedData = new byte[streamHd.Buffer.Length + dataBuffer.Buffer.Length + streamTl.Buffer.Length];
Array.Copy(streamHd.Buffer, dtlsEncryptedData, streamHd.Buffer.Length);
Array.Copy(dataBuffer.Buffer, 0, dtlsEncryptedData, streamHd.Buffer.Length, dataBuffer.Buffer.Length);
Array.Copy(streamTl.Buffer, 0, dtlsEncryptedData, streamHd.Buffer.Length + dataBuffer.Buffer.Length, streamTl.Buffer.Length);
encryptedDataList.Add(dtlsEncryptedData);
consumedLen += toSendLen;
}
return encryptedDataList;
}
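// Illustrative sketch only (not called by the channel): the fragmentation rule used by
// Encrypt, expressed without the SSPI security buffers. Each DTLS record carries at most
// dtlsStreamSizes.MaximumMessage bytes of payload, framed by a header of
// dtlsStreamSizes.Header bytes and a trailer of dtlsStreamSizes.Trailer bytes.
private static List<byte[]> SplitIntoChunks(byte[] data, int maximumMessage)
{
List<byte[]> chunks = new List<byte[]>();
int consumed = 0;
while (consumed < data.Length)
{
// Copy at most maximumMessage bytes per chunk, exactly as the Encrypt loop does.
int chunkLen = Math.Min(data.Length - consumed, maximumMessage);
byte[] chunk = new byte[chunkLen];
Array.Copy(data, consumed, chunk, 0, chunkLen);
chunks.Add(chunk);
consumed += chunkLen;
}
return chunks;
}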
/// <summary>
/// Decrypts data received over the DTLS transport.
/// </summary>
/// <param name="data">The encrypted data.</param>
/// <returns>The decrypted data, or null if no data buffer is produced.</returns>
public byte[] Decrypt(byte[] data)
{
if (data == null || data.Length == 0)
{
return null;
}
byte[] dtlsDataBuffer = new byte[data.Length];
Array.Copy(data, dtlsDataBuffer, data.Length);
SecurityBuffer dataBuffer = new SecurityBuffer(SecurityBufferType.Data, dtlsDataBuffer);
SecurityBuffer emptyBuffer1 = new SecurityBuffer(SecurityBufferType.Empty, null);
SecurityBuffer emptyBuffer2 = new SecurityBuffer(SecurityBufferType.Empty, null);
SecurityBuffer emptyBuffer3 = new SecurityBuffer(SecurityBufferType.Empty, null);
if (dtlsServerContext != null)
{
dtlsServerContext.Decrypt(dataBuffer, emptyBuffer1, emptyBuffer2, emptyBuffer3);
}
else
{
dtlsClientContext.Decrypt(dataBuffer, emptyBuffer1, emptyBuffer2, emptyBuffer3);
}
SecurityBuffer decryptedDataBuffer =
(dataBuffer.BufferType == SecurityBufferType.Data) ? dataBuffer :
(emptyBuffer1.BufferType == SecurityBufferType.Data) ? emptyBuffer1 :
(emptyBuffer2.BufferType == SecurityBufferType.Data) ? emptyBuffer2 :
(emptyBuffer3.BufferType == SecurityBufferType.Data) ? emptyBuffer3 : null;
if (decryptedDataBuffer != null)
{
return decryptedDataBuffer.Buffer;
}
else
{
return null;
}
}
/// <summary>
/// Gets the next block of data to send. During authentication this is a handshake
/// token produced by the DTLS security context.
/// </summary>
/// <param name="timeout">The maximum time to wait for data to become available.</param>
/// <returns>The next block of data to send, or null if none is available before the timeout.</returns>
public byte[] GetDataToSent(TimeSpan timeout)
{
DateTime endtime = DateTime.Now + timeout;
while (DateTime.Now < endtime)
{
if (toSendBuffer.Count > 0)
{
lock (toSendBuffer)
{
if (toSendBuffer.Count > 0)
{
byte[] returnData = toSendBuffer[0];
toSendBuffer.RemoveAt(0);
return returnData;
}
}
}
Thread.Sleep(waitInterval);
}
return null;
}
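// Note: when rdpeudpSocket.AutoHandle is false, handshake tokens are not written to the
// socket by SendData; they accumulate in toSendBuffer instead. The caller is expected to
// pull each token with GetDataToSent, deliver it over its own transport, and feed the
// peer's reply back in through ReceiveBytes until authentication completes.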
/// <summary>
/// Dispose this object
/// </summary>
public void Dispose()
{
if (rdpeudpSocket != null)
{
rdpeudpSocket.Received -= ReceiveBytes;
}
if (dtlsServerContext != null)
{
dtlsServerContext.Dispose();
}
if (dtlsClientContext != null)
{
dtlsClientContext.Dispose();
}
}
#endregion
#region Private Methods
/// <summary>
/// Adds received data to the receive buffer.
/// During authentication the DTLS security context reads handshake tokens from this buffer.
/// </summary>
/// <param name="data">The received data.</param>
private void AddReceivedData(byte[] data)
{
if (data != null && data.Length > 0)
{
byte[] receivedData = (byte[])data.Clone();
lock (receivedBuffer)
{
receivedBuffer.Add(receivedData);
}
}
}
/// <summary>
/// Gets received data from the receivedBuffer.
/// This private method is used during authentication
/// </summary>
/// <param name="timeout">Timeout</param>
/// <returns></returns>
private byte[] GetReceivedData(TimeSpan timeout)
{
DateTime endtime = DateTime.Now + timeout;
while (DateTime.Now < endtime)
{
if (receivedBuffer.Count > 0)
{
lock (receivedBuffer)
{
if (receivedBuffer.Count > 0)
{
byte[] returnData = receivedBuffer[0];
receivedBuffer.RemoveAt(0);
return returnData;
}
}
}
Thread.Sleep(waitInterval);
}
return null;
}
/// <summary>
/// Add data to toSendBuffer
/// </summary>
/// <param name="data"></param>
private void AddDataToSent(byte[] data)
{
if (data != null && data.Length > 0)
{
byte[] toSendData = (byte[])data.Clone();
lock (toSendBuffer)
{
toSendBuffer.Add(toSendData);
}
}
}
/// <summary>
/// Called by servers to authenticate the server and optionally the client in
/// a client-server connection using the specified certificate.
/// </summary>
/// <param name="cert">The certificate used to authenticate the server.</param>
private void AuthenticateAsServer(object cert)
{
if (cert is X509Certificate)
{
dtlsServerContext = new DtlsServerSecurityContext(
SecurityPackageType.Schannel,
new CertificateCredential((X509Certificate)cert),
null,
ServerSecurityContextAttribute.ReplayDetect | ServerSecurityContextAttribute.SequenceDetect |
ServerSecurityContextAttribute.Confidentiality | ServerSecurityContextAttribute.ExtendedError |
ServerSecurityContextAttribute.AllocMemory | ServerSecurityContextAttribute.Datagram,
SecurityTargetDataRepresentation.SecurityNativeDrep);
try
{
// First accept.
byte[] clientToken = this.GetReceivedData(this.timeout);
dtlsServerContext.Accept(clientToken);
this.SendData(dtlsServerContext.Token);
while (dtlsServerContext.NeedContinueProcessing)
{
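// When HasMoreFragments is set, the security context can make progress without new
// input, so Accept is driven with a null token; otherwise the next client token is
// pulled from the receive buffer. Any output token produced is sent back to the client.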
if (dtlsServerContext.HasMoreFragments)
{
dtlsServerContext.Accept(null);
}
else
{
clientToken = this.GetReceivedData(this.timeout);
dtlsServerContext.Accept(clientToken);
}
if (dtlsServerContext.Token != null)
{
this.SendData(dtlsServerContext.Token);
}
}
isAuthenticated = true;
dtlsStreamSizes = dtlsServerContext.StreamSizes;
}
catch
{
// Don't throw exception in ThreadPool thread
}
}
}
/// <summary>
/// Called by clients to authenticate the server and optionally the client in
/// a client-server connection.
/// </summary>
/// <param name="targetHost">The name of the server that share this connection.</param>
public void AuthenticateAsClient(object targetHost)
{
if (targetHost is string)
{
dtlsClientContext = new DtlsClientSecurityContext(
SecurityPackageType.Schannel,
null,
(string)targetHost,
ClientSecurityContextAttribute.ReplayDetect | ClientSecurityContextAttribute.SequenceDetect |
ClientSecurityContextAttribute.Confidentiality | ClientSecurityContextAttribute.ExtendedError |
ClientSecurityContextAttribute.AllocMemory | ClientSecurityContextAttribute.Datagram |
ClientSecurityContextAttribute.UseSuppliedCreds,
SecurityTargetDataRepresentation.SecurityNativeDrep);
try
{
// First Initialize.
byte[] serverToken = null;
dtlsClientContext.Initialize(serverToken);
this.SendData(dtlsClientContext.Token);
while (dtlsClientContext.NeedContinueProcessing)
{
if (dtlsClientContext.HasMoreFragments)
{
dtlsClientContext.Initialize(null);
}
else
{
serverToken = this.GetReceivedData(this.timeout);
dtlsClientContext.Initialize(serverToken);
}
if (dtlsClientContext.Token != null)
{
this.SendData(dtlsClientContext.Token);
}
}
isAuthenticated = true;
dtlsStreamSizes = dtlsClientContext.StreamSizes;
}
catch
{
// Don't throw exception in ThreadPool thread
}
}
}
/// <summary>
/// Method used during authentication.
/// If the RDPEUDP socket is auto-handled, sends the data through the RDPEUDP socket;
/// if not, saves the data to the toSendBuffer for the caller to retrieve.
/// </summary>
/// <param name="data"></param>
private void SendData(byte[] data)
{
if (data != null && data.Length > 0)
{
if (rdpeudpSocket.AutoHandle)
{
rdpeudpSocket.Send(data);
}
else
{
this.AddDataToSent(data);
}
}
}
#endregion Private Methods
}
}
| |
using System;
using System.Diagnostics;
using System.IO;
using System.Net;
using System.Net.Sockets;
using System.Threading;
namespace Nowin
{
public class SaeaLayerCallback : ITransportLayerCallback, IDisposable
{
static bool _runtimeCorrectlyImplementsDisconnectReuseSocket;
static SaeaLayerCallback()
{
_runtimeCorrectlyImplementsDisconnectReuseSocket = Type.GetType("Mono.Runtime") == null;
}
[Flags]
enum State
{
Receive = 1,
Send = 2,
Disconnect = 4,
Aborting = 8,
DelayedAccept = 16
}
readonly ITransportLayerHandler _handler;
readonly Socket _listenSocket;
readonly Server _server;
readonly int _handlerId;
SocketAsyncEventArgs _receiveEvent;
SocketAsyncEventArgs _sendEvent;
SocketAsyncEventArgs _disconnectEvent;
Socket _socket;
#pragma warning disable 420
volatile int _state;
private Func<IDisposable> _contextSuppresser;
public SaeaLayerCallback(ITransportLayerHandler handler, Socket listenSocket, Server server, int handlerId, ExecutionContextFlow contextFlow)
{
_handler = handler;
_listenSocket = listenSocket;
_server = server;
_handlerId = handlerId;
_contextSuppresser = ExecutionContextFlowSuppresser.CreateContextSuppresser(contextFlow);
RecreateSaeas();
handler.Callback = this;
}
void RecreateSaeas()
{
DisposeEventArgs();
_receiveEvent = new SocketAsyncEventArgs();
_sendEvent = new SocketAsyncEventArgs();
_disconnectEvent = new SocketAsyncEventArgs();
_receiveEvent.Completed += IoCompleted;
_sendEvent.Completed += IoCompleted;
_disconnectEvent.Completed += IoCompleted;
_receiveEvent.DisconnectReuseSocket = _runtimeCorrectlyImplementsDisconnectReuseSocket;
_sendEvent.DisconnectReuseSocket = _runtimeCorrectlyImplementsDisconnectReuseSocket;
_disconnectEvent.DisconnectReuseSocket = _runtimeCorrectlyImplementsDisconnectReuseSocket;
_receiveEvent.UserToken = this;
_sendEvent.UserToken = this;
_disconnectEvent.UserToken = this;
}
void DisposeEventArgs()
{
if (_receiveEvent != null)
{
_receiveEvent.Dispose();
}
if (_sendEvent != null)
{
_sendEvent.Dispose();
}
if (_disconnectEvent != null)
{
_disconnectEvent.Dispose();
}
}
static void IoCompleted(object sender, SocketAsyncEventArgs e)
{
var self = (SaeaLayerCallback)e.UserToken;
TraceSources.CoreDebug.TraceInformation("ID{0,-5} IoCompleted {1} {2} {3} {4}", self._handlerId, e.LastOperation, e.Offset, e.BytesTransferred, e.SocketError);
switch (e.LastOperation)
{
case SocketAsyncOperation.Accept:
Debug.Assert(e == self._receiveEvent);
if (e.SocketError != SocketError.Success)
{
return;
}
self.ProcessAccept();
break;
case SocketAsyncOperation.Receive:
Debug.Assert(e == self._receiveEvent);
self.ProcessReceive();
break;
case SocketAsyncOperation.Send:
Debug.Assert(e == self._sendEvent);
self.ProcessSend();
break;
case SocketAsyncOperation.Disconnect:
Debug.Assert(e == self._disconnectEvent);
self.ProcessDisconnect();
break;
default:
throw new ArgumentException("The last operation completed on the socket was not expected");
}
}
void ProcessAccept()
{
int oldState, newState;
do
{
oldState = _state;
newState = oldState & ~(int)State.Receive;
} while (Interlocked.CompareExchange(ref _state, newState, oldState) != oldState);
var bytesTransfered = _receiveEvent.BytesTransferred;
var socketError = _receiveEvent.SocketError;
if (bytesTransfered >= 0 && socketError == SocketError.Success)
{
_socket = _receiveEvent.AcceptSocket;
IPEndPoint remoteEndpoint = null;
IPEndPoint localEndpoint = null;
try
{
remoteEndpoint = _socket.RemoteEndPoint as IPEndPoint;
localEndpoint = _socket.LocalEndPoint as IPEndPoint;
}
catch (SocketException) //"The socket is not connected" is intentionally ignored
{ }
if (remoteEndpoint != null && localEndpoint != null)
{
_server.ReportNewConnectedClient();
_handler.FinishAccept(_receiveEvent.Buffer, _receiveEvent.Offset, bytesTransfered,
remoteEndpoint, localEndpoint);
return;
}
}
// The accepted socket may be corrupted (Windows occasionally reports InvalidArgument here), so recreate the event args and retry the accept.
RecreateSaeas();
_handler.PrepareAccept();
}
void ProcessReceive()
{
bool postponedAccept;
var bytesTransferred = _receiveEvent.BytesTransferred;
if (bytesTransferred > 0 && _receiveEvent.SocketError == SocketError.Success)
{
int oldState, newState;
do
{
oldState = _state;
postponedAccept = (oldState & (int)State.DelayedAccept) != 0;
newState = oldState & ~(int)(State.Receive | State.DelayedAccept);
} while (Interlocked.CompareExchange(ref _state, newState, oldState) != oldState);
_handler.FinishReceive(_receiveEvent.Buffer, _receiveEvent.Offset, bytesTransferred);
}
else
{
int oldState, newState;
do
{
oldState = _state;
postponedAccept = (oldState & (int)State.DelayedAccept) != 0;
newState = (oldState & ~(int)(State.Receive | State.DelayedAccept));
} while (Interlocked.CompareExchange(ref _state, newState, oldState) != oldState);
_handler.FinishReceive(null, 0, -1);
}
if (postponedAccept)
_handler.PrepareAccept();
}
void ProcessSend()
{
int oldState, newState;
do
{
oldState = _state;
newState = oldState & ~(int)State.Send;
} while (Interlocked.CompareExchange(ref _state, newState, oldState) != oldState);
Exception ex = null;
if (_sendEvent.SocketError != SocketError.Success)
{
ex = new IOException();
}
_handler.FinishSend(ex);
}
void ProcessDisconnect()
{
bool delayedAccept;
int oldState, newState;
do
{
oldState = _state;
delayedAccept = (oldState & (int)State.Receive) != 0;
newState = (oldState & ~(int)(State.Disconnect | State.Aborting)) | (delayedAccept ? (int)State.DelayedAccept : 0);
} while (Interlocked.CompareExchange(ref _state, newState, oldState) != oldState);
if (!_runtimeCorrectlyImplementsDisconnectReuseSocket)
{
_receiveEvent.AcceptSocket = null;
_socket.Close();
_socket.Dispose();
}
_socket = null;
_server.ReportDisconnectedClient();
if (!delayedAccept)
_handler.PrepareAccept();
}
public void StartAccept(byte[] buffer, int offset, int length)
{
TraceSources.CoreDebug.TraceInformation("ID{0,-5} start accept {1} {2}", _handlerId, offset, length);
int oldState, newState;
do
{
oldState = _state;
if ((oldState & (int)State.Receive) != 0)
throw new InvalidOperationException("Already receiving or accepting");
newState = (oldState | (int)State.Receive) & ~(int)State.Aborting;
} while (Interlocked.CompareExchange(ref _state, newState, oldState) != oldState);
bool willRaiseEvent;
try
{
_receiveEvent.SetBuffer(buffer, offset, length);
using (StopExecutionContextFlow())
willRaiseEvent = _listenSocket.AcceptAsync(_receiveEvent);
}
catch (ObjectDisposedException)
{
return;
}
if (!willRaiseEvent)
{
var e = _receiveEvent;
TraceSources.CoreDebug.TraceInformation("ID{0,-5} Sync Accept {1} {2} {3} {4}", _handlerId, e.LastOperation, e.Offset, e.BytesTransferred, e.SocketError);
ProcessAccept();
}
}
IDisposable StopExecutionContextFlow()
{
return _contextSuppresser();
}
public void StartReceive(byte[] buffer, int offset, int length)
{
TraceSources.CoreDebug.TraceInformation("ID{0,-5} start receive {1} {2}", _handlerId, offset, length);
int oldState, newState;
do
{
oldState = _state;
if ((oldState & (int)State.Receive) != 0)
throw new InvalidOperationException("Already receiving or accepting");
newState = oldState | (int)State.Receive;
} while (Interlocked.CompareExchange(ref _state, newState, oldState) != oldState);
bool willRaiseEvent;
try
{
_receiveEvent.SetBuffer(buffer, offset, length);
using (StopExecutionContextFlow())
willRaiseEvent = _socket.ReceiveAsync(_receiveEvent);
}
catch (ObjectDisposedException)
{
return;
}
if (!willRaiseEvent)
{
var e = _receiveEvent;
TraceSources.CoreDebug.TraceInformation("ID{0,-5} Sync Receive {1} {2} {3} {4}", _handlerId, e.LastOperation, e.Offset, e.BytesTransferred, e.SocketError);
ProcessReceive();
}
}
public void StartSend(byte[] buffer, int offset, int length)
{
TraceSources.CoreDebug.TraceInformation("ID{0,-5} start send {1} {2}", _handlerId, offset, length);
int oldState, newState;
do
{
oldState = _state;
if ((oldState & (int)State.Send) != 0)
throw new InvalidOperationException("Already sending");
newState = oldState | (int)State.Send;
} while (Interlocked.CompareExchange(ref _state, newState, oldState) != oldState);
bool willRaiseEvent;
try
{
_sendEvent.SetBuffer(buffer, offset, length);
using (StopExecutionContextFlow())
willRaiseEvent = _socket.SendAsync(_sendEvent);
}
catch (ObjectDisposedException)
{
return;
}
if (!willRaiseEvent)
{
var e = _sendEvent;
TraceSources.CoreDebug.TraceInformation("ID{0,-5} Sync Send {1} {2} {3} {4}", _handlerId, e.LastOperation, e.Offset, e.BytesTransferred, e.SocketError);
ProcessSend();
}
}
public void StartDisconnect()
{
TraceSources.CoreDebug.TraceInformation("ID{0,-5} start disconnect", _handlerId);
int oldState, newState;
do
{
oldState = _state;
if ((oldState & (int)State.Disconnect) != 0)
throw new InvalidOperationException("Already disconnecting");
newState = oldState | (int)State.Disconnect;
} while (Interlocked.CompareExchange(ref _state, newState, oldState) != oldState);
bool willRaiseEvent;
try
{
using (StopExecutionContextFlow())
{
var s = _socket;
if (s == null)
return;
willRaiseEvent = s.DisconnectAsync(_disconnectEvent);
}
}
catch (ObjectDisposedException)
{
return;
}
if (!willRaiseEvent)
{
var e = _disconnectEvent;
TraceSources.CoreDebug.TraceInformation("ID{0,-5} Sync Disconnect {1} {2} {3} {4}", _handlerId, e.LastOperation, e.Offset, e.BytesTransferred, e.SocketError);
ProcessDisconnect();
}
}
public void Dispose()
{
DisposeEventArgs();
var s = _socket;
if (s != null)
{
s.Dispose();
}
}
}
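// Illustrative sketch only (not used by the server): the lock-free flag-update pattern
// used throughout SaeaLayerCallback, isolated into a small helper. A snapshot of the
// state is taken, the new value is computed from it, and the compare-exchange is retried
// until no other thread has changed the state in between.
static class InterlockedFlagExample
{
internal static void SetFlag(ref int state, int flag)
{
int oldState, newState;
do
{
oldState = state;
newState = oldState | flag;
} while (Interlocked.CompareExchange(ref state, newState, oldState) != oldState);
}
internal static void ClearFlag(ref int state, int flag)
{
int oldState, newState;
do
{
oldState = state;
newState = oldState & ~flag;
} while (Interlocked.CompareExchange(ref state, newState, oldState) != oldState);
}
}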
}
| |
// <copyright file="Keys.cs" company="WebDriver Committers">
// Copyright 2015 Software Freedom Conservancy
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Text;
namespace OpenQA.Selenium
{
/// <summary>
/// Representations of keys able to be pressed that are not text keys for sending to the browser.
/// </summary>
public static class Keys
{
/// <summary>
/// Represents the NUL keystroke.
/// </summary>
public static readonly string Null = Convert.ToString(Convert.ToChar(0xE000, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the Cancel keystroke.
/// </summary>
public static readonly string Cancel = Convert.ToString(Convert.ToChar(0xE001, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the Help keystroke.
/// </summary>
public static readonly string Help = Convert.ToString(Convert.ToChar(0xE002, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the Backspace key.
/// </summary>
public static readonly string Backspace = Convert.ToString(Convert.ToChar(0xE003, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the Tab key.
/// </summary>
public static readonly string Tab = Convert.ToString(Convert.ToChar(0xE004, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the Clear keystroke.
/// </summary>
public static readonly string Clear = Convert.ToString(Convert.ToChar(0xE005, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the Return key.
/// </summary>
public static readonly string Return = Convert.ToString(Convert.ToChar(0xE006, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the Enter key.
/// </summary>
public static readonly string Enter = Convert.ToString(Convert.ToChar(0xE007, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the Shift key.
/// </summary>
public static readonly string Shift = Convert.ToString(Convert.ToChar(0xE008, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the Shift key.
/// </summary>
public static readonly string LeftShift = Convert.ToString(Convert.ToChar(0xE008, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture); // alias
/// <summary>
/// Represents the Control key.
/// </summary>
public static readonly string Control = Convert.ToString(Convert.ToChar(0xE009, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the Control key.
/// </summary>
public static readonly string LeftControl = Convert.ToString(Convert.ToChar(0xE009, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture); // alias
/// <summary>
/// Represents the Alt key.
/// </summary>
public static readonly string Alt = Convert.ToString(Convert.ToChar(0xE00A, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the Alt key.
/// </summary>
public static readonly string LeftAlt = Convert.ToString(Convert.ToChar(0xE00A, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture); // alias
/// <summary>
/// Represents the Pause key.
/// </summary>
public static readonly string Pause = Convert.ToString(Convert.ToChar(0xE00B, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the Escape key.
/// </summary>
public static readonly string Escape = Convert.ToString(Convert.ToChar(0xE00C, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the Spacebar key.
/// </summary>
public static readonly string Space = Convert.ToString(Convert.ToChar(0xE00D, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the Page Up key.
/// </summary>
public static readonly string PageUp = Convert.ToString(Convert.ToChar(0xE00E, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the Page Down key.
/// </summary>
public static readonly string PageDown = Convert.ToString(Convert.ToChar(0xE00F, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the End key.
/// </summary>
public static readonly string End = Convert.ToString(Convert.ToChar(0xE010, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the Home key.
/// </summary>
public static readonly string Home = Convert.ToString(Convert.ToChar(0xE011, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the left arrow key.
/// </summary>
public static readonly string Left = Convert.ToString(Convert.ToChar(0xE012, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the left arrow key.
/// </summary>
public static readonly string ArrowLeft = Convert.ToString(Convert.ToChar(0xE012, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture); // alias
/// <summary>
/// Represents the up arrow key.
/// </summary>
public static readonly string Up = Convert.ToString(Convert.ToChar(0xE013, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the up arrow key.
/// </summary>
public static readonly string ArrowUp = Convert.ToString(Convert.ToChar(0xE013, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture); // alias
/// <summary>
/// Represents the right arrow key.
/// </summary>
public static readonly string Right = Convert.ToString(Convert.ToChar(0xE014, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the right arrow key.
/// </summary>
public static readonly string ArrowRight = Convert.ToString(Convert.ToChar(0xE014, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture); // alias
/// <summary>
/// Represents the down arrow key.
/// </summary>
public static readonly string Down = Convert.ToString(Convert.ToChar(0xE015, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the down arrow key.
/// </summary>
public static readonly string ArrowDown = Convert.ToString(Convert.ToChar(0xE015, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture); // alias
/// <summary>
/// Represents the Insert key.
/// </summary>
public static readonly string Insert = Convert.ToString(Convert.ToChar(0xE016, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the Delete key.
/// </summary>
public static readonly string Delete = Convert.ToString(Convert.ToChar(0xE017, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the semi-colon key.
/// </summary>
public static readonly string Semicolon = Convert.ToString(Convert.ToChar(0xE018, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the equal sign key.
/// </summary>
public static readonly string Equal = Convert.ToString(Convert.ToChar(0xE019, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
// Number pad keys
/// <summary>
/// Represents the number pad 0 key.
/// </summary>
public static readonly string NumberPad0 = Convert.ToString(Convert.ToChar(0xE01A, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the number pad 1 key.
/// </summary>
public static readonly string NumberPad1 = Convert.ToString(Convert.ToChar(0xE01B, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the number pad 2 key.
/// </summary>
public static readonly string NumberPad2 = Convert.ToString(Convert.ToChar(0xE01C, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the number pad 3 key.
/// </summary>
public static readonly string NumberPad3 = Convert.ToString(Convert.ToChar(0xE01D, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the number pad 4 key.
/// </summary>
public static readonly string NumberPad4 = Convert.ToString(Convert.ToChar(0xE01E, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the number pad 5 key.
/// </summary>
public static readonly string NumberPad5 = Convert.ToString(Convert.ToChar(0xE01F, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the number pad 6 key.
/// </summary>
public static readonly string NumberPad6 = Convert.ToString(Convert.ToChar(0xE020, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the number pad 7 key.
/// </summary>
public static readonly string NumberPad7 = Convert.ToString(Convert.ToChar(0xE021, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the number pad 8 key.
/// </summary>
public static readonly string NumberPad8 = Convert.ToString(Convert.ToChar(0xE022, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the number pad 9 key.
/// </summary>
public static readonly string NumberPad9 = Convert.ToString(Convert.ToChar(0xE023, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the number pad multiplication key.
/// </summary>
public static readonly string Multiply = Convert.ToString(Convert.ToChar(0xE024, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the number pad addition key.
/// </summary>
public static readonly string Add = Convert.ToString(Convert.ToChar(0xE025, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the number pad thousands separator key.
/// </summary>
public static readonly string Separator = Convert.ToString(Convert.ToChar(0xE026, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the number pad subtraction key.
/// </summary>
public static readonly string Subtract = Convert.ToString(Convert.ToChar(0xE027, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the number pad decimal separator key.
/// </summary>
public static readonly string Decimal = Convert.ToString(Convert.ToChar(0xE028, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the number pad division key.
/// </summary>
public static readonly string Divide = Convert.ToString(Convert.ToChar(0xE029, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
// Function keys
/// <summary>
/// Represents the function key F1.
/// </summary>
public static readonly string F1 = Convert.ToString(Convert.ToChar(0xE031, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the function key F2.
/// </summary>
public static readonly string F2 = Convert.ToString(Convert.ToChar(0xE032, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the function key F3.
/// </summary>
public static readonly string F3 = Convert.ToString(Convert.ToChar(0xE033, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the function key F4.
/// </summary>
public static readonly string F4 = Convert.ToString(Convert.ToChar(0xE034, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the function key F5.
/// </summary>
public static readonly string F5 = Convert.ToString(Convert.ToChar(0xE035, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the function key F6.
/// </summary>
public static readonly string F6 = Convert.ToString(Convert.ToChar(0xE036, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the function key F7.
/// </summary>
public static readonly string F7 = Convert.ToString(Convert.ToChar(0xE037, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the function key F8.
/// </summary>
public static readonly string F8 = Convert.ToString(Convert.ToChar(0xE038, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the function key F9.
/// </summary>
public static readonly string F9 = Convert.ToString(Convert.ToChar(0xE039, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the function key F10.
/// </summary>
public static readonly string F10 = Convert.ToString(Convert.ToChar(0xE03A, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the function key F11.
/// </summary>
public static readonly string F11 = Convert.ToString(Convert.ToChar(0xE03B, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the function key F12.
/// </summary>
public static readonly string F12 = Convert.ToString(Convert.ToChar(0xE03C, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the function key META.
/// </summary>
public static readonly string Meta = Convert.ToString(Convert.ToChar(0xE03D, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
/// <summary>
/// Represents the function key COMMAND.
/// </summary>
public static readonly string Command = Convert.ToString(Convert.ToChar(0xE03D, CultureInfo.InvariantCulture), CultureInfo.InvariantCulture);
}
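/// <summary>
/// Illustrative sketch only (not part of the WebDriver API surface): shows how the
/// <see cref="Keys"/> constants combine with ordinary text in a SendKeys call, since
/// each constant is just a one-character string.
/// </summary>
internal static class KeysUsageExample
{
internal static void SelectAllAndReplace(IWebElement element, string newText)
{
// Modifier keys concatenate with text; the Control modifier is typically
// released at the end of the SendKeys call.
element.SendKeys(Keys.Control + "a");
element.SendKeys(newText + Keys.Enter);
}
}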
}
| |
using System;
using System.CodeDom.Compiler;
using System.Collections.Generic;
using System.Data;
using System.Data.SqlClient;
using System.Globalization;
using System.Linq;
using System.Text;
namespace EduHub.Data.Entities
{
/// <summary>
/// Award Details Data Set
/// </summary>
[GeneratedCode("EduHub Data", "0.9")]
public sealed partial class PSADataSet : EduHubDataSet<PSA>
{
/// <inheritdoc />
public override string Name { get { return "PSA"; } }
/// <inheritdoc />
public override bool SupportsEntityLastModified { get { return true; } }
internal PSADataSet(EduHubContext Context)
: base(Context)
{
Index_PSAKEY = new Lazy<Dictionary<string, PSA>>(() => this.ToDictionary(i => i.PSAKEY));
}
/// <summary>
/// Matches CSV file headers to actions, used to deserialize <see cref="PSA" />
/// </summary>
/// <param name="Headers">The CSV column headers</param>
/// <returns>An array of actions which deserialize <see cref="PSA" /> fields for each CSV column header</returns>
internal override Action<PSA, string>[] BuildMapper(IReadOnlyList<string> Headers)
{
var mapper = new Action<PSA, string>[Headers.Count];
for (var i = 0; i < Headers.Count; i++) {
switch (Headers[i]) {
case "PSAKEY":
mapper[i] = (e, v) => e.PSAKEY = v;
break;
case "DESCRIPTION":
mapper[i] = (e, v) => e.DESCRIPTION = v;
break;
case "AWARD_TYPE":
mapper[i] = (e, v) => e.AWARD_TYPE = v;
break;
case "LW_DATE":
mapper[i] = (e, v) => e.LW_DATE = v == null ? (DateTime?)null : DateTime.ParseExact(v, "d/MM/yyyy h:mm:ss tt", CultureInfo.InvariantCulture);
break;
case "LW_TIME":
mapper[i] = (e, v) => e.LW_TIME = v == null ? (short?)null : short.Parse(v);
break;
case "LW_USER":
mapper[i] = (e, v) => e.LW_USER = v;
break;
default:
mapper[i] = MapperNoOp;
break;
}
}
return mapper;
}
/// <summary>
/// Merges <see cref="PSA" /> delta entities
/// </summary>
/// <param name="Entities">Iterator for base <see cref="PSA" /> entities</param>
/// <param name="DeltaEntities">List of delta <see cref="PSA" /> entities</param>
/// <returns>A merged <see cref="IEnumerable{PSA}"/> of entities</returns>
internal override IEnumerable<PSA> ApplyDeltaEntities(IEnumerable<PSA> Entities, List<PSA> DeltaEntities)
{
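// Both sequences are assumed to be ordered by PSAKEY. Base entities are yielded until
// they pass the current delta key; a base entity whose key also appears in the delta
// set is skipped (overwritten), and each delta entity is yielded in key order.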
HashSet<string> Index_PSAKEY = new HashSet<string>(DeltaEntities.Select(i => i.PSAKEY));
using (var deltaIterator = DeltaEntities.GetEnumerator())
{
using (var entityIterator = Entities.GetEnumerator())
{
while (deltaIterator.MoveNext())
{
var deltaClusteredKey = deltaIterator.Current.PSAKEY;
bool yieldEntity = false;
while (entityIterator.MoveNext())
{
var entity = entityIterator.Current;
bool overwritten = Index_PSAKEY.Remove(entity.PSAKEY);
if (entity.PSAKEY.CompareTo(deltaClusteredKey) <= 0)
{
if (!overwritten)
{
yield return entity;
}
}
else
{
yieldEntity = !overwritten;
break;
}
}
yield return deltaIterator.Current;
if (yieldEntity)
{
yield return entityIterator.Current;
}
}
while (entityIterator.MoveNext())
{
yield return entityIterator.Current;
}
}
}
}
#region Index Fields
private Lazy<Dictionary<string, PSA>> Index_PSAKEY;
#endregion
#region Index Methods
/// <summary>
/// Find PSA by PSAKEY field
/// </summary>
/// <param name="PSAKEY">PSAKEY value used to find PSA</param>
/// <returns>Related PSA entity</returns>
/// <exception cref="ArgumentOutOfRangeException">No match was found</exception>
public PSA FindByPSAKEY(string PSAKEY)
{
return Index_PSAKEY.Value[PSAKEY];
}
/// <summary>
/// Attempt to find PSA by PSAKEY field
/// </summary>
/// <param name="PSAKEY">PSAKEY value used to find PSA</param>
/// <param name="Value">Related PSA entity</param>
/// <returns>True if the related PSA entity is found</returns>
/// <exception cref="ArgumentOutOfRangeException">No match was found</exception>
public bool TryFindByPSAKEY(string PSAKEY, out PSA Value)
{
return Index_PSAKEY.Value.TryGetValue(PSAKEY, out Value);
}
/// <summary>
/// Attempt to find PSA by PSAKEY field
/// </summary>
/// <param name="PSAKEY">PSAKEY value used to find PSA</param>
/// <returns>Related PSA entity, or null if not found</returns>
/// <exception cref="ArgumentOutOfRangeException">No match was found</exception>
public PSA TryFindByPSAKEY(string PSAKEY)
{
PSA value;
if (Index_PSAKEY.Value.TryGetValue(PSAKEY, out value))
{
return value;
}
else
{
return null;
}
}
#endregion
#region SQL Integration
/// <summary>
/// Returns a <see cref="SqlCommand"/> which checks for the existence of a PSA table, and if not found, creates the table and associated indexes.
/// </summary>
/// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
public override SqlCommand GetSqlCreateTableCommand(SqlConnection SqlConnection)
{
return new SqlCommand(
connection: SqlConnection,
cmdText:
@"IF NOT EXISTS (SELECT * FROM dbo.sysobjects WHERE id = OBJECT_ID(N'[dbo].[PSA]') AND OBJECTPROPERTY(id, N'IsUserTable') = 1)
BEGIN
CREATE TABLE [dbo].[PSA](
[PSAKEY] varchar(10) NOT NULL,
[DESCRIPTION] varchar(30) NULL,
[AWARD_TYPE] varchar(3) NULL,
[LW_DATE] datetime NULL,
[LW_TIME] smallint NULL,
[LW_USER] varchar(128) NULL,
CONSTRAINT [PSA_Index_PSAKEY] PRIMARY KEY CLUSTERED (
[PSAKEY] ASC
)
);
END");
}
/// <summary>
/// Returns null as <see cref="PSADataSet"/> has no non-clustered indexes.
/// </summary>
/// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
/// <returns>null</returns>
public override SqlCommand GetSqlDisableIndexesCommand(SqlConnection SqlConnection)
{
return null;
}
/// <summary>
/// Returns null as <see cref="PSADataSet"/> has no non-clustered indexes.
/// </summary>
/// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
/// <returns>null</returns>
public override SqlCommand GetSqlRebuildIndexesCommand(SqlConnection SqlConnection)
{
return null;
}
/// <summary>
/// Returns a <see cref="SqlCommand"/> which deletes the <see cref="PSA"/> entities passed
/// </summary>
/// <param name="SqlConnection">The <see cref="SqlConnection"/> to be associated with the <see cref="SqlCommand"/></param>
/// <param name="Entities">The <see cref="PSA"/> entities to be deleted</param>
public override SqlCommand GetSqlDeleteCommand(SqlConnection SqlConnection, IEnumerable<PSA> Entities)
{
SqlCommand command = new SqlCommand();
int parameterIndex = 0;
StringBuilder builder = new StringBuilder();
List<string> Index_PSAKEY = new List<string>();
foreach (var entity in Entities)
{
Index_PSAKEY.Add(entity.PSAKEY);
}
builder.AppendLine("DELETE [dbo].[PSA] WHERE");
// Index_PSAKEY
builder.Append("[PSAKEY] IN (");
for (int index = 0; index < Index_PSAKEY.Count; index++)
{
if (index != 0)
builder.Append(", ");
// PSAKEY
var parameterPSAKEY = $"@p{parameterIndex++}";
builder.Append(parameterPSAKEY);
command.Parameters.Add(parameterPSAKEY, SqlDbType.VarChar, 10).Value = Index_PSAKEY[index];
}
builder.Append(");");
command.Connection = SqlConnection;
command.CommandText = builder.ToString();
return command;
}
/// <summary>
/// Provides a <see cref="IDataReader"/> for the PSA data set
/// </summary>
/// <returns>A <see cref="IDataReader"/> for the PSA data set</returns>
public override EduHubDataSetDataReader<PSA> GetDataSetDataReader()
{
return new PSADataReader(Load());
}
/// <summary>
/// Provides a <see cref="IDataReader"/> for the PSA data set
/// </summary>
/// <returns>A <see cref="IDataReader"/> for the PSA data set</returns>
public override EduHubDataSetDataReader<PSA> GetDataSetDataReader(List<PSA> Entities)
{
return new PSADataReader(new EduHubDataSetLoadedReader<PSA>(this, Entities));
}
// Modest implementation to primarily support SqlBulkCopy
private class PSADataReader : EduHubDataSetDataReader<PSA>
{
public PSADataReader(IEduHubDataSetReader<PSA> Reader)
: base (Reader)
{
}
public override int FieldCount { get { return 6; } }
public override object GetValue(int i)
{
switch (i)
{
case 0: // PSAKEY
return Current.PSAKEY;
case 1: // DESCRIPTION
return Current.DESCRIPTION;
case 2: // AWARD_TYPE
return Current.AWARD_TYPE;
case 3: // LW_DATE
return Current.LW_DATE;
case 4: // LW_TIME
return Current.LW_TIME;
case 5: // LW_USER
return Current.LW_USER;
default:
throw new ArgumentOutOfRangeException(nameof(i));
}
}
public override bool IsDBNull(int i)
{
switch (i)
{
case 1: // DESCRIPTION
return Current.DESCRIPTION == null;
case 2: // AWARD_TYPE
return Current.AWARD_TYPE == null;
case 3: // LW_DATE
return Current.LW_DATE == null;
case 4: // LW_TIME
return Current.LW_TIME == null;
case 5: // LW_USER
return Current.LW_USER == null;
default:
return false;
}
}
public override string GetName(int ordinal)
{
switch (ordinal)
{
case 0: // PSAKEY
return "PSAKEY";
case 1: // DESCRIPTION
return "DESCRIPTION";
case 2: // AWARD_TYPE
return "AWARD_TYPE";
case 3: // LW_DATE
return "LW_DATE";
case 4: // LW_TIME
return "LW_TIME";
case 5: // LW_USER
return "LW_USER";
default:
throw new ArgumentOutOfRangeException(nameof(ordinal));
}
}
public override int GetOrdinal(string name)
{
switch (name)
{
case "PSAKEY":
return 0;
case "DESCRIPTION":
return 1;
case "AWARD_TYPE":
return 2;
case "LW_DATE":
return 3;
case "LW_TIME":
return 4;
case "LW_USER":
return 5;
default:
throw new ArgumentOutOfRangeException(nameof(name));
}
}
}
#endregion
}
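// Illustrative sketch only (not generated code): typical use of the PSAKEY index.
// TryFindByPSAKEY avoids the exception thrown by FindByPSAKEY when no match exists.
internal static class PSADataSetUsageExample
{
internal static string DescribeAward(PSADataSet awards, string psakey)
{
PSA award;
return awards.TryFindByPSAKEY(psakey, out award) ? award.DESCRIPTION : null;
}
}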
}
| |
//
// Copyright (c) 2008-2011, Kenneth Bell
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
namespace DiscUtils.Wim
{
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Security.AccessControl;
using System.Text.RegularExpressions;
/// <summary>
/// Provides access to the file system within a WIM file image.
/// </summary>
public class WimFileSystem : ReadOnlyDiscFileSystem, IWindowsFileSystem
{
private WimFile _file;
private List<RawSecurityDescriptor> _securityDescriptors;
private Stream _metaDataStream;
private ObjectCache<long, List<DirectoryEntry>> _dirCache;
private long _rootDirPos;
internal WimFileSystem(WimFile file, int index)
{
_file = file;
ShortResourceHeader metaDataFileInfo = _file.LocateImage(index);
if (metaDataFileInfo == null)
{
throw new ArgumentException("No such image: " + index, "index");
}
_metaDataStream = _file.OpenResourceStream(metaDataFileInfo);
ReadSecurityDescriptors();
_dirCache = new ObjectCache<long, List<DirectoryEntry>>();
}
/// <summary>
/// Provides a friendly description of the file system type.
/// </summary>
public override string FriendlyName
{
get { return "Microsoft WIM"; }
}
/// <summary>
/// Gets the security descriptor associated with the file or directory.
/// </summary>
/// <param name="path">The file or directory to inspect.</param>
/// <returns>The security descriptor.</returns>
public RawSecurityDescriptor GetSecurity(string path)
{
uint id = GetEntry(path).SecurityId;
if (id == uint.MaxValue)
{
return null;
}
else if (id < _securityDescriptors.Count)
{
return _securityDescriptors[(int)id];
}
else
{
// The security id is out of range for this image's descriptor table.
throw new NotImplementedException();
}
}
/// <summary>
/// Sets the security descriptor associated with the file or directory.
/// </summary>
/// <param name="path">The file or directory to change.</param>
/// <param name="securityDescriptor">The new security descriptor.</param>
public void SetSecurity(string path, RawSecurityDescriptor securityDescriptor)
{
throw new NotSupportedException();
}
/// <summary>
/// Gets the reparse point data associated with a file or directory.
/// </summary>
/// <param name="path">The file to query</param>
/// <returns>The reparse point information</returns>
public ReparsePoint GetReparsePoint(string path)
{
DirectoryEntry dirEntry = GetEntry(path);
ShortResourceHeader hdr = _file.LocateResource(dirEntry.Hash);
if (hdr == null)
{
throw new IOException("No reparse point");
}
using (Stream s = _file.OpenResourceStream(hdr))
{
byte[] buffer = new byte[s.Length];
s.Read(buffer, 0, buffer.Length);
return new ReparsePoint((int)dirEntry.ReparseTag, buffer);
}
}
/// <summary>
/// Sets the reparse point data on a file or directory.
/// </summary>
/// <param name="path">The file to set the reparse point on.</param>
/// <param name="reparsePoint">The new reparse point.</param>
public void SetReparsePoint(string path, ReparsePoint reparsePoint)
{
throw new NotSupportedException();
}
/// <summary>
/// Removes a reparse point from a file or directory, without deleting the file or directory.
/// </summary>
/// <param name="path">The path to the file or directory to remove the reparse point from</param>
public void RemoveReparsePoint(string path)
{
throw new NotSupportedException();
}
/// <summary>
/// Gets the short name for a given path.
/// </summary>
/// <param name="path">The path to convert</param>
/// <returns>The short name</returns>
/// <remarks>
/// This method only gets the short name for the final part of the path, to
/// convert a complete path, call this method repeatedly, once for each path
/// segment. If there is no short name for the given path,<c>null</c> is
/// returned.
/// </remarks>
public string GetShortName(string path)
{
DirectoryEntry dirEntry = GetEntry(path);
return dirEntry.ShortName;
}
/// <summary>
/// Sets the short name for a given file or directory.
/// </summary>
/// <param name="path">The full path to the file or directory to change.</param>
/// <param name="shortName">The shortName, which should not include a path.</param>
public void SetShortName(string path, string shortName)
{
throw new NotSupportedException();
}
/// <summary>
/// Gets the standard file information for a file.
/// </summary>
/// <param name="path">The full path to the file or directory to query.</param>
/// <returns>The standard file information</returns>
public WindowsFileInformation GetFileStandardInformation(string path)
{
DirectoryEntry dirEntry = GetEntry(path);
return new WindowsFileInformation
{
CreationTime = DateTime.FromFileTimeUtc(dirEntry.CreationTime),
LastAccessTime = DateTime.FromFileTimeUtc(dirEntry.LastAccessTime),
ChangeTime = DateTime.FromFileTimeUtc(Math.Max(dirEntry.LastWriteTime, Math.Max(dirEntry.CreationTime, dirEntry.LastAccessTime))),
LastWriteTime = DateTime.FromFileTimeUtc(dirEntry.LastWriteTime),
FileAttributes = dirEntry.Attributes
};
}
/// <summary>
/// Sets the standard file information for a file.
/// </summary>
/// <param name="path">The full path to the file or directory to query.</param>
/// <param name="info">The standard file information</param>
public void SetFileStandardInformation(string path, WindowsFileInformation info)
{
throw new NotSupportedException();
}
/// <summary>
/// Gets the names of the alternate data streams for a file.
/// </summary>
/// <param name="path">The path to the file</param>
/// <returns>
/// The list of alternate data streams (or empty, if none). To access the contents
/// of the alternate streams, use OpenFile(path + ":" + name, ...).
/// </returns>
public string[] GetAlternateDataStreams(string path)
{
DirectoryEntry dirEntry = GetEntry(path);
List<string> names = new List<string>();
if (dirEntry.AlternateStreams != null)
{
foreach (var altStream in dirEntry.AlternateStreams)
{
if (!string.IsNullOrEmpty(altStream.Key))
{
names.Add(altStream.Key);
}
}
}
return names.ToArray();
}
/// <summary>
/// Gets the file id for a given path.
/// </summary>
/// <param name="path">The path to get the id of</param>
/// <returns>The file id, or -1</returns>
/// <remarks>
/// The returned file id uniquely identifies the file, and is shared by all hard
/// links to the same file. The value -1 indicates no unique identifier is
/// available, and so it can be assumed the file has no hard links.
/// </remarks>
public long GetFileId(string path)
{
DirectoryEntry dirEntry = GetEntry(path);
return dirEntry.HardLink == 0 ? -1 : (long)dirEntry.HardLink;
}
/// <summary>
/// Indicates whether the file is known by other names.
/// </summary>
/// <param name="path">The file to inspect</param>
/// <returns><c>true</c> if the file has other names, else <c>false</c></returns>
public bool HasHardLinks(string path)
{
DirectoryEntry dirEntry = GetEntry(path);
return dirEntry.HardLink != 0;
}
/// <summary>
/// Indicates if a directory exists.
/// </summary>
/// <param name="path">The path to test</param>
/// <returns>true if the directory exists</returns>
public override bool DirectoryExists(string path)
{
DirectoryEntry dirEntry = GetEntry(path);
return dirEntry != null && (dirEntry.Attributes & FileAttributes.Directory) != 0;
}
/// <summary>
/// Indicates if a file exists.
/// </summary>
/// <param name="path">The path to test</param>
/// <returns>true if the file exists</returns>
public override bool FileExists(string path)
{
DirectoryEntry dirEntry = GetEntry(path);
return dirEntry != null && (dirEntry.Attributes & FileAttributes.Directory) == 0;
}
/// <summary>
/// Gets the names of subdirectories in a specified directory matching a specified
/// search pattern, using a value to determine whether to search subdirectories.
/// </summary>
/// <param name="path">The path to search.</param>
/// <param name="searchPattern">The search string to match against.</param>
/// <param name="searchOption">Indicates whether to search subdirectories.</param>
/// <returns>Array of directories matching the search pattern.</returns>
public override string[] GetDirectories(string path, string searchPattern, SearchOption searchOption)
{
Regex re = Utilities.ConvertWildcardsToRegEx(searchPattern);
List<string> dirs = new List<string>();
DoSearch(dirs, path, re, searchOption == SearchOption.AllDirectories, true, false);
return dirs.ToArray();
}
/// <summary>
/// Gets the names of files in a specified directory matching a specified
/// search pattern, using a value to determine whether to search subdirectories.
/// </summary>
/// <param name="path">The path to search.</param>
/// <param name="searchPattern">The search string to match against.</param>
/// <param name="searchOption">Indicates whether to search subdirectories.</param>
/// <returns>Array of files matching the search pattern.</returns>
public override string[] GetFiles(string path, string searchPattern, SearchOption searchOption)
{
Regex re = Utilities.ConvertWildcardsToRegEx(searchPattern);
List<string> results = new List<string>();
DoSearch(results, path, re, searchOption == SearchOption.AllDirectories, false, true);
return results.ToArray();
}
/// <summary>
/// Gets the names of all files and subdirectories in a specified directory.
/// </summary>
/// <param name="path">The path to search.</param>
/// <returns>Array of files and subdirectories matching the search pattern.</returns>
public override string[] GetFileSystemEntries(string path)
{
DirectoryEntry parentDirEntry = GetEntry(path);
if (parentDirEntry == null)
{
throw new DirectoryNotFoundException(string.Format(CultureInfo.InvariantCulture, "The directory '{0}' does not exist", path));
}
List<DirectoryEntry> parentDir = GetDirectory(parentDirEntry.SubdirOffset);
return Utilities.Map<DirectoryEntry, string>(parentDir, (m) => Utilities.CombinePaths(path, m.FileName));
}
/// <summary>
/// Gets the names of files and subdirectories in a specified directory matching a specified
/// search pattern.
/// </summary>
/// <param name="path">The path to search.</param>
/// <param name="searchPattern">The search string to match against.</param>
/// <returns>Array of files and subdirectories matching the search pattern.</returns>
public override string[] GetFileSystemEntries(string path, string searchPattern)
{
Regex re = Utilities.ConvertWildcardsToRegEx(searchPattern);
DirectoryEntry parentDirEntry = GetEntry(path);
if (parentDirEntry == null)
{
throw new DirectoryNotFoundException(string.Format(CultureInfo.InvariantCulture, "The directory '{0}' does not exist", path));
}
List<DirectoryEntry> parentDir = GetDirectory(parentDirEntry.SubdirOffset);
List<string> result = new List<string>();
foreach (DirectoryEntry dirEntry in parentDir)
{
if (re.IsMatch(dirEntry.FileName))
{
result.Add(Utilities.CombinePaths(path, dirEntry.FileName));
}
}
return result.ToArray();
}
/// <summary>
/// Opens the specified file.
/// </summary>
/// <param name="path">The full path of the file to open.</param>
/// <param name="mode">The file mode for the created stream.</param>
/// <param name="access">The access permissions for the created stream.</param>
/// <returns>The new stream.</returns>
public override SparseStream OpenFile(string path, FileMode mode, FileAccess access)
{
if (mode != FileMode.Open && mode != FileMode.OpenOrCreate)
{
throw new NotSupportedException("No write support for WIM files");
}
if (access != FileAccess.Read)
{
throw new NotSupportedException("No write support for WIM files");
}
byte[] streamHash = GetFileHash(path);
ShortResourceHeader hdr = _file.LocateResource(streamHash);
if (hdr == null)
{
if (Utilities.IsAllZeros(streamHash, 0, streamHash.Length))
{
return new ZeroStream(0);
}
throw new IOException("Unable to locate file contents");
}
return _file.OpenResourceStream(hdr);
}
/// <summary>
/// Gets the attributes of a file or directory.
/// </summary>
/// <param name="path">The file or directory to inspect</param>
/// <returns>The attributes of the file or directory</returns>
public override FileAttributes GetAttributes(string path)
{
DirectoryEntry dirEntry = GetEntry(path);
if (dirEntry == null)
{
throw new FileNotFoundException("No such file or directory", path);
}
return dirEntry.Attributes;
}
/// <summary>
/// Gets the creation time (in UTC) of a file or directory.
/// </summary>
/// <param name="path">The path of the file or directory.</param>
/// <returns>The creation time.</returns>
public override DateTime GetCreationTimeUtc(string path)
{
DirectoryEntry dirEntry = GetEntry(path);
if (dirEntry == null)
{
throw new FileNotFoundException("No such file or directory", path);
}
return DateTime.FromFileTimeUtc(dirEntry.CreationTime);
}
/// <summary>
/// Gets the last access time (in UTC) of a file or directory.
/// </summary>
/// <param name="path">The path of the file or directory</param>
/// <returns>The last access time</returns>
public override DateTime GetLastAccessTimeUtc(string path)
{
DirectoryEntry dirEntry = GetEntry(path);
if (dirEntry == null)
{
throw new FileNotFoundException("No such file or directory", path);
}
return DateTime.FromFileTimeUtc(dirEntry.LastAccessTime);
}
/// <summary>
/// Gets the last modification time (in UTC) of a file or directory.
/// </summary>
/// <param name="path">The path of the file or directory</param>
/// <returns>The last write time</returns>
public override DateTime GetLastWriteTimeUtc(string path)
{
DirectoryEntry dirEntry = GetEntry(path);
if (dirEntry == null)
{
throw new FileNotFoundException("No such file or directory", path);
}
return DateTime.FromFileTimeUtc(dirEntry.LastWriteTime);
}
/// <summary>
/// Gets the length of a file.
/// </summary>
/// <param name="path">The path to the file</param>
/// <returns>The length in bytes</returns>
public override long GetFileLength(string path)
{
string filePart;
string altStreamPart;
SplitFileName(path, out filePart, out altStreamPart);
DirectoryEntry dirEntry = GetEntry(filePart);
if (dirEntry == null)
{
throw new FileNotFoundException("No such file or directory", path);
}
return dirEntry.GetLength(altStreamPart);
}
/// <summary>
/// Gets the SHA-1 hash of a file's contents.
/// </summary>
/// <param name="path">The path to the file</param>
/// <returns>The 160-bit hash</returns>
/// <remarks>The WIM file format internally stores the SHA-1 hash of files.
/// This method provides access to the stored hash. Callers can use this
/// value to compare against the actual hash of the byte stream to validate
/// the integrity of the file contents.</remarks>
public byte[] GetFileHash(string path)
{
string filePart;
string altStreamPart;
SplitFileName(path, out filePart, out altStreamPart);
DirectoryEntry dirEntry = GetEntry(filePart);
if (dirEntry == null)
{
throw new FileNotFoundException("No such file or directory", path);
}
return dirEntry.GetStreamHash(altStreamPart);
}
/// <summary>
/// Disposes of this instance.
/// </summary>
/// <param name="disposing"><c>true</c> if disposing, else <c>false</c></param>
protected override void Dispose(bool disposing)
{
try
{
_metaDataStream.Dispose();
_metaDataStream = null;
_file = null;
}
finally
{
base.Dispose(disposing);
}
}
private static void SplitFileName(string path, out string filePart, out string altStreamPart)
{
int streamSepPos = path.IndexOf(":", StringComparison.Ordinal);
if (streamSepPos >= 0)
{
filePart = path.Substring(0, streamSepPos);
altStreamPart = path.Substring(streamSepPos + 1);
}
else
{
filePart = path;
altStreamPart = string.Empty;
}
}
private List<DirectoryEntry> GetDirectory(long id)
{
List<DirectoryEntry> dir = _dirCache[id];
if (dir == null)
{
_metaDataStream.Position = (id == 0) ? _rootDirPos : id;
LittleEndianDataReader reader = new LittleEndianDataReader(_metaDataStream);
dir = new List<DirectoryEntry>();
DirectoryEntry entry = DirectoryEntry.ReadFrom(reader);
while (entry != null)
{
dir.Add(entry);
entry = DirectoryEntry.ReadFrom(reader);
}
_dirCache[id] = dir;
}
return dir;
}
private void ReadSecurityDescriptors()
{
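// On-disk layout of the security data block, as consumed below (inferred from the
// parsing code rather than restated from the WIM specification):
//   uint32  totalLength              - size of the whole block in bytes
//   uint32  numEntries               - number of security descriptors
//   uint64  sdLengths[numEntries]    - length of each descriptor
//   byte[]  descriptors              - the raw descriptors, back to back
// Any slack up to totalLength is skipped, and the root directory starts at the
// next 8-byte boundary after the block.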
LittleEndianDataReader reader = new LittleEndianDataReader(_metaDataStream);
long startPos = reader.Position;
uint totalLength = reader.ReadUInt32();
uint numEntries = reader.ReadUInt32();
ulong[] sdLengths = new ulong[numEntries];
for (uint i = 0; i < numEntries; ++i)
{
sdLengths[i] = reader.ReadUInt64();
}
_securityDescriptors = new List<RawSecurityDescriptor>((int)numEntries);
for (uint i = 0; i < numEntries; ++i)
{
_securityDescriptors.Add(new RawSecurityDescriptor(reader.ReadBytes((int)sdLengths[i]), 0));
}
if (reader.Position < startPos + totalLength)
{
reader.Skip((int)(startPos + totalLength - reader.Position));
}
_rootDirPos = Utilities.RoundUp(startPos + totalLength, 8);
}
private DirectoryEntry GetEntry(string path)
{
if (path.EndsWith(@"\", StringComparison.Ordinal))
{
path = path.Substring(0, path.Length - 1);
}
if (!string.IsNullOrEmpty(path) && !path.StartsWith(@"\", StringComparison.OrdinalIgnoreCase))
{
path = @"\" + path;
}
return GetEntry(GetDirectory(0), path.Split('\\'));
}
private DirectoryEntry GetEntry(List<DirectoryEntry> dir, string[] path)
{
List<DirectoryEntry> currentDir = dir;
DirectoryEntry nextEntry = null;
for (int i = 0; i < path.Length; ++i)
{
nextEntry = null;
foreach (var entry in currentDir)
{
if (path[i].Equals(entry.FileName, StringComparison.OrdinalIgnoreCase)
|| (!string.IsNullOrEmpty(entry.ShortName) && path[i].Equals(entry.ShortName, StringComparison.OrdinalIgnoreCase)))
{
nextEntry = entry;
break;
}
}
if (nextEntry == null)
{
return null;
}
else if (nextEntry.SubdirOffset != 0)
{
currentDir = GetDirectory(nextEntry.SubdirOffset);
}
}
return nextEntry;
}
private void DoSearch(List<string> results, string path, Regex regex, bool subFolders, bool dirs, bool files)
{
DirectoryEntry parentDirEntry = GetEntry(path);
if (parentDirEntry.SubdirOffset == 0)
{
return;
}
List<DirectoryEntry> parentDir = GetDirectory(parentDirEntry.SubdirOffset);
foreach (DirectoryEntry de in parentDir)
{
bool isDir = (de.Attributes & FileAttributes.Directory) != 0;
if ((isDir && dirs) || (!isDir && files))
{
if (regex.IsMatch(de.SearchName))
{
results.Add(Utilities.CombinePaths(path, de.FileName));
}
}
if (subFolders && isDir)
{
DoSearch(results, Utilities.CombinePaths(path, de.FileName), regex, subFolders, dirs, files);
}
}
}
}
}
| |
// Code generated by Microsoft (R) AutoRest Code Generator 0.17.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace BuildIt.CognitiveServices
{
using System.Threading.Tasks;
/// <summary>
/// Extension methods for ContentModeratorReview.
/// </summary>
public static partial class ContentModeratorReviewExtensions
{
/// <summary>
/// Returns review details for the review Id passed.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='teamName'>
/// </param>
/// <param name='reviewId'>
/// </param>
/// <param name='subscriptionKey'>
/// subscription key in url
/// </param>
/// <param name='ocpApimSubscriptionKey'>
/// subscription key in header
/// </param>
public static void ReviewGet(this IContentModeratorReview operations, string teamName, string reviewId, string subscriptionKey = default(string), string ocpApimSubscriptionKey = default(string))
{
System.Threading.Tasks.Task.Factory.StartNew(s => ((IContentModeratorReview)s).ReviewGetAsync(teamName, reviewId, subscriptionKey, ocpApimSubscriptionKey), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Returns review details for the review Id passed.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='teamName'>
/// </param>
/// <param name='reviewId'>
/// </param>
/// <param name='subscriptionKey'>
/// subscription key in url
/// </param>
/// <param name='ocpApimSubscriptionKey'>
/// subscription key in header
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async System.Threading.Tasks.Task ReviewGetAsync(this IContentModeratorReview operations, string teamName, string reviewId, string subscriptionKey = default(string), string ocpApimSubscriptionKey = default(string), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
await operations.ReviewGetWithHttpMessagesAsync(teamName, reviewId, subscriptionKey, ocpApimSubscriptionKey, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Get the Job Details for a Job Id.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='teamName'>
/// </param>
/// <param name='jobId'>
/// </param>
/// <param name='subscriptionKey'>
/// subscription key in url
/// </param>
/// <param name='ocpApimSubscriptionKey'>
/// subscription key in header
/// </param>
public static void JobGet(this IContentModeratorReview operations, string teamName, string jobId, string subscriptionKey = default(string), string ocpApimSubscriptionKey = default(string))
{
System.Threading.Tasks.Task.Factory.StartNew(s => ((IContentModeratorReview)s).JobGetAsync(teamName, jobId, subscriptionKey, ocpApimSubscriptionKey), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get the Job Details for a Job Id.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='teamName'>
/// </param>
/// <param name='jobId'>
/// </param>
/// <param name='subscriptionKey'>
/// subscription key in url
/// </param>
/// <param name='ocpApimSubscriptionKey'>
/// subscription key in header
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async System.Threading.Tasks.Task JobGetAsync(this IContentModeratorReview operations, string teamName, string jobId, string subscriptionKey = default(string), string ocpApimSubscriptionKey = default(string), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
await operations.JobGetWithHttpMessagesAsync(teamName, jobId, subscriptionKey, ocpApimSubscriptionKey, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// The reviews created would show up for Reviewers on your team. As Reviewers
/// complete reviewing, results of the Review would be POSTED (i.e. HTTP
/// POST) on the specified CallBackEndpoint.
///
/// <h3>CallBack Schemas </h3>
/// <h4>Review Completion CallBack Sample</h4>
/// <p>
/// {<br/>
/// "ReviewId": "<Review Id>",<br/>
/// "ModifiedOn": "2016-10-11T22:36:32.9934851Z",<br/>
/// "ModifiedBy": "<Name of the Reviewer>",<br/>
/// "CallBackType": "Review",<br/>
/// "ContentId": "<The ContentId that was specified
/// input>",<br/>
/// "Metadata": {<br/>
/// "adultscore": "0.xxx",<br/>
/// "a": "False",<br/>
/// "racyscore": "0.xxx",<br/>
/// "r": "True"<br/>
/// },<br/>
/// "ReviewerResultTags": {<br/>
/// "a": "False",<br/>
/// "r": "True"<br/>
/// }<br/>
/// }<br/>
///
/// </p>
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='teamName'>
/// Your Team Name
/// </param>
/// <param name='subTeam'>
/// Optional parameter used to specify the Sub Team for this review
/// </param>
/// <param name='subscriptionKey'>
/// subscription key in url
/// </param>
/// <param name='ocpApimSubscriptionKey'>
/// subscription key in header
/// </param>
public static void ReviewCreate(this IContentModeratorReview operations, string teamName, string subTeam = default(string), string subscriptionKey = default(string), string ocpApimSubscriptionKey = default(string))
{
System.Threading.Tasks.Task.Factory.StartNew(s => ((IContentModeratorReview)s).ReviewCreateAsync(teamName, subTeam, subscriptionKey, ocpApimSubscriptionKey), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// The reviews created would show up for Reviewers on your team. As Reviewers
/// complete reviewing, results of the Review would be POSTED (i.e. HTTP
/// POST) on the specified CallBackEndpoint.
///
/// <h3>CallBack Schemas </h3>
/// <h4>Review Completion CallBack Sample</h4>
/// <p>
/// {<br/>
/// "ReviewId": "<Review Id>",<br/>
/// "ModifiedOn": "2016-10-11T22:36:32.9934851Z",<br/>
/// "ModifiedBy": "<Name of the Reviewer>",<br/>
/// "CallBackType": "Review",<br/>
/// "ContentId": "<The ContentId that was specified
/// input>",<br/>
/// "Metadata": {<br/>
/// "adultscore": "0.xxx",<br/>
/// "a": "False",<br/>
/// "racyscore": "0.xxx",<br/>
/// "r": "True"<br/>
/// },<br/>
/// "ReviewerResultTags": {<br/>
/// "a": "False",<br/>
/// "r": "True"<br/>
/// }<br/>
/// }<br/>
///
/// </p>
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='teamName'>
/// Your Team Name
/// </param>
/// <param name='subTeam'>
/// Optional parameter used to specify the Sub Team for this review
/// </param>
/// <param name='subscriptionKey'>
/// subscription key in url
/// </param>
/// <param name='ocpApimSubscriptionKey'>
/// subscription key in header
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async System.Threading.Tasks.Task ReviewCreateAsync(this IContentModeratorReview operations, string teamName, string subTeam = default(string), string subscriptionKey = default(string), string ocpApimSubscriptionKey = default(string), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
await operations.ReviewCreateWithHttpMessagesAsync(teamName, subTeam, subscriptionKey, ocpApimSubscriptionKey, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// A job Id will be returned for the Image content posted on this endpoint.
///
/// Once the content is evaluated against the Workflow provided the review
/// will be created or ignored based on the workflow expression.
///
/// <h3>CallBack Schemas </h3>
///
/// <p>
/// <h4>Job Completion CallBack Sample</h4><br/>
///
/// {<br/>
/// "JobId": "<Job Id>,<br/>
/// "ReviewId": "<Review Id, if the Job resulted in a Review to be
/// created>",<br/>
/// "WorkFlowId": "default",<br/>
/// "Status": "<This will be one of Complete, InProgress,
/// Error>",<br/>
/// "ContentType": "Image",<br/>
/// "ContentId": "<This is the ContentId that was specified on
/// input>",<br/>
/// "CallBackType": "Job",<br/>
/// "Metadata": {<br/>
/// "adultscore": "0.xxx",<br/>
/// "a": "False",<br/>
/// "racyscore": "0.xxx",<br/>
/// "r": "True"<br/>
/// }<br/>
/// }<br/>
///
/// </p>
/// <p>
/// <h4>Review Completion CallBack Sample</h4><br/>
///
/// {
/// "ReviewId": "<Review Id>",<br/>
/// "ModifiedOn": "2016-10-11T22:36:32.9934851Z",<br/>
/// "ModifiedBy": "<Name of the Reviewer>",<br/>
/// "CallBackType": "Review",<br/>
/// "ContentId": "<The ContentId that was specified
/// input>",<br/>
/// "Metadata": {<br/>
/// "adultscore": "0.xxx",
/// "a": "False",<br/>
/// "racyscore": "0.xxx",<br/>
/// "r": "True"<br/>
/// },<br/>
/// "ReviewerResultTags": {<br/>
/// "a": "False",<br/>
/// "r": "True"<br/>
/// }<br/>
/// }<br/>
///
/// </p>
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='teamName'>
/// Your team name
/// </param>
/// <param name='contentId'>
/// Content Id/Name
/// </param>
/// <param name='workflowName'>
/// Workflow Name; if left empty, your team's default workflow is used
/// </param>
/// <param name='callBackEndpoint'>
/// Callback endpoint for posting the reviews result.
/// </param>
/// <param name='subscriptionKey'>
/// subscription key in url
/// </param>
/// <param name='ocpApimSubscriptionKey'>
/// subscription key in header
/// </param>
public static void JobCreate(this IContentModeratorReview operations, string teamName, string contentId, string workflowName, string callBackEndpoint = default(string), string subscriptionKey = default(string), string ocpApimSubscriptionKey = default(string))
{
System.Threading.Tasks.Task.Factory.StartNew(s => ((IContentModeratorReview)s).JobCreateAsync(teamName, contentId, workflowName, callBackEndpoint, subscriptionKey, ocpApimSubscriptionKey), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// A job Id will be returned for the Image content posted on this endpoint.
///
/// Once the content is evaluated against the Workflow provided the review
/// will be created or ignored based on the workflow expression.
///
/// <h3>CallBack Schemas </h3>
///
/// <p>
/// <h4>Job Completion CallBack Sample</h4><br/>
///
/// {<br/>
/// "JobId": "<Job Id>,<br/>
/// "ReviewId": "<Review Id, if the Job resulted in a Review to be
/// created>",<br/>
/// "WorkFlowId": "default",<br/>
/// "Status": "<This will be one of Complete, InProgress,
/// Error>",<br/>
/// "ContentType": "Image",<br/>
/// "ContentId": "<This is the ContentId that was specified on
/// input>",<br/>
/// "CallBackType": "Job",<br/>
/// "Metadata": {<br/>
/// "adultscore": "0.xxx",<br/>
/// "a": "False",<br/>
/// "racyscore": "0.xxx",<br/>
/// "r": "True"<br/>
/// }<br/>
/// }<br/>
///
/// </p>
/// <p>
/// <h4>Review Completion CallBack Sample</h4><br/>
///
/// {
/// "ReviewId": "<Review Id>",<br/>
/// "ModifiedOn": "2016-10-11T22:36:32.9934851Z",<br/>
/// "ModifiedBy": "<Name of the Reviewer>",<br/>
/// "CallBackType": "Review",<br/>
/// "ContentId": "<The ContentId that was specified
/// input>",<br/>
/// "Metadata": {<br/>
/// "adultscore": "0.xxx",
/// "a": "False",<br/>
/// "racyscore": "0.xxx",<br/>
/// "r": "True"<br/>
/// },<br/>
/// "ReviewerResultTags": {<br/>
/// "a": "False",<br/>
/// "r": "True"<br/>
/// }<br/>
/// }<br/>
///
/// </p>
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='teamName'>
/// Your team name
/// </param>
/// <param name='contentId'>
/// Content Id/Name
/// </param>
/// <param name='workflowName'>
/// Workflow Name; if left empty, your team's default workflow is used
/// </param>
/// <param name='callBackEndpoint'>
/// Callback endpoint for posting the reviews result.
/// </param>
/// <param name='subscriptionKey'>
/// subscription key in url
/// </param>
/// <param name='ocpApimSubscriptionKey'>
/// subscription key in header
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async System.Threading.Tasks.Task JobCreateAsync(this IContentModeratorReview operations, string teamName, string contentId, string workflowName, string callBackEndpoint = default(string), string subscriptionKey = default(string), string ocpApimSubscriptionKey = default(string), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
await operations.JobCreateWithHttpMessagesAsync(teamName, contentId, workflowName, callBackEndpoint, subscriptionKey, ocpApimSubscriptionKey, null, cancellationToken).ConfigureAwait(false);
}
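// Illustrative usage sketch (not generated by AutoRest); the team name, content id,
// workflow name, callback URL and key below are placeholders.
//
//   IContentModeratorReview client = /* obtain the generated client */;
//   await client.JobCreateAsync(
//       teamName: "myteam",
//       contentId: "image-001",
//       workflowName: "default",
//       callBackEndpoint: "https://example.com/moderation/callback",
//       ocpApimSubscriptionKey: "<subscription key>");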
/// <summary>
/// Get all the Workflows available for your Team
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='team'>
/// Your Team name
/// </param>
/// <param name='subscriptionKey'>
/// subscription key in url
/// </param>
/// <param name='ocpApimSubscriptionKey'>
/// subscription key in header
/// </param>
public static void WorkflowGetAll(this IContentModeratorReview operations, string team, string subscriptionKey = default(string), string ocpApimSubscriptionKey = default(string))
{
System.Threading.Tasks.Task.Factory.StartNew(s => ((IContentModeratorReview)s).WorkflowGetAllAsync(team, subscriptionKey, ocpApimSubscriptionKey), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get all the Workflows available for your Team
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='team'>
/// Your Team name
/// </param>
/// <param name='subscriptionKey'>
/// subscription key in url
/// </param>
/// <param name='ocpApimSubscriptionKey'>
/// subscription key in header
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async System.Threading.Tasks.Task WorkflowGetAllAsync(this IContentModeratorReview operations, string team, string subscriptionKey = default(string), string ocpApimSubscriptionKey = default(string), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
await operations.WorkflowGetAllWithHttpMessagesAsync(team, subscriptionKey, ocpApimSubscriptionKey, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Get the details of a specific Workflow on your Team
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='team'>
/// Your Team name
/// </param>
/// <param name='workflowname'>
/// Provide a name for this workflow
/// </param>
/// <param name='subscriptionKey'>
/// subscription key in url
/// </param>
/// <param name='ocpApimSubscriptionKey'>
/// subscription key in header
/// </param>
public static void WorkflowGet(this IContentModeratorReview operations, string team, string workflowname, string subscriptionKey = default(string), string ocpApimSubscriptionKey = default(string))
{
System.Threading.Tasks.Task.Factory.StartNew(s => ((IContentModeratorReview)s).WorkflowGetAsync(team, workflowname, subscriptionKey, ocpApimSubscriptionKey), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get the details of a specific Workflow on your Team
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='team'>
/// Your Team name
/// </param>
/// <param name='workflowname'>
/// Provide a name for this workflow
/// </param>
/// <param name='subscriptionKey'>
/// subscription key in url
/// </param>
/// <param name='ocpApimSubscriptionKey'>
/// subscription key in header
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async System.Threading.Tasks.Task WorkflowGetAsync(this IContentModeratorReview operations, string team, string workflowname, string subscriptionKey = default(string), string ocpApimSubscriptionKey = default(string), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
await operations.WorkflowGetWithHttpMessagesAsync(team, workflowname, subscriptionKey, ocpApimSubscriptionKey, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Create a new workflow or update an existing one.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='team'>
/// Your Team name
/// </param>
/// <param name='workflowname'>
/// Provide a name for this workflow
/// </param>
/// <param name='subscriptionKey'>
/// subscription key in url
/// </param>
/// <param name='ocpApimSubscriptionKey'>
/// subscription key in header
/// </param>
public static void WorkflowCreateOrUpdate(this IContentModeratorReview operations, string team, string workflowname, string subscriptionKey = default(string), string ocpApimSubscriptionKey = default(string))
{
System.Threading.Tasks.Task.Factory.StartNew(s => ((IContentModeratorReview)s).WorkflowCreateOrUpdateAsync(team, workflowname, subscriptionKey, ocpApimSubscriptionKey), operations, System.Threading.CancellationToken.None, System.Threading.Tasks.TaskCreationOptions.None, System.Threading.Tasks.TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Create a new workflow or update an existing one.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='team'>
/// Your Team name
/// </param>
/// <param name='workflowname'>
/// Provide a name for this workflow
/// </param>
/// <param name='subscriptionKey'>
/// subscription key in url
/// </param>
/// <param name='ocpApimSubscriptionKey'>
/// subscription key in header
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async System.Threading.Tasks.Task WorkflowCreateOrUpdateAsync(this IContentModeratorReview operations, string team, string workflowname, string subscriptionKey = default(string), string ocpApimSubscriptionKey = default(string), System.Threading.CancellationToken cancellationToken = default(System.Threading.CancellationToken))
{
await operations.WorkflowCreateOrUpdateWithHttpMessagesAsync(team, workflowname, subscriptionKey, ocpApimSubscriptionKey, null, cancellationToken).ConfigureAwait(false);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
namespace System.Reflection.Metadata.Ecma335
{
internal enum MetadataStreamKind
{
Illegal,
Compressed,
Uncompressed,
}
[Flags]
internal enum TableMask : ulong
{
Module = 1UL << TableIndex.Module,
TypeRef = 1UL << TableIndex.TypeRef,
TypeDef = 1UL << TableIndex.TypeDef,
FieldPtr = 1UL << TableIndex.FieldPtr,
Field = 1UL << TableIndex.Field,
MethodPtr = 1UL << TableIndex.MethodPtr,
MethodDef = 1UL << TableIndex.MethodDef,
ParamPtr = 1UL << TableIndex.ParamPtr,
Param = 1UL << TableIndex.Param,
InterfaceImpl = 1UL << TableIndex.InterfaceImpl,
MemberRef = 1UL << TableIndex.MemberRef,
Constant = 1UL << TableIndex.Constant,
CustomAttribute = 1UL << TableIndex.CustomAttribute,
FieldMarshal = 1UL << TableIndex.FieldMarshal,
DeclSecurity = 1UL << TableIndex.DeclSecurity,
ClassLayout = 1UL << TableIndex.ClassLayout,
FieldLayout = 1UL << TableIndex.FieldLayout,
StandAloneSig = 1UL << TableIndex.StandAloneSig,
EventMap = 1UL << TableIndex.EventMap,
EventPtr = 1UL << TableIndex.EventPtr,
Event = 1UL << TableIndex.Event,
PropertyMap = 1UL << TableIndex.PropertyMap,
PropertyPtr = 1UL << TableIndex.PropertyPtr,
Property = 1UL << TableIndex.Property,
MethodSemantics = 1UL << TableIndex.MethodSemantics,
MethodImpl = 1UL << TableIndex.MethodImpl,
ModuleRef = 1UL << TableIndex.ModuleRef,
TypeSpec = 1UL << TableIndex.TypeSpec,
ImplMap = 1UL << TableIndex.ImplMap,
FieldRva = 1UL << TableIndex.FieldRva,
EnCLog = 1UL << TableIndex.EncLog,
EnCMap = 1UL << TableIndex.EncMap,
Assembly = 1UL << TableIndex.Assembly,
// AssemblyProcessor = 1UL << TableIndices.AssemblyProcessor,
// AssemblyOS = 1UL << TableIndices.AssemblyOS,
AssemblyRef = 1UL << TableIndex.AssemblyRef,
// AssemblyRefProcessor = 1UL << TableIndices.AssemblyRefProcessor,
// AssemblyRefOS = 1UL << TableIndices.AssemblyRefOS,
File = 1UL << TableIndex.File,
ExportedType = 1UL << TableIndex.ExportedType,
ManifestResource = 1UL << TableIndex.ManifestResource,
NestedClass = 1UL << TableIndex.NestedClass,
GenericParam = 1UL << TableIndex.GenericParam,
MethodSpec = 1UL << TableIndex.MethodSpec,
GenericParamConstraint = 1UL << TableIndex.GenericParamConstraint,
Document = 1UL << TableIndex.Document,
MethodDebugInformation = 1UL << TableIndex.MethodDebugInformation,
LocalScope = 1UL << TableIndex.LocalScope,
LocalVariable = 1UL << TableIndex.LocalVariable,
LocalConstant = 1UL << TableIndex.LocalConstant,
ImportScope = 1UL << TableIndex.ImportScope,
StateMachineMethod = 1UL << TableIndex.StateMachineMethod,
CustomDebugInformation = 1UL << TableIndex.CustomDebugInformation,
PtrTables =
FieldPtr
| MethodPtr
| ParamPtr
| EventPtr
| PropertyPtr,
EncTables =
EnCLog
| EnCMap,
TypeSystemTables =
PtrTables
| EncTables
| Module
| TypeRef
| TypeDef
| Field
| MethodDef
| Param
| InterfaceImpl
| MemberRef
| Constant
| CustomAttribute
| FieldMarshal
| DeclSecurity
| ClassLayout
| FieldLayout
| StandAloneSig
| EventMap
| Event
| PropertyMap
| Property
| MethodSemantics
| MethodImpl
| ModuleRef
| TypeSpec
| ImplMap
| FieldRva
| Assembly
| AssemblyRef
| File
| ExportedType
| ManifestResource
| NestedClass
| GenericParam
| MethodSpec
| GenericParamConstraint,
DebugTables =
Document
| MethodDebugInformation
| LocalScope
| LocalVariable
| LocalConstant
| ImportScope
| StateMachineMethod
| CustomDebugInformation,
AllTables =
TypeSystemTables |
DebugTables,
ValidPortablePdbExternalTables =
TypeSystemTables & ~PtrTables & ~EncTables
}
internal enum HeapSizes : byte
{
StringHeapLarge = 0x01, // 4 byte uint indexes used for string heap offsets
GuidHeapLarge = 0x02, // 4 byte uint indexes used for GUID heap offsets
BlobHeapLarge = 0x04, // 4 byte uint indexes used for Blob heap offsets
ExtraData = 0x40, // Indicates that there is an extra 4 bytes of data immediately after the row counts
}
internal enum StringKind : byte
{
Plain = (byte)(StringHandleType.String >> HeapHandleType.OffsetBitCount),
Virtual = (byte)(StringHandleType.VirtualString >> HeapHandleType.OffsetBitCount),
WinRTPrefixed = (byte)(StringHandleType.WinRTPrefixedString >> HeapHandleType.OffsetBitCount),
DotTerminated = (byte)(StringHandleType.DotTerminatedString >> HeapHandleType.OffsetBitCount),
}
internal static class StringHandleType
{
// The 3 high bits above the offset that specify the full string type (including virtual bit)
internal const uint TypeMask = ~(HeapHandleType.OffsetMask);
// The string type bits excluding the virtual bit.
internal const uint NonVirtualTypeMask = TypeMask & ~(HeapHandleType.VirtualBit);
// NUL-terminated UTF8 string on a #String heap.
internal const uint String = (0 << HeapHandleType.OffsetBitCount);
// String on #String heap whose terminator is NUL and '.', whichever comes first.
internal const uint DotTerminatedString = (1 << HeapHandleType.OffsetBitCount);
// Reserved values that can be used for future strings:
internal const uint ReservedString1 = (2 << HeapHandleType.OffsetBitCount);
internal const uint ReservedString2 = (3 << HeapHandleType.OffsetBitCount);
// Virtual string identified by a virtual index
internal const uint VirtualString = HeapHandleType.VirtualBit | (0 << HeapHandleType.OffsetBitCount);
// Virtual string whose value is a "<WinRT>" prefixed string found at the specified heap offset.
internal const uint WinRTPrefixedString = HeapHandleType.VirtualBit | (1 << HeapHandleType.OffsetBitCount);
// Reserved virtual strings that can be used in future:
internal const uint ReservedVirtualString1 = HeapHandleType.VirtualBit | (2 << HeapHandleType.OffsetBitCount);
internal const uint ReservedVirtualString2 = HeapHandleType.VirtualBit | (3 << HeapHandleType.OffsetBitCount);
}
internal static class HeapHandleType
{
// Heap offset values are limited to 29 bits (max compressed integer)
internal const int OffsetBitCount = 29;
internal const uint OffsetMask = (1 << OffsetBitCount) - 1;
internal const uint VirtualBit = 0x80000000;
internal static bool IsValidHeapOffset(uint offset)
{
return (offset & ~OffsetMask) == 0;
}
}
/// <summary>
/// These constants are all in the byte range and apply to the interpretation of <see cref="Handle.VType"/>.
/// </summary>
internal static class HandleType
{
internal const uint Module = (uint)TableIndex.Module;
internal const uint TypeRef = (uint)TableIndex.TypeRef;
internal const uint TypeDef = (uint)TableIndex.TypeDef;
internal const uint FieldDef = (uint)TableIndex.Field;
internal const uint MethodDef = (uint)TableIndex.MethodDef;
internal const uint ParamDef = (uint)TableIndex.Param;
internal const uint InterfaceImpl = (uint)TableIndex.InterfaceImpl;
internal const uint MemberRef = (uint)TableIndex.MemberRef;
internal const uint Constant = (uint)TableIndex.Constant;
internal const uint CustomAttribute = (uint)TableIndex.CustomAttribute;
internal const uint DeclSecurity = (uint)TableIndex.DeclSecurity;
internal const uint Signature = (uint)TableIndex.StandAloneSig;
internal const uint EventMap = (uint)TableIndex.EventMap;
internal const uint Event = (uint)TableIndex.Event;
internal const uint PropertyMap = (uint)TableIndex.PropertyMap;
internal const uint Property = (uint)TableIndex.Property;
internal const uint MethodSemantics = (uint)TableIndex.MethodSemantics;
internal const uint MethodImpl = (uint)TableIndex.MethodImpl;
internal const uint ModuleRef = (uint)TableIndex.ModuleRef;
internal const uint TypeSpec = (uint)TableIndex.TypeSpec;
internal const uint Assembly = (uint)TableIndex.Assembly;
internal const uint AssemblyRef = (uint)TableIndex.AssemblyRef;
internal const uint File = (uint)TableIndex.File;
internal const uint ExportedType = (uint)TableIndex.ExportedType;
internal const uint ManifestResource = (uint)TableIndex.ManifestResource;
internal const uint NestedClass = (uint)TableIndex.NestedClass;
internal const uint GenericParam = (uint)TableIndex.GenericParam;
internal const uint MethodSpec = (uint)TableIndex.MethodSpec;
internal const uint GenericParamConstraint = (uint)TableIndex.GenericParamConstraint;
// debug tables:
internal const uint Document = (uint)TableIndex.Document;
internal const uint MethodDebugInformation = (uint)TableIndex.MethodDebugInformation;
internal const uint LocalScope = (uint)TableIndex.LocalScope;
internal const uint LocalVariable = (uint)TableIndex.LocalVariable;
internal const uint LocalConstant = (uint)TableIndex.LocalConstant;
internal const uint ImportScope = (uint)TableIndex.ImportScope;
internal const uint AsyncMethod = (uint)TableIndex.StateMachineMethod;
internal const uint CustomDebugInformation = (uint)TableIndex.CustomDebugInformation;
internal const uint UserString = 0x70; // #UserString heap
// The following values never appear in a token stored in metadata,
// they are just helper values to identify the type of a handle.
// Note, however, that even though they do not come from the spec,
// they are surfaced as public constants via HandleKind enum and
// therefore cannot change!
internal const uint Blob = 0x71; // #Blob heap
internal const uint Guid = 0x72; // #Guid heap
// #String heap and its modifications
//
// Multiple values are reserved for string handles so that we can encode special
// handling with more than just the virtual bit. See StringHandleType for how
// the two extra bits are actually interpreted. The extra String1,2,3 values here are
// not used directly, but serve as a reminder that they are not available for use
// by another handle type.
internal const uint String = 0x78;
internal const uint String1 = 0x79;
internal const uint String2 = 0x7a;
internal const uint String3 = 0x7b;
// Namespace handles also have offsets into the #String heap (when non-virtual)
// to their full name. However, this is an implementation detail and they are
// surfaced with first-class HandleKind.Namespace and strongly-typed NamespaceHandle.
internal const uint Namespace = 0x7c;
internal const uint HeapMask = 0x70;
internal const uint TypeMask = 0x7F;
/// <summary>
/// Use the highest bit to mark tokens that are virtual (synthesized).
/// We create virtual tokens to represent projected WinMD entities.
/// </summary>
internal const uint VirtualBit = 0x80;
/// <summary>
/// In the case of string handles, the two lower bits that (in addition to the
/// virtual bit not included in this mask) encode how to obtain the string value.
/// </summary>
internal const uint NonVirtualStringTypeMask = 0x03;
}
internal static class TokenTypeIds
{
internal const uint Module = HandleType.Module << RowIdBitCount;
internal const uint TypeRef = HandleType.TypeRef << RowIdBitCount;
internal const uint TypeDef = HandleType.TypeDef << RowIdBitCount;
internal const uint FieldDef = HandleType.FieldDef << RowIdBitCount;
internal const uint MethodDef = HandleType.MethodDef << RowIdBitCount;
internal const uint ParamDef = HandleType.ParamDef << RowIdBitCount;
internal const uint InterfaceImpl = HandleType.InterfaceImpl << RowIdBitCount;
internal const uint MemberRef = HandleType.MemberRef << RowIdBitCount;
internal const uint Constant = HandleType.Constant << RowIdBitCount;
internal const uint CustomAttribute = HandleType.CustomAttribute << RowIdBitCount;
internal const uint DeclSecurity = HandleType.DeclSecurity << RowIdBitCount;
internal const uint Signature = HandleType.Signature << RowIdBitCount;
internal const uint EventMap = HandleType.EventMap << RowIdBitCount;
internal const uint Event = HandleType.Event << RowIdBitCount;
internal const uint PropertyMap = HandleType.PropertyMap << RowIdBitCount;
internal const uint Property = HandleType.Property << RowIdBitCount;
internal const uint MethodSemantics = HandleType.MethodSemantics << RowIdBitCount;
internal const uint MethodImpl = HandleType.MethodImpl << RowIdBitCount;
internal const uint ModuleRef = HandleType.ModuleRef << RowIdBitCount;
internal const uint TypeSpec = HandleType.TypeSpec << RowIdBitCount;
internal const uint Assembly = HandleType.Assembly << RowIdBitCount;
internal const uint AssemblyRef = HandleType.AssemblyRef << RowIdBitCount;
internal const uint File = HandleType.File << RowIdBitCount;
internal const uint ExportedType = HandleType.ExportedType << RowIdBitCount;
internal const uint ManifestResource = HandleType.ManifestResource << RowIdBitCount;
internal const uint NestedClass = HandleType.NestedClass << RowIdBitCount;
internal const uint GenericParam = HandleType.GenericParam << RowIdBitCount;
internal const uint MethodSpec = HandleType.MethodSpec << RowIdBitCount;
internal const uint GenericParamConstraint = HandleType.GenericParamConstraint << RowIdBitCount;
// debug tables:
internal const uint Document = HandleType.Document << RowIdBitCount;
internal const uint MethodDebugInformation = HandleType.MethodDebugInformation << RowIdBitCount;
internal const uint LocalScope = HandleType.LocalScope << RowIdBitCount;
internal const uint LocalVariable = HandleType.LocalVariable << RowIdBitCount;
internal const uint LocalConstant = HandleType.LocalConstant << RowIdBitCount;
internal const uint ImportScope = HandleType.ImportScope << RowIdBitCount;
internal const uint AsyncMethod = HandleType.AsyncMethod << RowIdBitCount;
internal const uint CustomDebugInformation = HandleType.CustomDebugInformation << RowIdBitCount;
internal const uint UserString = HandleType.UserString << RowIdBitCount;
internal const int RowIdBitCount = 24;
internal const uint RIDMask = (1 << RowIdBitCount) - 1;
internal const uint TypeMask = HandleType.TypeMask << RowIdBitCount;
/// <summary>
/// Use the highest bit to mark tokens that are virtual (synthesized).
/// We create virtual tokens to represent projected WinMD entities.
/// </summary>
internal const uint VirtualBit = 0x80000000;
/// <summary>
/// Returns true if the token value can escape the metadata reader.
/// We don't allow virtual tokens and heap tokens other than UserString to escape
/// since the token type ids are internal to the reader and not specified by ECMA spec.
///
/// Spec (Partition III, 1.9 Metadata tokens):
/// Many CIL instructions are followed by a "metadata token". This is a 4-byte value that specifies a row in a
/// metadata table, or a starting byte offset in the User String heap.
///
/// For example, a value of 0x02 specifies the TypeDef table; a value of 0x70 specifies the User
/// String heap. The value corresponds to the number assigned to that metadata table (see Partition II for the full
/// list of tables) or to 0x70 for the User String heap. The least-significant 3 bytes specify the target row within that
/// metadata table, or starting byte offset within the User String heap.
/// </summary>
internal static bool IsEntityOrUserStringToken(uint vToken)
{
return (vToken & TypeMask) <= UserString;
}
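// Worked example (illustrative): for vToken = 0x04000010, (vToken & TypeMask) is
// 0x04000000, i.e. the Field table, and vToken & RIDMask is 0x10, row 16; a token
// of 0x70000020 addresses byte offset 0x20 in the #UserString heap.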
internal static bool IsEntityToken(uint vToken)
{
return (vToken & TypeMask) < UserString;
}
internal static bool IsValidRowId(uint rowId)
{
return (rowId & ~RIDMask) == 0;
}
internal static bool IsValidRowId(int rowId)
{
return (rowId & ~RIDMask) == 0;
}
}
}
| |
/*
** $Id: luac.c,v 1.54 2006/06/02 17:37:11 lhf Exp $
** Lua compiler (saves bytecodes to files; also list bytecodes)
** See Copyright Notice in lua.h
*/
using System;
using System.Collections.Generic;
using System.Text;
using System.IO;
using System.Reflection;
namespace SharpLua
{
using Instruction = System.UInt32;
public class Luac
{
//#include <errno.h>
//#include <stdio.h>
//#include <stdlib.h>
//#include <string.h>
//#define luac_c
//#define LUA_CORE
//#include "lua.h"
//#include "lauxlib.h"
//#include "ldo.h"
//#include "lfunc.h"
//#include "lmem.h"
//#include "lobject.h"
//#include "lopcodes.h"
//#include "lstring.h"
//#include "lundump.h"
static Lua.CharPtr PROGNAME = "luac"; /* default program name */
static Lua.CharPtr OUTPUT = PROGNAME + ".out"; /* default output file */
static int listing = 0; /* list bytecodes? */
static int dumping = 1; /* dump bytecodes? */
static int stripping = 0; /* strip debug information? */
static Lua.CharPtr Output = OUTPUT; /* default output file name */
static Lua.CharPtr output = Output; /* actual output file name */
static Lua.CharPtr progname = PROGNAME; /* actual program name */
static void fatal(Lua.CharPtr message)
{
Lua.fprintf(Lua.stderr, "%s: %s\n", progname, message);
Environment.Exit(Lua.EXIT_FAILURE);
}
static void cannot(Lua.CharPtr what)
{
Lua.fprintf(Lua.stderr, "%s: cannot %s %s: %s\n", progname, what, output, Lua.strerror(Lua.errno()));
Environment.Exit(Lua.EXIT_FAILURE);
}
static void usage(Lua.CharPtr message)
{
if (message[0] == '-')
Lua.fprintf(Lua.stderr, "%s: unrecognized option " + Lua.LUA_QS + "\n", progname, message);
else
Lua.fprintf(Lua.stderr, "%s: %s\n", progname, message);
Lua.fprintf(Lua.stderr,
"usage: %s [options] [filenames].\n" +
"Available options are:\n" +
" - process stdin\n" +
" -l list\n" +
" -o name output to file " + Lua.LUA_QL("name") + " (default is \"%s\")\n" +
" -p parse only\n" +
" -s strip debug information\n" +
" -v show version information\n" +
" -- stop handling options\n",
progname, Output);
Environment.Exit(Lua.EXIT_FAILURE);
}
//#define IS(s) (strcmp(argv[i],s)==0)
static int doargs(int argc, string[] argv)
{
int i;
int version = 0;
if ((argv.Length > 0) && (argv[0] != "")) progname = argv[0];
for (i = 1; i < argc; i++)
{
if (argv[i][0] != '-') /* end of options; keep it */
break;
else if (Lua.strcmp(argv[i], "--") == 0) /* end of options; skip it */
{
++i;
if (version != 0) ++version;
break;
}
else if (Lua.strcmp(argv[i], "-") == 0) /* end of options; use stdin */
break;
else if (Lua.strcmp(argv[i], "-l") == 0) /* list */
++listing;
else if (Lua.strcmp(argv[i], "-o") == 0) /* output file */
{
output = argv[++i];
if (output == null || (output[0] == 0)) usage(Lua.LUA_QL("-o") + " needs argument");
if (Lua.strcmp(argv[i], "-") == 0) output = null;
}
else if (Lua.strcmp(argv[i], "-p") == 0) /* parse only */
dumping = 0;
else if (Lua.strcmp(argv[i], "-s") == 0) /* strip debug information */
stripping = 1;
else if (Lua.strcmp(argv[i], "-v") == 0) /* show version */
++version;
else /* unknown option */
usage(argv[i]);
}
if (i == argc && ((listing != 0) || (dumping == 0)))
{
dumping = 0;
argv[--i] = Output.ToString();
}
if (version != 0)
{
Lua.printf("%s %s\n", Lua.LUA_RELEASE, Lua.LUA_COPYRIGHT);
if (version == argc - 1) Environment.Exit(Lua.EXIT_SUCCESS);
}
return i;
}
static Lua.Proto toproto(Lua.lua_State L, int i)
{
return Lua.clvalue(L.top + (i)).l.p;
}
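// combine() mirrors luac.c: with a single chunk it simply returns that chunk's Proto;
// otherwise it builds a synthetic top-level Proto whose bytecode is an
// OP_CLOSURE/OP_CALL pair per input chunk followed by a single OP_RETURN, so that
// dumping one Proto covers every file passed on the command line.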
static Lua.Proto combine(Lua.lua_State L, int n)
{
if (n == 1)
return toproto(L, -1);
else
{
int i, pc;
Lua.Proto f = Lua.luaF_newproto(L);
Lua.setptvalue2s(L, L.top, f); Lua.incr_top(L);
f.source = Lua.luaS_newliteral(L, "=(" + PROGNAME + ")");
f.maxstacksize = 1;
pc = 2 * n + 1;
f.code = (Instruction[])Lua.luaM_newvector<Instruction>(L, pc);
f.sizecode = pc;
f.p = Lua.luaM_newvector<Lua.Proto>(L, n);
f.sizep = n;
pc = 0;
for (i = 0; i < n; i++)
{
f.p[i] = toproto(L, i - n - 1);
f.code[pc++] = (uint)Lua.CREATE_ABx(Lua.OpCode.OP_CLOSURE, 0, i);
f.code[pc++] = (uint)Lua.CREATE_ABC(Lua.OpCode.OP_CALL, 0, 1, 1);
}
f.code[pc++] = (uint)Lua.CREATE_ABC(Lua.OpCode.OP_RETURN, 0, 1, 0);
return f;
}
}
static int writer(Lua.lua_State L, Lua.CharPtr p, uint size, object u)
{
//UNUSED(L);
return ((Lua.fwrite(p, (int)size, 1, (Stream)u) != 1) && (size != 0)) ? 1 : 0;
}
public class Smain
{
public int argc;
public string[] argv;
};
static int pmain(Lua.lua_State L)
{
Smain s = (Smain)Lua.lua_touserdata(L, 1);
int argc = s.argc;
string[] argv = s.argv;
Lua.Proto f;
int i;
if (Lua.lua_checkstack(L, argc) == 0) fatal("too many input files");
for (i = 0; i < argc; i++)
{
Lua.CharPtr filename = (Lua.strcmp(argv[i], "-") == 0) ? null : argv[i];
if (Lua.luaL_loadfile(L, filename) != 0) fatal(Lua.lua_tostring(L, -1));
}
f = combine(L, argc);
if (listing != 0) Lua.luaU_print(f, (listing > 1) ? 1 : 0);
if (dumping != 0)
{
Stream D = (output == null) ? Lua.stdout : Lua.fopen(output, "wb");
if (D == null) cannot("open");
Lua.lua_lock(L);
Lua.luaU_dump(L, f, writer, D, stripping);
Lua.lua_unlock(L);
if (Lua.ferror(D) != 0) cannot("write");
if (Lua.fclose(D) != 0) cannot("close");
}
return 0;
}
static int Main(string[] args)
{
// prepend the exe name to the arg list as it's done in C
// so that we don't have to change any of the args indexing
// code above
List<string> newargs = new List<string>(args);
newargs.Insert(0, Assembly.GetExecutingAssembly().Location);
args = (string[])newargs.ToArray();
Lua.lua_State L;
Smain s = new Smain();
int argc = args.Length;
int i = doargs(argc, args);
newargs.RemoveRange(0, i);
argc -= i; args = (string[])newargs.ToArray();
if (argc <= 0) usage("no input files given");
L = Lua.lua_open();
if (L == null) fatal("not enough memory for state");
s.argc = argc;
s.argv = args;
if (Lua.lua_cpcall(L, pmain, s) != 0) fatal(Lua.lua_tostring(L, -1));
Lua.lua_close(L);
return Lua.EXIT_SUCCESS;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using Microsoft.TemplateEngine.Core.Contracts;
using Microsoft.TemplateEngine.Core.Util;
namespace Microsoft.TemplateEngine.Core.Operations
{
public class InlineMarkupConditional : IOperationProvider
{
private readonly string _id;
private readonly bool _initialState;
public InlineMarkupConditional(MarkupTokens tokens, bool wholeLine, bool trimWhitespace, ConditionEvaluator evaluator, string variableFormat, string id, bool initialState)
{
Tokens = tokens;
_id = id;
Evaluator = evaluator;
WholeLine = wholeLine;
TrimWhitespace = trimWhitespace;
VariableFormat = variableFormat;
_initialState = initialState;
}
public ConditionEvaluator Evaluator { get; }
public string Id => _id;
public MarkupTokens Tokens { get; }
public bool TrimWhitespace { get; }
public string VariableFormat { get; }
public bool WholeLine { get; }
public IOperation GetOperation(Encoding encoding, IProcessorState processorState)
{
TokenTrie structureTrie = new TokenTrie();
TokenTrie closeConditionTrie = new TokenTrie();
TokenTrie scanBackTrie = new TokenTrie();
IToken openOpenElementTokenBytes = Tokens.OpenOpenElementToken.ToToken(processorState.Encoding);
scanBackTrie.AddToken(openOpenElementTokenBytes);
int openOpenElementToken = structureTrie.AddToken(openOpenElementTokenBytes);
int openCloseElementToken = structureTrie.AddToken(Tokens.OpenCloseElementToken.ToToken(processorState.Encoding));
int closeCloseElementToken = structureTrie.AddToken(Tokens.CloseElementTagToken.ToToken(processorState.Encoding));
int selfClosingElementEndToken = -1;
if (Tokens.SelfClosingElementEndToken != null)
{
selfClosingElementEndToken = structureTrie.AddToken(Tokens.SelfClosingElementEndToken.ToToken(processorState.Encoding));
}
closeConditionTrie.AddToken(Tokens.CloseConditionExpression.ToToken(processorState.Encoding));
MarkupTokenMapping mapping = new MarkupTokenMapping(
openOpenElementToken,
openCloseElementToken,
closeCloseElementToken,
selfClosingElementEndToken
);
IReadOnlyList<IToken> start = new[] { Tokens.OpenConditionExpression.ToToken(processorState.Encoding) };
return new Impl(this, start, structureTrie, closeConditionTrie, scanBackTrie, mapping, _id, _initialState);
}
public class Impl : IOperation
{
private readonly ITokenTrie _closeConditionTrie;
private readonly InlineMarkupConditional _definition;
private readonly MarkupTokenMapping _mapping;
private readonly ITokenTrie _scanBackTrie;
private readonly ITokenTrie _structureTrie;
public Impl(InlineMarkupConditional definition, IReadOnlyList<IToken> tokens, ITokenTrie structureTrie, ITokenTrie closeConditionTrie, ITokenTrie scanBackTrie, MarkupTokenMapping mapping, string id, bool initialState)
{
_definition = definition;
Id = id;
Tokens = tokens;
_mapping = mapping;
_structureTrie = structureTrie;
_scanBackTrie = scanBackTrie;
_closeConditionTrie = closeConditionTrie;
IsInitialStateOn = string.IsNullOrEmpty(id) || initialState;
}
public string Id { get; }
public IReadOnlyList<IToken> Tokens { get; }
public bool IsInitialStateOn { get; }
public int HandleMatch(IProcessorState processor, int bufferLength, ref int currentBufferPosition, int token, Stream target)
{
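// Rough flow (descriptive comment, not in the original source):
//  1. If conditional processing is disabled via the engine flags, echo the opening
//     token verbatim and return.
//  2. Read the bytes up to the close-condition token and evaluate them with a
//     throw-away ProcessorState.
//  3. If evaluation faults, re-inject the original text so downstream operations
//     see it unchanged; if the condition is true, keep the element (trimming
//     whitespace backward); otherwise seek back to the element start and skip
//     forward past the matching close tag.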
bool flag;
if (processor.Config.Flags.TryGetValue(Conditional.OperationName, out flag) && !flag)
{
target.Write(Tokens[token].Value, Tokens[token].Start, Tokens[token].Length);
return Tokens[token].Length;
}
List<byte> conditionBytes = new List<byte>();
ScanToCloseCondition(processor, conditionBytes, ref bufferLength, ref currentBufferPosition);
byte[] condition = conditionBytes.ToArray();
EngineConfig adjustedConfig = new EngineConfig(processor.Config.EnvironmentSettings, processor.Config.Whitespaces, processor.Config.LineEndings, processor.Config.Variables, _definition.VariableFormat);
IProcessorState localState = new ProcessorState(new MemoryStream(condition), new MemoryStream(), conditionBytes.Count, int.MaxValue, adjustedConfig, new IOperationProvider[0]);
int pos = 0;
int len = conditionBytes.Count;
bool faulted;
bool value = _definition.Evaluator(localState, ref len, ref pos, out faulted);
if (faulted)
{
target.Write(Tokens[0].Value, Tokens[0].Start, Tokens[0].Length);
MemoryStream fragment = new MemoryStream();
fragment.Write(condition, 0, condition.Length);
fragment.Write(_closeConditionTrie.Tokens[0].Value, _closeConditionTrie.Tokens[0].Start, _closeConditionTrie.Tokens[0].Length);
fragment.Write(processor.CurrentBuffer, currentBufferPosition, bufferLength - currentBufferPosition);
fragment.Position = 0;
processor.Inject(fragment);
currentBufferPosition = processor.CurrentBufferPosition;
int written = Tokens[0].Length;
return written;
}
if (value)
{
processor.WhitespaceHandler(ref bufferLength, ref currentBufferPosition, trimBackward: true);
return 0;
}
processor.SeekBackUntil(_scanBackTrie, true);
FindEnd(processor, ref bufferLength, ref currentBufferPosition);
processor.WhitespaceHandler(ref bufferLength, ref currentBufferPosition, _definition.WholeLine, _definition.TrimWhitespace);
return 0;
}
private void FindEnd(IProcessorState processorState, ref int bufferLength, ref int currentBufferPosition)
{
int depth = 1;
bool inElement = true;
while (bufferLength >= _structureTrie.MinLength)
{
//Try to get at least the max length of the tree into the buffer
if (bufferLength - currentBufferPosition < _structureTrie.MaxLength)
{
processorState.AdvanceBuffer(currentBufferPosition);
currentBufferPosition = processorState.CurrentBufferPosition;
bufferLength = processorState.CurrentBufferLength;
}
int sz = bufferLength == processorState.CurrentBuffer.Length ? _structureTrie.MaxLength : _structureTrie.MinLength;
for (; currentBufferPosition < bufferLength - sz + 1; ++currentBufferPosition)
{
if (bufferLength == 0)
{
currentBufferPosition = 0;
return;
}
int token;
if (_structureTrie.GetOperation(processorState.CurrentBuffer, bufferLength, ref currentBufferPosition, out token))
{
if (token == _mapping.OpenOpenElementToken)
{
++depth;
inElement = true;
}
else if (token == _mapping.SelfClosingElementEndToken)
{
--depth;
inElement = false;
}
else if (token == _mapping.CloseElementTagToken)
{
if (inElement)
{
inElement = false;
}
else
{
--depth;
}
}
else if (token == _mapping.OpenCloseElementToken)
{
inElement = false;
}
if (depth == 0)
{
return;
}
}
}
}
//Ran out of places to check and haven't reached the actual match, consume all the way to the end
currentBufferPosition = bufferLength;
}
private void ScanToCloseCondition(IProcessorState processorState, List<byte> conditionBytes, ref int bufferLength, ref int currentBufferPosition)
{
int previousPosition = currentBufferPosition;
while (bufferLength >= _closeConditionTrie.MinLength)
{
//Try to get at least the max length of the tree into the buffer
if (bufferLength - currentBufferPosition < _closeConditionTrie.MaxLength)
{
conditionBytes.AddRange(processorState.CurrentBuffer.Skip(previousPosition).Take(currentBufferPosition - previousPosition));
processorState.AdvanceBuffer(currentBufferPosition);
currentBufferPosition = processorState.CurrentBufferPosition;
bufferLength = processorState.CurrentBufferLength;
previousPosition = 0;
}
int sz = bufferLength == processorState.CurrentBuffer.Length ? _closeConditionTrie.MaxLength : _closeConditionTrie.MinLength;
for (; currentBufferPosition < bufferLength - sz + 1; ++currentBufferPosition)
{
if (bufferLength == 0)
{
currentBufferPosition = 0;
return;
}
int token;
if (_closeConditionTrie.GetOperation(processorState.CurrentBuffer, bufferLength, ref currentBufferPosition, out token))
{
conditionBytes.AddRange(processorState.CurrentBuffer.Skip(previousPosition).Take(currentBufferPosition - previousPosition - _closeConditionTrie.Tokens[token].Length));
return;
}
}
}
//Ran out of places to check and haven't reached the actual match, consume all the way to the end
currentBufferPosition = bufferLength;
}
}
}
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using Microsoft.CodeAnalysis;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Test.Utilities;
using Xunit;
namespace Microsoft.CodeAnalysis.Editor.CSharp.UnitTests.Recommendations
{
public class ReturnKeywordRecommenderTests : KeywordRecommenderTests
{
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotAtRoot_Interactive()
{
VerifyAbsence(SourceCodeKind.Script,
@"$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotAfterClass_Interactive()
{
VerifyAbsence(SourceCodeKind.Script,
@"class C { }
$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotAfterGlobalStatement_Interactive()
{
VerifyAbsence(SourceCodeKind.Script,
@"System.Console.WriteLine();
$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotAfterGlobalVariableDeclaration_Interactive()
{
VerifyAbsence(SourceCodeKind.Script,
@"int i = 0;
$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotInUsingAlias()
{
VerifyAbsence(
@"using Foo = $$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void EmptyStatement()
{
VerifyKeyword(AddInsideMethod(
@"$$"));
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void BeforeStatement()
{
VerifyKeyword(AddInsideMethod(
@"$$
return true;"));
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterStatement()
{
VerifyKeyword(AddInsideMethod(
@"return true;
$$"));
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterBlock()
{
VerifyKeyword(AddInsideMethod(
@"if (true) {
}
$$"));
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotAfterReturn()
{
VerifyAbsence(AddInsideMethod(
@"return $$"));
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterYield()
{
VerifyKeyword(AddInsideMethod(
@"yield $$"));
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotInClass()
{
VerifyAbsence(@"class C
{
$$
}");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void InAttributeInsideClass()
{
VerifyKeyword(
@"class C {
[$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void InAttributeAfterAttributeInsideClass()
{
VerifyKeyword(
@"class C {
[Foo]
[$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void InAttributeAfterMethod()
{
VerifyKeyword(
@"class C {
void Foo() {
}
[$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void InAttributeAfterProperty()
{
VerifyKeyword(
@"class C {
int Foo {
get;
}
[$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void InAttributeAfterField()
{
VerifyKeyword(
@"class C {
int Foo;
[$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void InAttributeAfterEvent()
{
VerifyKeyword(
@"class C {
event Action<int> Foo;
[$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotInOuterAttribute()
{
VerifyAbsence(SourceCodeKind.Regular,
@"[$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void InOuterAttributeScripting()
{
VerifyKeyword(SourceCodeKind.Script,
@"[$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotInParameterAttribute()
{
VerifyAbsence(
@"class C {
void Foo([$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotInPropertyAttribute()
{
VerifyAbsence(
@"class C {
int Foo { [$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotInEventAttribute()
{
VerifyAbsence(
@"class C {
event Action<int> Foo { [$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotInClassReturnParameters()
{
VerifyAbsence(
@"class C<[$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotInDelegateReturnParameters()
{
VerifyAbsence(
@"delegate void D<[$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotInMethodReturnParameters()
{
VerifyAbsence(
@"class C {
void M<[$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void InInterface()
{
VerifyKeyword(
@"interface I {
[$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void InStruct()
{
VerifyKeyword(
@"struct S {
[$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void NotInEnum()
{
VerifyAbsence(
@"enum E {
[$$");
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterElse()
{
VerifyKeyword(AddInsideMethod(
@"if (foo) {
} else $$"));
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterElseClause()
{
VerifyKeyword(AddInsideMethod(
@"if (foo) {
} else {
}
$$"));
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterFixed()
{
VerifyKeyword(AddInsideMethod(
@"fixed (byte* pResult = result) {
}
$$"));
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterSwitch()
{
VerifyKeyword(AddInsideMethod(
@"switch (foo) {
}
$$"));
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterCatch()
{
VerifyKeyword(AddInsideMethod(
@"try {
} catch {
}
$$"));
}
[Fact, Trait(Traits.Feature, Traits.Features.KeywordRecommending)]
public void AfterFinally()
{
VerifyKeyword(AddInsideMethod(
@"try {
} finally {
}
$$"));
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Xml;
using System.Collections;
using System.Diagnostics;
namespace System.Data
{
// This is an internal helper class used during Xml load to DataSet/DataDocument.
    // XmlToDatasetMap class provides functionality for binding elements/attributes
// to DataTable / DataColumn
internal sealed class XmlToDatasetMap
{
private sealed class XmlNodeIdentety
{
public string LocalName;
public string NamespaceURI;
public XmlNodeIdentety(string localName, string namespaceURI)
{
LocalName = localName;
NamespaceURI = namespaceURI;
}
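            // Hash on the local name only; Equals still compares both the local name and
            // the namespace (case-insensitively), so identities that differ only by
            // namespace simply land in the same hash bucket.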
public override int GetHashCode()
{
return LocalName.GetHashCode();
}
public override bool Equals(object obj)
{
XmlNodeIdentety id = (XmlNodeIdentety)obj;
return (
(string.Equals(LocalName, id.LocalName, StringComparison.OrdinalIgnoreCase)) &&
(string.Equals(NamespaceURI, id.NamespaceURI, StringComparison.OrdinalIgnoreCase))
);
}
}
        // This class exists to avoid allocating an XmlNodeIdentety on every access to the hash table.
        // Unfortunately XmlNode doesn't expose a single identity object.
internal sealed class XmlNodeIdHashtable : Hashtable
{
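            // A single mutable key (_id below) is reused by all of the indexers in this class,
            // so a lookup does not allocate a new XmlNodeIdentety per access; this also means
            // the indexers are not safe for concurrent use.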
private XmlNodeIdentety _id = new XmlNodeIdentety(string.Empty, string.Empty);
public XmlNodeIdHashtable(int capacity)
: base(capacity)
{ }
public object this[XmlNode node]
{
get
{
_id.LocalName = node.LocalName;
_id.NamespaceURI = node.NamespaceURI;
return this[_id];
}
}
public object this[XmlReader dataReader]
{
get
{
_id.LocalName = dataReader.LocalName;
_id.NamespaceURI = dataReader.NamespaceURI;
return this[_id];
}
}
public object this[DataTable table]
{
get
{
_id.LocalName = table.EncodedTableName;
_id.NamespaceURI = table.Namespace;
return this[_id];
}
}
public object this[string name]
{
get
{
_id.LocalName = name;
_id.NamespaceURI = string.Empty;
return this[_id];
}
}
}
private sealed class TableSchemaInfo
{
public DataTable TableSchema;
public XmlNodeIdHashtable ColumnsSchemaMap;
public TableSchemaInfo(DataTable tableSchema)
{
TableSchema = tableSchema;
ColumnsSchemaMap = new XmlNodeIdHashtable(tableSchema.Columns.Count);
}
}
private XmlNodeIdHashtable _tableSchemaMap; // Holds all the tables information
private TableSchemaInfo _lastTableSchemaInfo = null;
// Used to infer schema
public XmlToDatasetMap(DataSet dataSet, XmlNameTable nameTable)
{
Debug.Assert(dataSet != null, "DataSet can't be null");
Debug.Assert(nameTable != null, "NameTable can't be null");
BuildIdentityMap(dataSet, nameTable);
}
// Used to read data with known schema
public XmlToDatasetMap(XmlNameTable nameTable, DataSet dataSet)
{
Debug.Assert(dataSet != null, "DataSet can't be null");
Debug.Assert(nameTable != null, "NameTable can't be null");
BuildIdentityMap(nameTable, dataSet);
}
// Used to infer schema
public XmlToDatasetMap(DataTable dataTable, XmlNameTable nameTable)
{
Debug.Assert(dataTable != null, "DataTable can't be null");
Debug.Assert(nameTable != null, "NameTable can't be null");
BuildIdentityMap(dataTable, nameTable);
}
// Used to read data with known schema
public XmlToDatasetMap(XmlNameTable nameTable, DataTable dataTable)
{
Debug.Assert(dataTable != null, "DataTable can't be null");
Debug.Assert(nameTable != null, "NameTable can't be null");
BuildIdentityMap(nameTable, dataTable);
}
internal static bool IsMappedColumn(DataColumn c)
{
return (c.ColumnMapping != MappingType.Hidden);
}
        // Used to infer schema
private TableSchemaInfo AddTableSchema(DataTable table, XmlNameTable nameTable)
{
            // SDUB: Because in our case the reader has already read the document, every name we can meet in the
            //       document already has an entry in the NameTable.
            //       If in the future we build the identity map before reading the XML we can replace Get() with Add().
            // Sdub: GetIdentity is called from two places: BuildIdentityMap() and LoadRows().
            //       The first case deals with decoded names; the second one with encoded names.
            //       We decided to encode names in the first case (instead of decoding them in the second)
            //       because it saves us time in LoadRows(). We have, as usual, more data than schemas.
string tableLocalName = nameTable.Get(table.EncodedTableName);
string tableNamespace = nameTable.Get(table.Namespace);
if (tableLocalName == null)
{
                // Because the name of this table isn't present in the XML we don't need a mapping for it.
                // The fewer mappings we keep, the faster we work.
return null;
}
TableSchemaInfo tableSchemaInfo = new TableSchemaInfo(table);
_tableSchemaMap[new XmlNodeIdentety(tableLocalName, tableNamespace)] = tableSchemaInfo;
return tableSchemaInfo;
}
private TableSchemaInfo AddTableSchema(XmlNameTable nameTable, DataTable table)
{
            // Enzol: This is the opposite of the previous function:
            //        we populate the name table so that the hash comparison can happen as
            //        an object comparison instead of a string comparison.
            // Sdub: GetIdentity is called from two places: BuildIdentityMap() and LoadRows().
            //       The first case deals with decoded names; the second one with encoded names.
            //       We decided to encode names in the first case (instead of decoding them in the second)
            //       because it saves us time in LoadRows(). We have, as usual, more data than schemas.
string _tableLocalName = table.EncodedTableName; // Table name
string tableLocalName = nameTable.Get(_tableLocalName); // Look it up in nametable
if (tableLocalName == null)
{ // If not found
tableLocalName = nameTable.Add(_tableLocalName); // Add it
}
table._encodedTableName = tableLocalName; // And set it back
            string tableNamespace = nameTable.Get(table.Namespace); // Look up the table namespace
if (tableNamespace == null)
{ // If not found
tableNamespace = nameTable.Add(table.Namespace); // Add it
}
else
{
if (table._tableNamespace != null) // Update table namespace
table._tableNamespace = tableNamespace;
}
TableSchemaInfo tableSchemaInfo = new TableSchemaInfo(table);
// Create new table schema info
_tableSchemaMap[new XmlNodeIdentety(tableLocalName, tableNamespace)] = tableSchemaInfo;
// And add it to the hashtable
return tableSchemaInfo; // Return it as we have to populate
// Column schema map and Child table
// schema map in it
}
private bool AddColumnSchema(DataColumn col, XmlNameTable nameTable, XmlNodeIdHashtable columns)
{
string columnLocalName = nameTable.Get(col.EncodedColumnName);
string columnNamespace = nameTable.Get(col.Namespace);
if (columnLocalName == null)
{
return false;
}
XmlNodeIdentety idColumn = new XmlNodeIdentety(columnLocalName, columnNamespace);
columns[idColumn] = col;
if (col.ColumnName.StartsWith("xml", StringComparison.OrdinalIgnoreCase))
{
HandleSpecialColumn(col, nameTable, columns);
}
return true;
}
private bool AddColumnSchema(XmlNameTable nameTable, DataColumn col, XmlNodeIdHashtable columns)
{
string _columnLocalName = XmlConvert.EncodeLocalName(col.ColumnName);
string columnLocalName = nameTable.Get(_columnLocalName); // Look it up in a name table
if (columnLocalName == null)
{ // Not found?
columnLocalName = nameTable.Add(_columnLocalName); // Add it
}
col._encodedColumnName = columnLocalName; // And set it back
string columnNamespace = nameTable.Get(col.Namespace); // Get column namespace from nametable
if (columnNamespace == null)
{ // Not found ?
columnNamespace = nameTable.Add(col.Namespace); // Add it
}
else
{
if (col._columnUri != null) // Update namespace
col._columnUri = columnNamespace;
}
// Create XmlNodeIdentety
// for this column
XmlNodeIdentety idColumn = new XmlNodeIdentety(columnLocalName, columnNamespace);
columns[idColumn] = col; // And add it to hashtable
if (col.ColumnName.StartsWith("xml", StringComparison.OrdinalIgnoreCase))
{
HandleSpecialColumn(col, nameTable, columns);
}
return true;
}
private void BuildIdentityMap(DataSet dataSet, XmlNameTable nameTable)
{
_tableSchemaMap = new XmlNodeIdHashtable(dataSet.Tables.Count);
foreach (DataTable t in dataSet.Tables)
{
TableSchemaInfo tableSchemaInfo = AddTableSchema(t, nameTable);
if (tableSchemaInfo != null)
{
foreach (DataColumn c in t.Columns)
{
// don't include auto-generated PK, FK and any hidden columns to be part of mapping
if (IsMappedColumn(c))
{
AddColumnSchema(c, nameTable, tableSchemaInfo.ColumnsSchemaMap);
}
}
}
}
}
// This one is used while reading data with preloaded schema
private void BuildIdentityMap(XmlNameTable nameTable, DataSet dataSet)
{
_tableSchemaMap = new XmlNodeIdHashtable(dataSet.Tables.Count);
            // This hash table contains
            // table schemas as TableSchemaInfo objects.
            // These objects hold a reference to the table,
            // a hash table with the column schema map
            // and the child table schema entries.
            string dsNamespace = nameTable.Get(dataSet.Namespace); // Attempt to look up the DataSet namespace
// in the name table
if (dsNamespace == null)
{ // Found ?
dsNamespace = nameTable.Add(dataSet.Namespace); // Nope. Add it
}
dataSet._namespaceURI = dsNamespace; // Set a DataSet namespace URI
foreach (DataTable t in dataSet.Tables)
{ // For each table
TableSchemaInfo tableSchemaInfo = AddTableSchema(nameTable, t);
// Add table schema info to hash table
if (tableSchemaInfo != null)
{
foreach (DataColumn c in t.Columns)
{ // Add column schema map
// don't include auto-generated PK, FK and any hidden columns to be part of mapping
if (IsMappedColumn(c))
{ // If mapped column
AddColumnSchema(nameTable, c, tableSchemaInfo.ColumnsSchemaMap);
} // Add it to the map
}
// Add child nested tables to the schema
foreach (DataRelation r in t.ChildRelations)
                    { // Do we have child tables?
if (r.Nested)
{ // Is it nested?
                            // don't include non-nested tables
                            // Handle namespaces and names as usual
string _tableLocalName = XmlConvert.EncodeLocalName(r.ChildTable.TableName);
string tableLocalName = nameTable.Get(_tableLocalName);
if (tableLocalName == null)
{
tableLocalName = nameTable.Add(_tableLocalName);
}
string tableNamespace = nameTable.Get(r.ChildTable.Namespace);
if (tableNamespace == null)
{
tableNamespace = nameTable.Add(r.ChildTable.Namespace);
}
XmlNodeIdentety idTable = new XmlNodeIdentety(tableLocalName, tableNamespace);
tableSchemaInfo.ColumnsSchemaMap[idTable] = r.ChildTable;
}
}
}
}
}
// Used for inference
private void BuildIdentityMap(DataTable dataTable, XmlNameTable nameTable)
{
_tableSchemaMap = new XmlNodeIdHashtable(1);
TableSchemaInfo tableSchemaInfo = AddTableSchema(dataTable, nameTable);
if (tableSchemaInfo != null)
{
foreach (DataColumn c in dataTable.Columns)
{
// don't include auto-generated PK, FK and any hidden columns to be part of mapping
if (IsMappedColumn(c))
{
AddColumnSchema(c, nameTable, tableSchemaInfo.ColumnsSchemaMap);
}
}
}
}
// This one is used while reading data with preloaded schema
private void BuildIdentityMap(XmlNameTable nameTable, DataTable dataTable)
{
ArrayList tableList = GetSelfAndDescendants(dataTable); // Get list of tables we're loading
// This includes our table and
// related tables tree
_tableSchemaMap = new XmlNodeIdHashtable(tableList.Count);
// Create hash table to hold all
// tables to load.
foreach (DataTable t in tableList)
{ // For each table
TableSchemaInfo tableSchemaInfo = AddTableSchema(nameTable, t);
// Create schema info
if (tableSchemaInfo != null)
{
foreach (DataColumn c in t.Columns)
{ // Add column information
// don't include auto-generated PK, FK and any hidden columns to be part of mapping
if (IsMappedColumn(c))
{
AddColumnSchema(nameTable, c, tableSchemaInfo.ColumnsSchemaMap);
}
}
foreach (DataRelation r in t.ChildRelations)
{ // Add nested tables information
if (r.Nested)
{ // Is it nested?
                            // don't include non-nested tables
                            // Handle namespaces and names as usual
string _tableLocalName = XmlConvert.EncodeLocalName(r.ChildTable.TableName);
string tableLocalName = nameTable.Get(_tableLocalName);
if (tableLocalName == null)
{
tableLocalName = nameTable.Add(_tableLocalName);
}
string tableNamespace = nameTable.Get(r.ChildTable.Namespace);
if (tableNamespace == null)
{
tableNamespace = nameTable.Add(r.ChildTable.Namespace);
}
XmlNodeIdentety idTable = new XmlNodeIdentety(tableLocalName, tableNamespace);
tableSchemaInfo.ColumnsSchemaMap[idTable] = r.ChildTable;
}
}
}
}
}
private ArrayList GetSelfAndDescendants(DataTable dt)
{ // breadth-first
ArrayList tableList = new ArrayList();
tableList.Add(dt);
int nCounter = 0;
while (nCounter < tableList.Count)
{
foreach (DataRelation childRelations in ((DataTable)tableList[nCounter]).ChildRelations)
{
if (!tableList.Contains(childRelations.ChildTable))
tableList.Add(childRelations.ChildTable);
}
nCounter++;
}
return tableList;
}
// Used to infer schema and top most node
public object GetColumnSchema(XmlNode node, bool fIgnoreNamespace)
{
Debug.Assert(node != null, "Argument validation");
TableSchemaInfo tableSchemaInfo = null;
XmlNode nodeRegion = (node.NodeType == XmlNodeType.Attribute) ? ((XmlAttribute)node).OwnerElement : node.ParentNode;
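            // Walk up the ancestor chain until we reach an element that maps to a known
            // table, then resolve the column against that table's column schema map.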
do
{
if (nodeRegion == null || nodeRegion.NodeType != XmlNodeType.Element)
{
return null;
}
tableSchemaInfo = (TableSchemaInfo)(fIgnoreNamespace ? _tableSchemaMap[nodeRegion.LocalName] : _tableSchemaMap[nodeRegion]);
nodeRegion = nodeRegion.ParentNode;
} while (tableSchemaInfo == null);
if (fIgnoreNamespace)
return tableSchemaInfo.ColumnsSchemaMap[node.LocalName];
else
return tableSchemaInfo.ColumnsSchemaMap[node];
}
public object GetColumnSchema(DataTable table, XmlReader dataReader, bool fIgnoreNamespace)
{
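            // Cache the last table's schema info so that consecutive reads from the same
            // table (the common case while loading rows) skip the hash table lookup.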
if ((_lastTableSchemaInfo == null) || (_lastTableSchemaInfo.TableSchema != table))
{
_lastTableSchemaInfo = (TableSchemaInfo)(fIgnoreNamespace ? _tableSchemaMap[table.EncodedTableName] : _tableSchemaMap[table]);
}
if (fIgnoreNamespace)
return _lastTableSchemaInfo.ColumnsSchemaMap[dataReader.LocalName];
return _lastTableSchemaInfo.ColumnsSchemaMap[dataReader];
}
// Used to infer schema
public object GetSchemaForNode(XmlNode node, bool fIgnoreNamespace)
{
TableSchemaInfo tableSchemaInfo = null;
if (node.NodeType == XmlNodeType.Element)
{ // If element
tableSchemaInfo = (TableSchemaInfo)(fIgnoreNamespace ? _tableSchemaMap[node.LocalName] : _tableSchemaMap[node]);
} // Look up table schema info for it
if (tableSchemaInfo != null)
{ // Got info ?
return tableSchemaInfo.TableSchema; // Yes, Return table
}
return GetColumnSchema(node, fIgnoreNamespace); // Attempt to locate column
}
public DataTable GetTableForNode(XmlReader node, bool fIgnoreNamespace)
{
TableSchemaInfo tableSchemaInfo = (TableSchemaInfo)(fIgnoreNamespace ? _tableSchemaMap[node.LocalName] : _tableSchemaMap[node]);
if (tableSchemaInfo != null)
{
_lastTableSchemaInfo = tableSchemaInfo;
return _lastTableSchemaInfo.TableSchema;
}
return null;
}
private void HandleSpecialColumn(DataColumn col, XmlNameTable nameTable, XmlNodeIdHashtable columns)
{
            // if the column name starts with xml, we encode it manually and add it for lookup
Debug.Assert(col.ColumnName.StartsWith("xml", StringComparison.OrdinalIgnoreCase), "column name should start with xml");
string tempColumnName;
if ('x' == col.ColumnName[0])
{
tempColumnName = "_x0078_"; // lower case xml... -> _x0078_ml...
}
else
{
tempColumnName = "_x0058_"; // upper case Xml... -> _x0058_ml...
}
tempColumnName += col.ColumnName.Substring(1);
if (nameTable.Get(tempColumnName) == null)
{
nameTable.Add(tempColumnName);
}
string columnNamespace = nameTable.Get(col.Namespace);
XmlNodeIdentety idColumn = new XmlNodeIdentety(tempColumnName, columnNamespace);
columns[idColumn] = col;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Net.Http.Headers;
namespace EmsApi.Client.V2
{
/// <summary>
/// The configuration to use when talking to the EMS API. This may be
/// modified after the <seealso cref="EmsApiService"/> has been created
/// by setting the <seealso cref="EmsApiService.ServiceConfig"/> property.
/// </summary>
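    /// <example>
    /// A minimal construction sketch; the endpoint and credentials shown are placeholders:
    /// <code>
    /// var config = new EmsApiServiceConfiguration( useEnvVars: false )
    /// {
    ///     Endpoint = "https://ems.example.com/api",
    ///     UserName = "someUser",
    ///     Password = "somePassword"
    /// };
    /// </code>
    /// </example>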
public class EmsApiServiceConfiguration
{
/// <summary>
/// Creates a new instance of the configuration with the given endpoint.
/// </summary>
/// <param name="useEnvVars">
/// When true, system environment variables will be used to substitute certain
/// parameters when the configuration is first constructed.
/// </param>
public EmsApiServiceConfiguration( bool useEnvVars = true )
{
UseCompression = true;
ThrowExceptionOnAuthFailure = true;
ThrowExceptionOnApiFailure = true;
if( useEnvVars )
LoadEnvironmentVariables();
}
/// <summary>
/// The API endpoint to connect to. This may be substituted by the "EmsApiEndpoint"
/// environment variable.
/// </summary>
public string Endpoint { get; set; }
/// <summary>
/// The user name to use for authentication. This may be substituted by the "EmsApiUsername"
/// environment variable.
/// </summary>
public string UserName { get; set; }
/// <summary>
/// The password to use for authentication. This may be substituted by the "EmsApiPassword"
/// environment variable, which should contain a base64 encoded version of the password.
/// </summary>
public string Password { get; set; }
/// <summary>
/// The API Client id to use for trusted authentication. This may be substituted by the "EmsApiClientId"
/// environment variable.
///
/// You will need to also set ApiClientSecret and a TrustedAuthName and TrustedAuthValue in the CallContext
/// (although TrustedAuthName can be set here in config if it is the same for this service configuration).
///
/// You can set BOTH UserName+Password AND ApiClientId+ApiClientSecret. If you do that then any CallContexts
/// which do NOT include TrustedAuthValue being set will use the Password Authentication path (and the
/// username and password). Any CallContexts which DO include TrustedAuthValue being set will use the Trusted
/// Authentication path.
/// </summary>
public string ApiClientId { get; set; }
/// <summary>
/// The API Client secret to use for trusted authentication. This may be substituted by the "EmsApiClientSecret"
/// environment variable, which should contain a base64 encoded version of the client secret.
/// See <seealso cref="ApiClientId"/> for other trusted authentication requirements.
/// </summary>
public string ApiClientSecret { get; set; }
/// <summary>
/// The property name to search in EFOQA classic AD as part of trusted authentication. This may be substituted
/// by the "EmsApiTrustedAuthName" environment variable.
/// This can instead be set in the <seealso cref="CallContext.TrustedAuthName"/> if you wish to make it a
/// per-call setting. That setting will override whatever is set here.
/// </summary>
public string TrustedAuthName { get; set; }
/// <summary>
/// The application name to pass along to the EMS API. This is used for logging on the
/// server side.
/// </summary>
public string ApplicationName { get; set; }
/// <summary>
/// The user agent header to pass along to the EMS API.
/// </summary>
public string UserAgent
{
get
{
Version version = System.Reflection.Assembly.GetExecutingAssembly().GetName().Version;
return $"EmsApi.Client v{version.Major}.{version.Minor}.{version.Build}";
}
}
/// <summary>
/// When true, gzip compression will be used for responses on routes that support it.
/// This is enabled by default. Responses are automatically decompressed by the library,
/// so there's no advantage to disabling this unless you are running in a CPU constrained
/// scenario.
/// </summary>
public bool UseCompression { get; set; }
/// <summary>
/// When true, the <seealso cref="EmsApiService"/> will throw an exception for
/// authentication failures. This is the default behavior, because opting out
/// of exceptions requires implementing additional callback functions. Callbacks
/// are always executed regardless of this setting.
/// </summary>
public bool ThrowExceptionOnAuthFailure { get; set; }
/// <summary>
/// When true, the <seealso cref="EmsApiService"/> will throw an exception for
/// any low level API failures. This is the default behavior, because opting out
/// of exceptions requires implementing additional callback functions. Callbacks
/// are always executed regardless of this setting.
/// </summary>
public bool ThrowExceptionOnApiFailure { get; set; }
/// <summary>
/// When true, the <seealso cref="EmsApiService"/> will throw an exception for
/// any API calls which do *not* have a CallContext passed into them.
/// This is useful in some scenarios where you want to enforce callers to provide a
/// call context to be more explicit.
/// </summary>
public bool RequireCallContext { get; set; }
/// <summary>
        /// Any custom headers that should be appended to a request. These are appended
/// at the time of making the request so they can be altered on a per request basis.
/// This is a good place to set the "X-Adi-Client-Username" and "X-Adi-Correlation-Id"
/// headers. It's up to the application performing the requests to add and remove these
/// headers.
/// </summary>
public Dictionary<string, string> CustomHeaders { get; set; }
/// <summary>
/// Adds the default headers into the given header collection.
/// </summary>
public void AddDefaultRequestHeaders( HttpRequestHeaders headerCollection )
{
headerCollection.Add( HttpHeaderNames.UserAgent, UserAgent );
// Optional application name.
if( !string.IsNullOrEmpty( ApplicationName ) )
headerCollection.Add( HttpHeaderNames.ApplicationName, ApplicationName );
// Optional compression header.
if( UseCompression )
headerCollection.Add( HttpHeaderNames.AcceptEncoding, "gzip" );
}
/// <summary>
/// Returns true if the configuration is valid, or false if not.
/// </summary>
/// <param name="error">
/// The reason the configuration is invalid.
/// </param>
public bool Validate( out string error )
{
error = null;
// Make sure the user did not set the endpoint back to null.
if( string.IsNullOrEmpty( Endpoint ) )
{
error = "The API endpoint is not set.";
return false;
}
// Make sure if we have a username we have a password.
bool usernameSet = !string.IsNullOrEmpty( UserName );
if( usernameSet )
{
if( string.IsNullOrEmpty( Password ) )
{
error = "A password was not provided for the given username.";
return false;
}
}
// Make sure if we have a client id that we have a client secret.
bool clientIdSet = !string.IsNullOrEmpty( ApiClientId );
if( clientIdSet )
{
if( string.IsNullOrEmpty( ApiClientSecret ) )
{
error = "An API client secret was not provided for the given API client id.";
return false;
}
}
// Validate we have some form of authentication specified.
if( !usernameSet && !clientIdSet )
{
error = "Either a username and password or API client id and secret must be provided.";
return false;
}
return true;
}
/// <summary>
/// Loads some well-known environment variables into the current configuration.
/// </summary>
private void LoadEnvironmentVariables()
{
string endpoint = Environment.GetEnvironmentVariable( "EmsApiEndpoint" );
string user = Environment.GetEnvironmentVariable( "EmsApiUsername" );
string base64pass = Environment.GetEnvironmentVariable( "EmsApiPassword" );
string clientId = Environment.GetEnvironmentVariable( "EmsApiClientId" );
string base64ClientSecret = Environment.GetEnvironmentVariable( "EmsApiClientSecret" );
string clientTrustedAuthName = Environment.GetEnvironmentVariable( "EmsApiTrustedAuthName" );
if( !string.IsNullOrWhiteSpace( endpoint ) )
Endpoint = endpoint.Trim();
if( !string.IsNullOrWhiteSpace( user ) )
UserName = user.Trim();
if( !string.IsNullOrWhiteSpace( base64pass ) )
{
byte[] passBytes = Convert.FromBase64String( base64pass.Trim() );
Password = System.Text.Encoding.UTF8.GetString( passBytes );
}
if( !string.IsNullOrWhiteSpace( clientId ) )
                ApiClientId = clientId.Trim();
if( !string.IsNullOrWhiteSpace( base64ClientSecret ) )
{
byte[] secretBytes = Convert.FromBase64String( base64ClientSecret.Trim() );
ApiClientSecret = System.Text.Encoding.UTF8.GetString( secretBytes );
}
if( !string.IsNullOrWhiteSpace( clientTrustedAuthName ) )
TrustedAuthName = clientTrustedAuthName.Trim();
}
/// <summary>
        /// Return a copy of the configuration.
/// </summary>
public EmsApiServiceConfiguration Clone()
{
return new EmsApiServiceConfiguration
{
Endpoint = Endpoint,
UserName = UserName,
Password = Password,
ApiClientId = ApiClientId,
ApiClientSecret = ApiClientSecret,
TrustedAuthName = TrustedAuthName,
                ApplicationName = ApplicationName,
                UseCompression = UseCompression,
ThrowExceptionOnApiFailure = ThrowExceptionOnApiFailure,
ThrowExceptionOnAuthFailure = ThrowExceptionOnAuthFailure,
RequireCallContext = RequireCallContext,
CustomHeaders = CustomHeaders
};
}
}
}
| |
/*
Copyright 2012 Michael Edwards
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
//-CRE-
using System;
using Glass.Mapper.Sc.Configuration;
using Glass.Mapper.Sc.DataMappers;
using NUnit.Framework;
using Sitecore.Data;
namespace Glass.Mapper.Sc.Tests.DataMappers
{
[TestFixture]
public class SitecoreFieldEnumMapperFixture : AbstractMapperFixture
{
#region Method - GetField
[Test]
public void GetField_FieldContainsValidEnum_ReturnsEnum()
{
//Assign
string fieldValue = "Value1";
StubEnum expected = StubEnum.Value1;
var fieldId = Guid.NewGuid();
var item = Helpers.CreateFakeItem(fieldId, fieldValue);
var field = item.Fields[new ID(fieldId)];
var config = new SitecoreFieldConfiguration();
config.PropertyInfo = typeof (Stub).GetProperty("Property");
var mapper = new SitecoreFieldEnumMapper();
//Act
var result = (StubEnum)mapper.GetField(field, config, null);
//Assert
Assert.AreEqual(expected, result);
}
[Test]
public void GetField_FieldContainsValidEnumInteger_ReturnsEnum()
{
//Assign
string fieldValue = "2";
StubEnum expected = StubEnum.Value2;
var fieldId = Guid.NewGuid();
var item = Helpers.CreateFakeItem(fieldId, fieldValue);
var field = item.Fields[new ID(fieldId)];
var config = new SitecoreFieldConfiguration();
config.PropertyInfo = typeof(Stub).GetProperty("Property");
var mapper = new SitecoreFieldEnumMapper();
//Act
var result = (StubEnum)mapper.GetField(field, config, null);
//Assert
Assert.AreEqual(expected, result);
}
[Test]
        public void GetField_FieldContainsEmptyString_ThrowsMapperException()
{
//Assign
string fieldValue = string.Empty;
var fieldId = Guid.NewGuid();
var item = Helpers.CreateFakeItem(fieldId, fieldValue);
var field = item.Fields[new ID(fieldId)];
var config = new SitecoreFieldConfiguration();
config.PropertyInfo = typeof(Stub).GetProperty("Property");
var mapper = new SitecoreFieldEnumMapper();
//Act
var result = (StubEnum)mapper.GetField(field, config, null);
//Assert
Assert.AreEqual(StubEnum.Value1, result);
}
[Test]
[ExpectedException(typeof (MapperException))]
        public void GetField_FieldContainsInvalidEnum_ThrowsException()
{
//Assign
string fieldValue = "hello world";
var fieldId = Guid.NewGuid();
var item = Helpers.CreateFakeItem(fieldId, fieldValue);
var field = item.Fields[new ID(fieldId)];
var config = new SitecoreFieldConfiguration();
config.PropertyInfo = typeof(Stub).GetProperty("Property");
var mapper = new SitecoreFieldEnumMapper();
//Act
var result = (StubEnum)mapper.GetField(field, config, null);
//Assert
}
#endregion
#region Method - SetField
[Test]
public void SetField_ObjectisValidEnum_SetsFieldValue()
{
//Assign
string expected = "Value2";
StubEnum objectValue = StubEnum.Value2;
var fieldId = Guid.NewGuid();
var item = Helpers.CreateFakeItem(fieldId, string.Empty);
var field = item.Fields[new ID(fieldId)];
var config = new SitecoreFieldConfiguration();
config.PropertyInfo = typeof(Stub).GetProperty("Property");
var mapper = new SitecoreFieldEnumMapper();
item.Editing.BeginEdit();
//Act
mapper.SetField(field, objectValue, config, null);
//Assert
Assert.AreEqual(expected, field.Value);
}
[Test]
[ExpectedException(typeof (ArgumentException))]
        public void SetField_ObjectIsString_ThrowsException()
{
//Assign
string objectValue = "hello world";
var fieldId = Guid.NewGuid();
var item = Helpers.CreateFakeItem(fieldId, string.Empty);
var field = item.Fields[new ID(fieldId)];
var config = new SitecoreFieldConfiguration();
config.PropertyInfo = typeof(Stub).GetProperty("Property");
var mapper = new SitecoreFieldEnumMapper();
//Act
mapper.SetField(field, objectValue, config, null);
//Assert
}
#endregion
#region Method - CanHandle
[Test]
public void CanHandle_PropertyIsEnum_ReturnsTrue()
{
//Assign
var config = new SitecoreFieldConfiguration();
config.PropertyInfo = typeof (Stub).GetProperty("Property");
var mapper = new SitecoreFieldEnumMapper();
//Act
var result = mapper.CanHandle(config, null);
//Assert
Assert.IsTrue(result);
}
[Test]
        public void CanHandle_PropertyIsNotEnum_ReturnsFalse()
{
//Assign
var config = new SitecoreFieldConfiguration();
config.PropertyInfo = typeof(Stub).GetProperty("PropertyNotEnum");
var mapper = new SitecoreFieldEnumMapper();
//Act
var result = mapper.CanHandle(config, null);
//Assert
Assert.IsFalse(result);
}
#endregion
#region Stub
public enum StubEnum
{
            Value1 = 1,
Value2 = 2
}
public class Stub
{
public StubEnum Property { get; set; }
public string PropertyNotEnum { get; set; }
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// See the LICENSE file in the project root for more information.
//
// System.Configuration.ConfigurationManagerTest.cs - Unit tests
// for System.Configuration.ConfigurationManager.
//
// Author:
// Chris Toshok <[email protected]>
// Atsushi Enomoto <[email protected]>
//
// Copyright (C) 2005-2006 Novell, Inc (http://www.novell.com)
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
using System;
using System.Collections.Specialized;
using System.Configuration;
using System.IO;
using Xunit;
using SysConfig = System.Configuration.Configuration;
namespace MonoTests.System.Configuration
{
using Util;
public class ConfigurationManagerTest
{
[Fact] // OpenExeConfiguration (ConfigurationUserLevel)
[ActiveIssue("dotnet/corefx #19384", TargetFrameworkMonikers.NetFramework)]
public void OpenExeConfiguration1_UserLevel_None()
{
SysConfig config = ConfigurationManager.OpenExeConfiguration(ConfigurationUserLevel.None);
FileInfo fi = new FileInfo(config.FilePath);
Assert.Equal(TestUtil.ThisConfigFileName, fi.Name);
}
[Fact]
public void OpenExeConfiguration1_UserLevel_PerUserRoaming()
{
string applicationData = Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData);
            // If there is no ApplicationData folder, PerUserRoaming won't work
if (string.IsNullOrEmpty(applicationData)) return;
SysConfig config = ConfigurationManager.OpenExeConfiguration(ConfigurationUserLevel.PerUserRoaming);
Assert.False(string.IsNullOrEmpty(config.FilePath), "should have some file path");
FileInfo fi = new FileInfo(config.FilePath);
Assert.Equal("user.config", fi.Name);
}
[Fact]
[ActiveIssue(15065, TestPlatforms.AnyUnix)]
public void OpenExeConfiguration1_UserLevel_PerUserRoamingAndLocal()
{
SysConfig config = ConfigurationManager.OpenExeConfiguration(ConfigurationUserLevel.PerUserRoamingAndLocal);
FileInfo fi = new FileInfo(config.FilePath);
Assert.Equal("user.config", fi.Name);
}
[Fact] // OpenExeConfiguration (String)
public void OpenExeConfiguration2()
{
using (var temp = new TempDirectory())
{
string exePath;
SysConfig config;
exePath = Path.Combine(temp.Path, "DoesNotExist.whatever");
File.Create(exePath).Close();
config = ConfigurationManager.OpenExeConfiguration(exePath);
Assert.Equal(exePath + ".config", config.FilePath);
exePath = Path.Combine(temp.Path, "SomeExecutable.exe");
File.Create(exePath).Close();
config = ConfigurationManager.OpenExeConfiguration(exePath);
Assert.Equal(exePath + ".config", config.FilePath);
exePath = Path.Combine(temp.Path, "Foo.exe.config");
File.Create(exePath).Close();
config = ConfigurationManager.OpenExeConfiguration(exePath);
Assert.Equal(exePath + ".config", config.FilePath);
}
}
[Fact] // OpenExeConfiguration (String)
public void OpenExeConfiguration2_ExePath_DoesNotExist()
{
using (var temp = new TempDirectory())
{
string exePath = Path.Combine(temp.Path, "DoesNotExist.exe");
ConfigurationErrorsException ex = Assert.Throws<ConfigurationErrorsException>(
() => ConfigurationManager.OpenExeConfiguration(exePath));
// An error occurred loading a configuration file:
// The parameter 'exePath' is invalid
Assert.Equal(typeof(ConfigurationErrorsException), ex.GetType());
Assert.Null(ex.Filename);
Assert.NotNull(ex.InnerException);
Assert.Equal(0, ex.Line);
Assert.NotNull(ex.Message);
// The parameter 'exePath' is invalid
ArgumentException inner = ex.InnerException as ArgumentException;
Assert.NotNull(inner);
Assert.Equal(typeof(ArgumentException), inner.GetType());
Assert.Null(inner.InnerException);
Assert.NotNull(inner.Message);
Assert.Equal("exePath", inner.ParamName);
}
}
[Fact]
[ActiveIssue("dotnet/corefx #18831", TargetFrameworkMonikers.NetFramework)]
public void exePath_UserLevelNone()
{
string name = TestUtil.ThisApplicationPath;
SysConfig config = ConfigurationManager.OpenExeConfiguration(name);
Assert.Equal(TestUtil.ThisApplicationPath + ".config", config.FilePath);
}
[Fact]
public void exePath_UserLevelPerRoaming()
{
string applicationData = Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData);
            // If there is no ApplicationData folder, PerUserRoaming won't work
if (string.IsNullOrEmpty(applicationData)) return;
Assert.True(Directory.Exists(applicationData), "application data should exist");
SysConfig config = ConfigurationManager.OpenExeConfiguration(ConfigurationUserLevel.PerUserRoaming);
string filePath = config.FilePath;
Assert.False(string.IsNullOrEmpty(filePath), "should have some file path");
Assert.True(filePath.StartsWith(applicationData), "#1:" + filePath);
Assert.Equal("user.config", Path.GetFileName(filePath));
}
[Fact]
[ActiveIssue(15066, TestPlatforms.AnyUnix)]
public void exePath_UserLevelPerRoamingAndLocal()
{
SysConfig config = ConfigurationManager.OpenExeConfiguration(ConfigurationUserLevel.PerUserRoamingAndLocal);
string filePath = config.FilePath;
string applicationData = Environment.GetFolderPath(Environment.SpecialFolder.LocalApplicationData);
Assert.True(filePath.StartsWith(applicationData), "#1:" + filePath);
Assert.Equal("user.config", Path.GetFileName(filePath));
}
[Fact]
public void mapped_UserLevelNone()
{
ExeConfigurationFileMap map = new ExeConfigurationFileMap();
map.ExeConfigFilename = "execonfig";
SysConfig config = ConfigurationManager.OpenMappedExeConfiguration(map, ConfigurationUserLevel.None);
FileInfo fi = new FileInfo(config.FilePath);
Assert.Equal("execonfig", fi.Name);
}
[Fact]
public void mapped_UserLevelPerRoaming()
{
ExeConfigurationFileMap map = new ExeConfigurationFileMap();
map.ExeConfigFilename = "execonfig";
map.RoamingUserConfigFilename = "roaminguser";
SysConfig config = ConfigurationManager.OpenMappedExeConfiguration(map, ConfigurationUserLevel.PerUserRoaming);
FileInfo fi = new FileInfo(config.FilePath);
Assert.Equal("roaminguser", fi.Name);
}
[Fact]
// Doesn't pass on Mono
// [Category("NotWorking")]
public void mapped_UserLevelPerRoaming_no_execonfig()
{
ExeConfigurationFileMap map = new ExeConfigurationFileMap();
map.RoamingUserConfigFilename = "roaminguser";
Assert.Throws<ArgumentException>(() => ConfigurationManager.OpenMappedExeConfiguration(map, ConfigurationUserLevel.PerUserRoaming));
}
[Fact]
public void mapped_UserLevelPerRoamingAndLocal()
{
ExeConfigurationFileMap map = new ExeConfigurationFileMap();
map.ExeConfigFilename = "execonfig";
map.RoamingUserConfigFilename = "roaminguser";
map.LocalUserConfigFilename = "localuser";
SysConfig config = ConfigurationManager.OpenMappedExeConfiguration(map, ConfigurationUserLevel.PerUserRoamingAndLocal);
FileInfo fi = new FileInfo(config.FilePath);
Assert.Equal("localuser", fi.Name);
}
[Fact]
// Doesn't pass on Mono
// [Category("NotWorking")]
public void mapped_UserLevelPerRoamingAndLocal_no_execonfig()
{
ExeConfigurationFileMap map = new ExeConfigurationFileMap();
map.RoamingUserConfigFilename = "roaminguser";
map.LocalUserConfigFilename = "localuser";
Assert.Throws<ArgumentException>(() => ConfigurationManager.OpenMappedExeConfiguration(map, ConfigurationUserLevel.PerUserRoamingAndLocal));
}
[Fact]
// Doesn't pass on Mono
// [Category("NotWorking")]
public void mapped_UserLevelPerRoamingAndLocal_no_roaminguser()
{
ExeConfigurationFileMap map = new ExeConfigurationFileMap();
map.ExeConfigFilename = "execonfig";
map.LocalUserConfigFilename = "localuser";
Assert.Throws<ArgumentException>(() => ConfigurationManager.OpenMappedExeConfiguration(map, ConfigurationUserLevel.PerUserRoamingAndLocal));
}
[Fact]
public void MachineConfig()
{
SysConfig config = ConfigurationManager.OpenMachineConfiguration();
FileInfo fi = new FileInfo(config.FilePath);
Assert.Equal("machine.config", fi.Name);
}
[Fact]
public void mapped_MachineConfig()
{
ConfigurationFileMap map = new ConfigurationFileMap();
map.MachineConfigFilename = "machineconfig";
SysConfig config = ConfigurationManager.OpenMappedMachineConfiguration(map);
FileInfo fi = new FileInfo(config.FilePath);
Assert.Equal("machineconfig", fi.Name);
}
[Fact]
// Doesn't pass on Mono
// [Category("NotWorking")]
[ActiveIssue("dotnet/corefx #19384", TargetFrameworkMonikers.NetFramework)]
public void mapped_ExeConfiguration_null()
{
SysConfig config = ConfigurationManager.OpenMappedExeConfiguration(null, ConfigurationUserLevel.None);
FileInfo fi = new FileInfo(config.FilePath);
Assert.Equal(TestUtil.ThisConfigFileName, fi.Name);
}
[Fact]
// Doesn't pass on Mono
// [Category("NotWorking")]
public void mapped_MachineConfig_null()
{
SysConfig config = ConfigurationManager.OpenMappedMachineConfiguration(null);
FileInfo fi = new FileInfo(config.FilePath);
Assert.Equal("machine.config", fi.Name);
}
[Fact]
public void GetSectionReturnsNativeObject()
{
Assert.True(ConfigurationManager.GetSection("appSettings") is NameValueCollection);
}
[Fact] // Test for bug #3412
// Doesn't pass on Mono
// [Category("NotWorking")]
public void TestAddRemoveSection()
{
const string name = "testsection";
var config = ConfigurationManager.OpenExeConfiguration(ConfigurationUserLevel.None);
// ensure not present
if (config.Sections.Get(name) != null)
{
config.Sections.Remove(name);
}
// add
config.Sections.Add(name, new TestSection());
// remove
var section = config.Sections.Get(name);
Assert.NotNull(section);
Assert.NotNull(section as TestSection);
config.Sections.Remove(name);
// add
config.Sections.Add(name, new TestSection());
// remove
section = config.Sections.Get(name);
Assert.NotNull(section);
Assert.NotNull(section as TestSection);
config.Sections.Remove(name);
}
[Fact]
public void TestFileMap()
{
using (var temp = new TempDirectory())
{
string configPath = Path.Combine(temp.Path, Path.GetRandomFileName() + ".config");
Assert.False(File.Exists(configPath));
var map = new ExeConfigurationFileMap();
map.ExeConfigFilename = configPath;
var config = ConfigurationManager.OpenMappedExeConfiguration(
map, ConfigurationUserLevel.None);
config.Sections.Add("testsection", new TestSection());
config.Save();
Assert.True(File.Exists(configPath), "#1");
Assert.True(File.Exists(Path.GetFullPath(configPath)), "#2");
}
}
[Fact]
public void TestContext()
{
var config = ConfigurationManager.OpenExeConfiguration(ConfigurationUserLevel.None);
const string name = "testsection";
// ensure not present
if (config.GetSection(name) != null)
config.Sections.Remove(name);
var section = new TestContextSection();
// Can't access EvaluationContext ....
Assert.Throws<ConfigurationErrorsException>(() => section.TestContext(null));
// ... until it's been added to a section.
config.Sections.Add(name, section);
section.TestContext("#2");
// Remove ...
config.Sections.Remove(name);
// ... and it doesn't lose its context
section.TestContext(null);
}
[Fact]
public void TestContext2()
{
using (var temp = new TempDirectory())
{
string configPath = Path.Combine(temp.Path, Path.GetRandomFileName() + ".config");
Assert.False(File.Exists(configPath));
var map = new ExeConfigurationFileMap();
map.ExeConfigFilename = configPath;
var config = ConfigurationManager.OpenMappedExeConfiguration(
map, ConfigurationUserLevel.None);
config.Sections.Add("testsection", new TestSection());
config.Sections.Add("testcontext", new TestContextSection());
config.Save();
Assert.True(File.Exists(configPath), "#1");
}
}
class TestSection : ConfigurationSection { }
class TestContextSection : ConfigurationSection
{
public void TestContext(string label)
{
Assert.NotNull(EvaluationContext);
}
}
[Fact]
public void BadConfig()
{
using (var temp = new TempDirectory())
{
string xml = @" badXml";
var file = Path.Combine(temp.Path, "badConfig.config");
File.WriteAllText(file, xml);
var fileMap = new ConfigurationFileMap(file);
Assert.Equal(file,
Assert.Throws<ConfigurationErrorsException>(() => ConfigurationManager.OpenMappedMachineConfiguration(fileMap)).Filename);
}
}
}
}
| |
using System;
using System.Collections;
using System.Data.SqlClient;
using System.IO;
using System.Web.UI;
using System.Web.UI.HtmlControls;
using System.Web.UI.WebControls;
using Rainbow.Framework;
using Rainbow.Framework.Data;
using Rainbow.Framework.Site.Configuration;
using Rainbow.Framework.Site.Data;
using Rainbow.Framework.Content.Data;
using Rainbow.Framework.Users.Data;
using Rainbow.Framework.Helpers;
using Rainbow.Framework.Web.UI;
using Label = System.Web.UI.WebControls.Label;
using Page = System.Web.UI.Page;
namespace Rainbow.Content.Web.Modules
{
/// <summary>
    /// User Defined Table module - Edit page part
/// Written by: Shaun Walker (IbuySpy Workshop)
/// Moved into Rainbow by Jakob Hansen, [email protected]
/// </summary>
[Rainbow.Framework.History("Ender", "2003/03/18", "Added file and Xsl functionality")]
[Rainbow.Framework.History("[email protected]", "2004/05/28", "Added image and file selection by dropdown functionality")]
public partial class UserDefinedTableEdit : EditItemPage
{
private int UserDefinedRowID = -1;
//protected string prefix = "_ctl0:";
protected string prefix = "";
/// <summary>
/// The Page_Load event on this Page is used to ...
/// </summary>
/// <param name="sender">The source of the event.</param>
/// <param name="e">The <see cref="T:System.EventArgs"/> instance containing the event data.</param>
private void Page_Load(object sender, EventArgs e)
{
            if ( Request.Params["UserDefinedRowID"] != null )
UserDefinedRowID = Int32.Parse(Request.Params["UserDefinedRowID"].ToString());
BuildTable();
if ( Page.IsPostBack == false )
{
if ( UserDefinedRowID != -1 )
{
Control tb;
UserDefinedTableDB objUserDefinedTable = new UserDefinedTableDB();
SqlDataReader dr = objUserDefinedTable.GetSingleUserDefinedRow(UserDefinedRowID, ModuleID);
try
{
while (dr.Read())
{
tb = tblFields.FindControl(dr["FieldTitle"].ToString());
if(tb != null )
{
if (tb.GetType() == typeof(TextBox))
((TextBox) tb).Text = dr["FieldValue"].ToString();
if (tb.GetType() == typeof(DropDownList))
if (((DropDownList) tb).Items.Count >0)
{
try{((DropDownList) tb).Items.FindByValue(dr["FieldValue"].ToString()).Selected = true;}
catch{((DropDownList) tb).Items[0].Selected = true;}
}
}
}
}
finally
{
dr.Close();
}
}
else
{
this.deleteButton.Visible = false;
}
}
}
/// <summary>
/// Set the module guids with free access to this page
/// </summary>
/// <value>The allowed modules.</value>
protected override ArrayList AllowedModules
{
get
{
ArrayList al = new ArrayList();
al.Add ("2502DB18-B580-4F90-8CB4-C15E6E531021");
return al;
}
}
/// <summary>
/// Builds the table.
/// </summary>
private void BuildTable()
{
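            // One table row is built per user-defined field: "String" fields get a
            // multi-line TextBox, "File"/"Image" fields get a DropDownList of existing
            // files plus an upload control, and anything else gets a single-line TextBox.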
UserDefinedTableDB objUserDefinedTable = new UserDefinedTableDB();
TableRow objRow;
TableCell objCell;
SqlDataReader dr = objUserDefinedTable.GetUserDefinedFields(ModuleID);
try
{
while (dr.Read())
{
objRow = new TableRow();
objCell = new TableCell();
objCell.Controls.Add(new LiteralControl(dr["FieldTitle"].ToString() + ":"));
objCell.CssClass = "SubHead";
objRow.Cells.Add(objCell);
objCell = new TableCell();
switch(dr["FieldType"].ToString())
{
case "String":
{
TextBox objTextBox = new TextBox();
objTextBox.ID = dr["FieldTitle"].ToString();
objTextBox.Columns = 50;
objTextBox.Rows = 5;
objTextBox.TextMode = TextBoxMode.MultiLine;
objTextBox.CssClass = "NormalTextBox";
objCell.Controls.Add(objTextBox);
}
break;
case "File": case "Image":
{
DropDownList imageList = new DropDownList();
imageList.ID = dr["FieldTitle"].ToString();
//add a default empty entry
imageList.Items.Add(new ListItem("[---------------------------]",string.Empty));
HtmlInputFile fileInputBox = new HtmlInputFile();
fileInputBox.ID = dr["FieldTitle"].ToString()+ "_Upload";
fileInputBox.Size = 30;
string pathToFiles = string.Empty;
string [] fileArray = new string[0];
if (dr["FieldType"].ToString() =="Image")
{
// get the path to the files
pathToFiles = Server.MapPath(((SettingItem) moduleSettings["ImagePath"]).FullPath) + "\\";
// retrieving a list of files for the dropdownbox
fileArray = IOHelper.GetFiles(pathToFiles,"*.jpg;*.png;*.gif");
//set the accept variable on the input element
fileInputBox.Attributes.Add("accept","image/*");
}
else
{
// get the path to the files
pathToFiles = Server.MapPath(((SettingItem) moduleSettings["DocumentPath"]).FullPath) + "\\";
// retrieving a list of files for the dropdownbox
fileArray = IOHelper.GetFiles(pathToFiles,"*.*");
}
//now fill the dropdown box
foreach (string entry in fileArray )
imageList.Items.Add(entry.Substring(entry.LastIndexOf("\\") + 1 ));
imageList.DataBind();
imageList.Attributes.Add("onChange",dr["FieldTitle"].ToString()+ "_Upload.value='';");
objCell.Controls.Add(imageList);
objCell.Controls.Add(new LiteralControl (" "));
fileInputBox.Attributes.Add("onChange",dr["FieldTitle"].ToString()+".selectedIndex=0;");
objCell.Controls.Add(fileInputBox);
}
break;
default:
{
TextBox objTextBox = new TextBox();
objTextBox.ID = dr["FieldTitle"].ToString();
objTextBox.Columns = 50;
objTextBox.CssClass = "NormalTextBox";
objCell.Controls.Add(objTextBox);
}
break;
}
objRow.Cells.Add(objCell);
tblFields.Rows.Add(objRow);
}
}
finally
{
dr.Close();
}
}
/// <summary>
/// The UpdateBtn_Click event handler on this Page is used to either
/// create or update a row. It uses the Rainbow.UserDefinedTableDB()
/// data component to encapsulate all data functionality.
/// </summary>
/// <param name="e">The <see cref="T:System.EventArgs"/> instance containing the event data.</param>
protected override void OnUpdate(EventArgs e)
{
// Calling base we check if the user has rights on updating
base.OnUpdate(e);
UserDefinedTableDB objUserDefinedTable = new UserDefinedTableDB();
bool ValidInput = true;
string strMessage = string.Empty;
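            // First pass: validate every posted value against its declared field type
            // (Int32, Decimal, DateTime, Boolean) before anything is written back.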
SqlDataReader dr = objUserDefinedTable.GetUserDefinedFields(ModuleID);
try
{
while (dr.Read())
{
//if ( Request.Form[prefix + dr["FieldTitle"]] != null && Request.Form[prefix + dr["FieldTitle"]].ToString().Length != 0 )
if ( Request.Form[prefix + dr["FieldTitle"]] != null)
{
switch (dr["FieldType"].ToString())
{
case "Int32":
try
{
int obj = int.Parse(Request.Form[prefix + dr["FieldTitle"].ToString()]);
}
catch
{
strMessage += "<br>" + dr["FieldTitle"].ToString() + " "+General.GetString("UDT_VALIDINTEGER", "must contain a valid integer value");
ValidInput = false;
}
break;
case "Decimal":
try
{
Decimal obj = Decimal.Parse(Request.Form[prefix + dr["FieldTitle"].ToString()]);
}
catch
{
strMessage += "<br>" + dr["FieldTitle"].ToString() + " "+General.GetString("UDT_VALIDDECIMAL", "must contain a valid decimal value");
ValidInput = false;
}
break;
case "DateTime":
try
{
DateTime obj = DateTime.Parse(Request.Form[prefix + dr["FieldTitle"].ToString()]);
}
catch
{
strMessage += "<br>" + dr["FieldTitle"].ToString() + " "+General.GetString("UDT_VALIDDATE", "must contain a valid date value");
ValidInput = false;
}
break;
case "Boolean":
try
{
bool obj = bool.Parse(Request.Form[prefix + dr["FieldTitle"].ToString()]);
}
catch
{
strMessage += "<br>" + dr["FieldTitle"].ToString() + " "+General.GetString("UDT_VALIDBOOLEAN", "must contain a valid true/false value");
ValidInput = false;
}
break;
}
}
}
}
finally
{
dr.Close();
}
if ( ValidInput )
{
if ( UserDefinedRowID == -1 )
UserDefinedRowID = objUserDefinedTable.AddUserDefinedRow(ModuleID, out UserDefinedRowID);
dr = objUserDefinedTable.GetUserDefinedFields(ModuleID);
try
{
while (dr.Read())
{
string fieldValue = Request.Form[prefix + dr["FieldTitle"].ToString()];
if(dr["FieldType"].ToString() == "File" || dr["FieldType"].ToString() == "Image")
{
HtmlInputFile fileControl = (HtmlInputFile)Page.FindControl(prefix + dr["FieldTitle"].ToString()+ "_Upload");
if (fileControl.PostedFile.ContentLength > 0 )
{
fieldValue = fileControl.PostedFile.FileName.Substring(fileControl.PostedFile.FileName.LastIndexOf("\\") + 1);
string pathToSave=string.Empty ;
if (dr["FieldType"].ToString() == "Image")
pathToSave = Server.MapPath(((SettingItem) moduleSettings["ImagePath"]).FullPath) + "\\";
else
pathToSave = Server.MapPath(((SettingItem) moduleSettings["DocumentPath"]).FullPath) + "\\";
try
{
fileControl.PostedFile.SaveAs(pathToSave + fieldValue);
}
catch(DirectoryNotFoundException ex)
{
// If the directory is not found, create and then save
Directory.CreateDirectory(pathToSave);
//System.IO.File.Delete(pathToSave + fieldValue);
fileControl.PostedFile.SaveAs(pathToSave + fieldValue);
                                // This line is here to suppress the warning
ex.ToString();
}
}
}
objUserDefinedTable.UpdateUserDefinedData(UserDefinedRowID, int.Parse(dr["UserDefinedFieldID"].ToString()), fieldValue);
}
}
finally
{
dr.Close();
}
objUserDefinedTable.UpdateUserDefinedRow(UserDefinedRowID);
// Redirect back to the portal home page
this.RedirectBackToReferringPage();
}
else
{
lblMessage.Text = strMessage;
}
}
/// <summary>
/// The DeleteBtn_Click event handler on this Page is used to delete
/// a row. It uses the Rainbow.UserDefinedTableDB() data component to
/// encapsulate all data functionality.
/// </summary>
/// <param name="e">The <see cref="T:System.EventArgs"/> instance containing the event data.</param>
override protected void OnDelete(EventArgs e)
{
// Calling base we check if the user has rights on deleting
            base.OnDelete(e);
if (UserDefinedRowID != -1)
{
UserDefinedTableDB objUserDefinedTable = new UserDefinedTableDB();
objUserDefinedTable.DeleteUserDefinedRow(UserDefinedRowID);
}
// Redirect back to the portal home page
this.RedirectBackToReferringPage();
}
#region Web Form Designer generated code
/// <summary>
/// Raises OnInitEvent
/// </summary>
/// <param name="e">An <see cref="T:System.EventArgs"></see> that contains the event data.</param>
protected override void OnInit(EventArgs e)
{
this.Load += new EventHandler(this.Page_Load);
base.OnInit(e);
}
#endregion
}
}
| |
using JetBrains.Annotations;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Text.RegularExpressions;
namespace ExcelFormulaParser
{
public class ExcelFormula : IList<ExcelFormulaToken>
{
const char QuoteDouble = '"';
const char QuoteSingle = '\'';
const char BracketClose = ']';
const char BracketOpen = '[';
const char BraceOpen = '{';
const char BraceClose = '}';
const char ParenOpen = '(';
const char ParenClose = ')';
const char Semicolon = ';';
const char Whitespace = ' ';
const char Comma = ',';
const char ErrorStart = '#';
const string OperatorsSn = "+-";
const string OperatorsInfix = "+-*/^&=><";
const string OperatorsPostfix = "%";
internal static readonly string[] ExcelErrors = { "#NULL!", "#DIV/0!", "#VALUE!", "#REF!", "#NAME?", "#NUM!", "#N/A" };
internal static readonly string[] ComparatorsMulti = { ">=", "<=", "<>" };
private readonly string _formula;
private List<ExcelFormulaToken> _tokens = new List<ExcelFormulaToken>();
/// <summary>
/// Gets the number of ExcelFormulaToken which are parsed for this ExcelFormula.
/// </summary>
public int Count => _tokens.Count;
/// <summary>
/// Gets a value indicating whether this ExcelFormula is read-only.
/// </summary>
public bool IsReadOnly => true;
/// <summary>
/// The ExcelFormula.
/// </summary>
public string Formula => _formula;
/// <summary>
/// The optional context for this formula, can be anything, e.g. the Sheet or Workbook.
/// </summary>
public IExcelFormulaContext Context { get; }
/// <summary>
/// Gets or sets the ExcelFormulaToken at the specified index.
/// </summary>
/// <param name="index">The zero-based index of the element to get or set.</param>
/// <returns>The ExcelFormulaToken at the specified index.</returns>
public ExcelFormulaToken this[int index]
{
get => _tokens[index];
set => throw new NotSupportedException();
}
/// <summary>
/// Constructs a ExcelFormula.
/// </summary>
/// <param name="formula">The Excel formula.</param>
/// <param name="context">The optional context (can be anything, e.g. the Sheet or Workbook)</param>
public ExcelFormula([NotNull] string formula, [CanBeNull] IExcelFormulaContext context = null)
{
if (string.IsNullOrEmpty(formula))
{
                throw new ArgumentException("The formula cannot be null or empty.", nameof(formula));
}
_formula = formula.Trim();
Context = context;
ParseToTokens();
}
public int IndexOf(ExcelFormulaToken item)
{
return _tokens.IndexOf(item);
}
public void Insert(int index, ExcelFormulaToken item)
{
throw new NotSupportedException();
}
public void RemoveAt(int index)
{
throw new NotSupportedException();
}
public void Add(ExcelFormulaToken item)
{
throw new NotSupportedException();
}
public void Clear()
{
throw new NotSupportedException();
}
public bool Contains(ExcelFormulaToken item)
{
return _tokens.Contains(item);
}
public bool Remove(ExcelFormulaToken item)
{
throw new NotSupportedException();
}
public void CopyTo(ExcelFormulaToken[] array, int arrayIndex)
{
_tokens.CopyTo(array, arrayIndex);
}
public IEnumerator<ExcelFormulaToken> GetEnumerator()
{
return _tokens.GetEnumerator();
}
IEnumerator IEnumerable.GetEnumerator()
{
return GetEnumerator();
}
private void ParseToTokens()
{
// No attempt is made to verify formulas; assumes formulas are derived from Excel, where
// they can only exist if valid; stack overflows/underflows sunk as nulls without exceptions.
if (_formula.Length < 2 || _formula[0] != '=')
{
return;
}
var tokens1 = new ExcelFormulaTokens();
var stack = new ExcelFormulaStack();
bool inString = false;
bool inPath = false;
bool inRange = false;
bool inError = false;
int index = 1;
string value = string.Empty;
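            // "value" accumulates the characters of the token currently being built; the
            // in* flags track which quoting/escaping context the scanner is inside.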
while (index < _formula.Length)
{
// state-dependent character evaluation (order is important)
// double-quoted strings
// embeds are doubled
// end marks token
if (inString)
{
if (_formula[index] == QuoteDouble)
{
if (index + 2 <= _formula.Length && _formula[index + 1] == QuoteDouble)
{
value += QuoteDouble;
index++;
}
else
{
inString = false;
tokens1.Add(new ExcelFormulaToken(value, ExcelFormulaTokenType.Operand, ExcelFormulaTokenSubtype.Text));
value = string.Empty;
}
}
else
{
value += _formula[index];
}
index++;
continue;
}
// single-quoted strings (links)
                // embeds are doubled
// end does not mark a token
if (inPath)
{
if (_formula[index] == QuoteSingle)
{
if (index + 2 <= _formula.Length && _formula[index + 1] == QuoteSingle)
{
value += QuoteSingle;
index++;
}
else
{
inPath = false;
}
}
else
{
value += _formula[index];
}
index++;
continue;
}
                // bracketed strings (R1C1 range index or linked workbook name)
// no embeds (changed to "()" by Excel)
// end does not mark a token
if (inRange)
{
if (_formula[index] == BracketClose)
{
inRange = false;
}
value += _formula[index];
index++;
continue;
}
// error values
// end marks a token, determined from absolute list of values
if (inError)
{
value += _formula[index];
index++;
if (Array.IndexOf(ExcelErrors, value) != -1)
{
inError = false;
tokens1.Add(new ExcelFormulaToken(value, ExcelFormulaTokenType.Operand, ExcelFormulaTokenSubtype.Error));
value = string.Empty;
}
continue;
}
// scientific notation check
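                // e.g. while accumulating "1.5E" a following '+' or '-' belongs to the
                // number ("1.5E+10") rather than being an infix operator, so keep
                // building the operand instead of emitting a token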
if (OperatorsSn.IndexOf(_formula[index]) != -1)
{
if (value.Length > 1)
{
if (Regex.IsMatch(value, @"^[1-9]{1}(\.[0-9]+)?E{1}$"))
{
value += _formula[index];
index++;
continue;
}
}
}
// independent character evaluation (order not important)
// establish state-dependent character evaluations
if (_formula[index] == QuoteDouble)
{
if (value.Length > 0)
{
// unexpected
tokens1.Add(new ExcelFormulaToken(value, ExcelFormulaTokenType.Unknown));
value = string.Empty;
}
inString = true;
index++;
continue;
}
if (_formula[index] == QuoteSingle)
{
if (value.Length > 0)
{
// unexpected
tokens1.Add(new ExcelFormulaToken(value, ExcelFormulaTokenType.Unknown));
value = string.Empty;
}
inPath = true;
index++;
continue;
}
if (_formula[index] == BracketOpen)
{
inRange = true;
value += BracketOpen;
index++;
continue;
}
if (_formula[index] == ErrorStart)
{
if (value.Length > 0)
{
// unexpected
tokens1.Add(new ExcelFormulaToken(value, ExcelFormulaTokenType.Unknown));
value = string.Empty;
}
inError = true;
value += ErrorStart;
index++;
continue;
}
// mark start and end of arrays and array rows
if (_formula[index] == BraceOpen)
{
if (value.Length > 0)
{
// unexpected
tokens1.Add(new ExcelFormulaToken(value, ExcelFormulaTokenType.Unknown));
value = string.Empty;
}
stack.Push(tokens1.Add(new ExcelFormulaToken("ARRAY", ExcelFormulaTokenType.Function, ExcelFormulaTokenSubtype.Start)));
stack.Push(tokens1.Add(new ExcelFormulaToken("ARRAYROW", ExcelFormulaTokenType.Function, ExcelFormulaTokenSubtype.Start)));
index++;
continue;
}
if (_formula[index] == Semicolon)
{
if (value.Length > 0)
{
tokens1.Add(new ExcelFormulaToken(value, ExcelFormulaTokenType.Operand));
value = string.Empty;
}
tokens1.Add(stack.Pop());
tokens1.Add(new ExcelFormulaToken(",", ExcelFormulaTokenType.Argument));
stack.Push(tokens1.Add(new ExcelFormulaToken("ARRAYROW", ExcelFormulaTokenType.Function, ExcelFormulaTokenSubtype.Start)));
index++;
continue;
}
if (_formula[index] == BraceClose)
{
if (value.Length > 0)
{
tokens1.Add(new ExcelFormulaToken(value, ExcelFormulaTokenType.Operand));
value = string.Empty;
}
tokens1.Add(stack.Pop());
tokens1.Add(stack.Pop());
index++;
continue;
}
// trim white-space
if (_formula[index] == Whitespace)
{
if (value.Length > 0)
{
tokens1.Add(new ExcelFormulaToken(value, ExcelFormulaTokenType.Operand));
value = string.Empty;
}
tokens1.Add(new ExcelFormulaToken(string.Empty, ExcelFormulaTokenType.Whitespace));
index++;
while (index < _formula.Length && _formula[index] == Whitespace)
{
index++;
}
continue;
}
// multi-character comparators
if (index + 2 <= _formula.Length)
{
if (Array.IndexOf(ComparatorsMulti, _formula.Substring(index, 2)) != -1)
{
if (value.Length > 0)
{
tokens1.Add(new ExcelFormulaToken(value, ExcelFormulaTokenType.Operand));
value = string.Empty;
}
tokens1.Add(new ExcelFormulaToken(_formula.Substring(index, 2), ExcelFormulaTokenType.OperatorInfix, ExcelFormulaTokenSubtype.Logical));
index += 2;
continue;
}
}
// standard infix operators
if (OperatorsInfix.IndexOf(_formula[index]) != -1)
{
if (value.Length > 0)
{
tokens1.Add(new ExcelFormulaToken(value, ExcelFormulaTokenType.Operand));
value = string.Empty;
}
tokens1.Add(new ExcelFormulaToken(_formula[index].ToString(), ExcelFormulaTokenType.OperatorInfix));
index++;
continue;
}
// standard postfix operators (only one)
if (OperatorsPostfix.IndexOf(_formula[index]) != -1)
{
if (value.Length > 0)
{
tokens1.Add(new ExcelFormulaToken(value, ExcelFormulaTokenType.Operand));
value = string.Empty;
}
tokens1.Add(new ExcelFormulaToken(_formula[index].ToString(), ExcelFormulaTokenType.OperatorPostfix));
index++;
continue;
}
// start subexpression or function
if (_formula[index] == ParenOpen)
{
if (value.Length > 0)
{
stack.Push(tokens1.Add(new ExcelFormulaToken(value, ExcelFormulaTokenType.Function, ExcelFormulaTokenSubtype.Start)));
value = string.Empty;
}
else
{
stack.Push(tokens1.Add(new ExcelFormulaToken(String.Empty, ExcelFormulaTokenType.Subexpression, ExcelFormulaTokenSubtype.Start)));
}
index++;
continue;
}
// function, subexpression, or array parameters, or operand unions
if (_formula[index] == Comma)
{
if (value.Length > 0)
{
tokens1.Add(new ExcelFormulaToken(value, ExcelFormulaTokenType.Operand));
value = string.Empty;
}
if (stack.Current.Type != ExcelFormulaTokenType.Function)
{
tokens1.Add(new ExcelFormulaToken(",", ExcelFormulaTokenType.OperatorInfix, ExcelFormulaTokenSubtype.Union));
}
else
{
tokens1.Add(new ExcelFormulaToken(",", ExcelFormulaTokenType.Argument));
}
index++;
continue;
}
// stop subexpression
if (_formula[index] == ParenClose)
{
if (value.Length > 0)
{
tokens1.Add(new ExcelFormulaToken(value, ExcelFormulaTokenType.Operand));
value = string.Empty;
}
tokens1.Add(stack.Pop());
index++;
continue;
}
// token accumulation
value += _formula[index];
index++;
}
// dump remaining accumulation
if (value.Length > 0)
{
tokens1.Add(new ExcelFormulaToken(value, ExcelFormulaTokenType.Operand));
}
// move tokenList to new set, excluding unnecessary white-space tokens and converting necessary ones to intersections
var tokens2 = new ExcelFormulaTokens();
while (tokens1.MoveNext())
{
ExcelFormulaToken token = tokens1.Current;
if (token == null)
{
continue;
}
if (token.Type != ExcelFormulaTokenType.Whitespace)
{
tokens2.Add(token);
continue;
}
if (tokens1.BOF || tokens1.EOF)
{
continue;
}
ExcelFormulaToken previous = tokens1.Previous;
if (previous == null)
{
continue;
}
if (!(
previous.Type == ExcelFormulaTokenType.Function &&
previous.Subtype == ExcelFormulaTokenSubtype.Stop ||
previous.Type == ExcelFormulaTokenType.Subexpression &&
previous.Subtype == ExcelFormulaTokenSubtype.Stop ||
previous.Type == ExcelFormulaTokenType.Operand
))
{
continue;
}
ExcelFormulaToken next = tokens1.Next;
if (next == null)
{
continue;
}
if (!(
next.Type == ExcelFormulaTokenType.Function && next.Subtype == ExcelFormulaTokenSubtype.Start ||
next.Type == ExcelFormulaTokenType.Subexpression &&
next.Subtype == ExcelFormulaTokenSubtype.Start ||
next.Type == ExcelFormulaTokenType.Operand
)
)
{
continue;
}
tokens2.Add(new ExcelFormulaToken(string.Empty, ExcelFormulaTokenType.OperatorInfix, ExcelFormulaTokenSubtype.Intersection));
}
// move tokens to final list, switching infix "-" operators to prefix when appropriate, switching infix "+" operators
// to noop when appropriate, identifying operand and infix-operator subtypes, and pulling "@" from function names
_tokens = new List<ExcelFormulaToken>(tokens2.Count);
while (tokens2.MoveNext())
{
ExcelFormulaToken token = tokens2.Current;
if (token == null)
{
continue;
}
ExcelFormulaToken previous = tokens2.Previous;
ExcelFormulaToken next = tokens2.Next;
if (token.Type == ExcelFormulaTokenType.OperatorInfix && token.Value == "-")
{
if (tokens2.BOF)
{
token.Type = ExcelFormulaTokenType.OperatorPrefix;
}
else if (
previous.Type == ExcelFormulaTokenType.Function &&
previous.Subtype == ExcelFormulaTokenSubtype.Stop ||
previous.Type == ExcelFormulaTokenType.Subexpression &&
previous.Subtype == ExcelFormulaTokenSubtype.Stop ||
previous.Type == ExcelFormulaTokenType.OperatorPostfix ||
previous.Type == ExcelFormulaTokenType.Operand
)
{
token.Subtype = ExcelFormulaTokenSubtype.Math;
}
else
{
token.Type = ExcelFormulaTokenType.OperatorPrefix;
}
_tokens.Add(token);
continue;
}
if (token.Type == ExcelFormulaTokenType.OperatorInfix && token.Value == "+")
{
if (tokens2.BOF)
{
continue;
}
if (
previous.Type == ExcelFormulaTokenType.Function &&
previous.Subtype == ExcelFormulaTokenSubtype.Stop ||
previous.Type == ExcelFormulaTokenType.Subexpression &&
previous.Subtype == ExcelFormulaTokenSubtype.Stop ||
previous.Type == ExcelFormulaTokenType.OperatorPostfix ||
previous.Type == ExcelFormulaTokenType.Operand
)
{
token.Subtype = ExcelFormulaTokenSubtype.Math;
}
else
{
continue;
}
_tokens.Add(token);
continue;
}
if (token.Type == ExcelFormulaTokenType.OperatorInfix && token.Subtype == ExcelFormulaTokenSubtype.Nothing)
{
if ("<>=".IndexOf(token.Value.Substring(0, 1), StringComparison.Ordinal) != -1)
{
token.Subtype = ExcelFormulaTokenSubtype.Logical;
}
else if (token.Value == "&")
{
token.Subtype = ExcelFormulaTokenSubtype.Concatenation;
}
else
{
token.Subtype = ExcelFormulaTokenSubtype.Math;
}
_tokens.Add(token);
continue;
}
if (token.Type == ExcelFormulaTokenType.Operand && token.Subtype == ExcelFormulaTokenSubtype.Nothing)
{
double d;
bool isNumber = double.TryParse(token.Value, NumberStyles.Any, CultureInfo.InvariantCulture, out d);
if (!isNumber)
{
if (token.Value == "TRUE" || token.Value == "FALSE")
{
token.Subtype = ExcelFormulaTokenSubtype.Logical;
}
else
{
token.Subtype = ExcelFormulaTokenSubtype.Range;
}
}
else
{
token.Subtype = ExcelFormulaTokenSubtype.Number;
}
_tokens.Add(token);
continue;
}
if (token.Type == ExcelFormulaTokenType.Function)
{
if (token.Value.Length > 0)
{
if (token.Value.Substring(0, 1) == "@")
{
token.Value = token.Value.Substring(1);
}
}
}
_tokens.Add(token);
}
}
}
}
#region Copyright (c) 2004 Ian Davis and James Carlyle
/*------------------------------------------------------------------------------
COPYRIGHT AND PERMISSION NOTICE
Copyright (c) 2004 Ian Davis and James Carlyle
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
------------------------------------------------------------------------------*/
#endregion
namespace SemPlan.Spiral.Tests.Core {
using NUnit.Framework;
using SemPlan.Spiral.Core;
using SemPlan.Spiral.Expressions;
using SemPlan.Spiral.Utility;
using System;
using System.Collections;
/// <summary>
/// Programmer tests for QuerySolver class
/// </summary>
/// <remarks>
/// $Id: QuerySolverTest.cs,v 1.12 2006/03/08 22:42:36 ian Exp $
///</remarks>
[TestFixture]
public abstract class QuerySolverTest {
public abstract TripleStore MakeNewTripleStore();
public IEnumerator GetSolutions(Query query, TripleStore tripleStore) {
return GetSolutions(query, tripleStore, false);
}
public abstract IEnumerator GetSolutions(Query query, TripleStore tripleStore, bool explain);
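// Subclasses supply a concrete TripleStore and solver via the two factory methods above;
// every test below is written against those hooks so the same query semantics are
// exercised for each solver implementation.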
[Test]
public void SingleStatementSingleVariable() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new Variable("var") ) );
Query query = builder.GetQuery();
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/obj")), solution1["var"] );
Assert.AreEqual( false, solutions.MoveNext() );
statements.Clear();
}
[Test]
public void SingleStatementTwoVariables() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new Variable("var1"), new UriRef("http://example.com/property"), new Variable("var2") ) );
Query query = builder.GetQuery();
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
Assert.AreEqual( false, solutions.MoveNext() );
statements.Clear();
}
[Test]
public void MultipleStatementsSingleVariableSingleSolution() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
statements.Add( new Statement( new UriRef("http://example.com/other"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/other"), new UriRef("http://example.com/obj") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new Variable("var") ) );
Query query = builder.GetQuery();
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/obj")), solution1["var"] );
Assert.AreEqual( false, solutions.MoveNext() );
statements.Clear();
}
[Test]
public void MultipleStatementsSingleVariableTwoSolutions() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj1") ) );
statements.Add( new Statement( new UriRef("http://example.com/other"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj2") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new Variable("var") ) );
Query query = builder.GetQuery();
QuerySolution expectedSolution1 = new QuerySolution();
expectedSolution1["var"] = statements.GetResourceDenotedBy(new UriRef("http://example.com/obj1"));
QuerySolution expectedSolution2 = new QuerySolution();
expectedSolution2["var"] = statements.GetResourceDenotedBy(new UriRef("http://example.com/obj2"));
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution2 = (QuerySolution)solutions.Current;
Assert.AreEqual( false, solutions.MoveNext() );
Assert.IsTrue( expectedSolution1.Equals(solution1) && expectedSolution2.Equals(solution2)
|| expectedSolution1.Equals(solution2) && expectedSolution2.Equals(solution1)
);
statements.Clear();
}
[Test]
public void MultipleStatementsTwoPatternsSingleSolution() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/child1"), new UriRef("http://example.com/childOf"), new UriRef("http://example.com/mum") ) );
statements.Add( new Statement( new UriRef("http://example.com/child1"), new UriRef("http://example.com/childOf"), new UriRef("http://example.com/dad") ) );
statements.Add( new Statement( new UriRef("http://example.com/child2"), new UriRef("http://example.com/childOf"), new UriRef("http://example.com/mum") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new Variable("child") , new UriRef("http://example.com/childOf"), new UriRef("http://example.com/mum")) );
builder.AddPattern( new Pattern( new Variable("child"), new UriRef("http://example.com/childOf"), new UriRef("http://example.com/dad")) );
Query query = builder.GetQuery();
query.AddVariable( new Variable("child") );
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/child1") ), solution1["child"] );
Assert.AreEqual( false, solutions.MoveNext() );
statements.Clear();
}
[Test]
public void MultipleStatementsTwoChainedPatternsSingleSolution() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/child1"), new UriRef("http://example.com/childOf"), new UriRef("http://example.com/mum") ) );
statements.Add( new Statement( new UriRef("http://example.com/mum"), new UriRef("http://example.com/childOf"), new UriRef("http://example.com/gran") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new Variable("child") , new UriRef("http://example.com/childOf"), new Variable("parent")) );
builder.AddPattern( new Pattern( new Variable("parent"), new UriRef("http://example.com/childOf"), new UriRef("http://example.com/gran")) );
Query query = builder.GetQuery();
query.AddVariable( new Variable("child") );
query.AddVariable( new Variable("parent") );
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/child1") ), solution1["child"] );
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/mum") ), solution1["parent"] );
Assert.AreEqual( false, solutions.MoveNext() );
statements.Clear();
}
[Test]
public void MultipleStatementsTwoChainedPatternsOneSolutionDifferentTripleOrder() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/mum"), new UriRef("http://example.com/childOf"), new UriRef("http://example.com/gran") ) );
statements.Add( new Statement( new UriRef("http://example.com/child1"), new UriRef("http://example.com/childOf"), new UriRef("http://example.com/mum") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new Variable("child") , new UriRef("http://example.com/childOf"), new Variable("parent")) );
builder.AddPattern( new Pattern( new Variable("parent"), new UriRef("http://example.com/childOf"), new UriRef("http://example.com/gran")) );
Query query = builder.GetQuery();
query.AddVariable( new Variable("child") );
query.AddVariable( new Variable("parent") );
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/child1")), solution1["child"] );
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/mum")), solution1["parent"] );
Assert.AreEqual( false, solutions.MoveNext() );
statements.Clear();
}
[Test]
public void MultipleStatementsTwoChainedPatternsTwoSolutions() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/child2"), new UriRef("http://example.com/childOf"), new UriRef("http://example.com/mum") ) );
statements.Add( new Statement( new UriRef("http://example.com/mum"), new UriRef("http://example.com/childOf"), new UriRef("http://example.com/gran") ) );
statements.Add( new Statement( new UriRef("http://example.com/child1"), new UriRef("http://example.com/childOf"), new UriRef("http://example.com/mum") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new Variable("child") , new UriRef("http://example.com/childOf"), new Variable("parent")) );
builder.AddPattern( new Pattern( new Variable("parent"), new UriRef("http://example.com/childOf"), new UriRef("http://example.com/gran")) );
Query query = builder.GetQuery();
query.AddVariable( new Variable("child") );
query.AddVariable( new Variable("parent") );
QuerySolution expectedSolution1 = new QuerySolution();
expectedSolution1["child"] = statements.GetResourceDenotedBy(new UriRef("http://example.com/child2") );
expectedSolution1["parent"] = statements.GetResourceDenotedBy(new UriRef("http://example.com/mum") );
QuerySolution expectedSolution2 = new QuerySolution();
expectedSolution2["child"] = statements.GetResourceDenotedBy(new UriRef("http://example.com/child1") );
expectedSolution2["parent"] = statements.GetResourceDenotedBy(new UriRef("http://example.com/mum") );
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution2 = (QuerySolution)solutions.Current;
Assert.AreEqual( false, solutions.MoveNext() );
Assert.IsTrue( expectedSolution1.Equals(solution1) && expectedSolution2.Equals(solution2)
|| expectedSolution1.Equals(solution2) && expectedSolution2.Equals(solution1)
);
statements.Clear();
}
[Test]
public void MatchingPlainLiterals() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/fred"), new UriRef("http://example.com/name"), new PlainLiteral("Fred") ) );
statements.Add( new Statement( new UriRef("http://example.com/jim"), new UriRef("http://example.com/name"), new PlainLiteral("Jim") ) );
statements.Add( new Statement( new UriRef("http://example.com/jim"), new UriRef("http://example.com/knows"), new UriRef("http://example.com/fred")) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new Variable("person") , new UriRef("http://example.com/knows"), new Variable("friend")) );
builder.AddPattern( new Pattern( new Variable("person"), new UriRef("http://example.com/name"), new PlainLiteral("Jim") ) );
Query query = builder.GetQuery();
query.AddVariable( new Variable("friend") );
query.AddVariable( new Variable("person") );
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution2 = (QuerySolution)solutions.Current;
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/fred") ), solution2["friend"] );
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/jim") ), solution2["person"] );
Assert.AreEqual( false, solutions.MoveNext() );
}
[Test]
public void MatchingLiterals() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/fred"), new UriRef("http://example.com/name"), new TypedLiteral("Fred", "http://example.com/integer") ) );
statements.Add( new Statement( new UriRef("http://example.com/jim"), new UriRef("http://example.com/name"), new PlainLiteral("Jim", "en") ) );
statements.Add( new Statement( new UriRef("http://example.com/jim"), new UriRef("http://example.com/knows"), new UriRef("http://example.com/fred")) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new Variable("person") , new UriRef("http://example.com/knows"), new Variable("friend")) );
builder.AddPattern( new Pattern( new Variable("person"), new UriRef("http://example.com/name"), new PlainLiteral("Jim", "en") ) );
Query query = builder.GetQuery();
query.AddVariable( new Variable("friend") );
query.AddVariable( new Variable("person") );
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext(), "Has first solution" );
QuerySolution solution2 = (QuerySolution)solutions.Current;
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/fred") ), solution2["friend"], "?friend is fred" );
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/jim") ), solution2["person"], "?person is jim" );
Assert.AreEqual( false, solutions.MoveNext(), "Has no further solutions" );
statements.Clear();
}
[Test]
public void SelectingLiterals() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/fred"), new UriRef("http://example.com/name"), new PlainLiteral("Fred") ) );
statements.Add( new Statement( new UriRef("http://example.com/jim"), new UriRef("http://example.com/name"), new PlainLiteral("Jim") ) );
statements.Add( new Statement( new UriRef("http://example.com/jim"), new UriRef("http://example.com/knows"), new UriRef("http://example.com/fred")) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new Variable("person") , new UriRef("http://example.com/knows"), new Variable("friend")) );
builder.AddPattern( new Pattern( new Variable("person"), new UriRef("http://example.com/name"), new PlainLiteral("Jim") ) );
builder.AddPattern( new Pattern( new Variable("friend"), new UriRef("http://example.com/name"), new Variable("friendName") ) );
Query query = builder.GetQuery();
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( statements.GetResourceDenotedBy(new PlainLiteral("Fred") ), solution1["friendName"] );
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/jim") ), solution1["person"] );
Assert.AreEqual( false, solutions.MoveNext() );
statements.Clear();
}
[Test]
public void OrderOfPatternsIsNotSignificant() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement(new UriRef("http://example.com/subj1"), new UriRef("http://www.w3.org/2000/01/rdf-schema#subClassOf"), new UriRef("http://example.com/subj2")) );
statements.Add( new Statement(new UriRef("http://example.com/subj2"), new UriRef("http://www.w3.org/2000/01/rdf-schema#subClassOf"), new UriRef("http://example.com/subj3")) );
SimpleQueryBuilder builder1 = new SimpleQueryBuilder();
builder1.AddPattern( new Pattern( new Variable("vvv"), new UriRef("http://www.w3.org/2000/01/rdf-schema#subClassOf"), new Variable("xxx") ) );
builder1.AddPattern( new Pattern( new Variable("uuu"), new UriRef("http://www.w3.org/2000/01/rdf-schema#subClassOf"), new Variable("vvv") ) );
Query query1 = builder1.GetQuery();
IEnumerator solutions1 = GetSolutions(query1, statements);
Assert.AreEqual( true, solutions1.MoveNext(), "First pattern ordering gives solution" );
QuerySolution solution1 = (QuerySolution)solutions1.Current;
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/subj1") ), solution1["uuu"], "First pattern ordering has solution for variable uuu" );
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/subj3") ), solution1["xxx"], "First pattern ordering has solution for variable xxx" );
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/subj2") ), solution1["vvv"], "First pattern ordering has solution for variable vvv" );
Assert.AreEqual( false, solutions1.MoveNext(), "First pattern has no further solution" );
// Same query but order of patterns reversed
SimpleQueryBuilder builder2 = new SimpleQueryBuilder();
builder2.AddPattern( new Pattern( new Variable("uuu"), new UriRef("http://www.w3.org/2000/01/rdf-schema#subClassOf"), new Variable("vvv") ) );
builder2.AddPattern( new Pattern( new Variable("vvv"), new UriRef("http://www.w3.org/2000/01/rdf-schema#subClassOf"), new Variable("xxx") ) );
Query query2 = builder2.GetQuery();
IEnumerator solutions2 = GetSolutions(query2, statements);
Assert.AreEqual( true, solutions2.MoveNext(), "Reversed pattern ordering gives solution" );
QuerySolution solution2 = (QuerySolution)solutions2.Current;
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/subj1") ), solution2["uuu"], "Reversed pattern ordering has solution for variable uuu" );
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/subj3") ), solution2["xxx"], "Reversed pattern ordering has solution for variable xxx" );
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/subj2") ), solution2["vvv"], "Reversed pattern ordering has solution for variable vvv" );
Assert.AreEqual( false, solutions2.MoveNext() , "Reversed pattern has no further solution" );
statements.Clear();
}
[Test]
public void SolverUsesSingleDistinctVariableForSinglePattern() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/subj1"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
statements.Add( new Statement( new UriRef("http://example.com/subj2"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
statements.Add( new Statement( new UriRef("http://example.com/subj3"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new Variable("subj"), new UriRef("http://example.com/property"), new Variable("obj") ) );
Query query = builder.GetQuery();
query.AddVariable( new Variable("obj") );
query.IsDistinct = true;
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/obj")), solution1["obj"] );
Assert.AreEqual( false, solutions.MoveNext(), "Should be only a single solution" );
statements.Clear();
}
[Test]
public void SolverUsesSingleDistinctVariableForMultiplePatterns() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/subj1"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
statements.Add( new Statement( new UriRef("http://example.com/subj2"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
statements.Add( new Statement( new UriRef("http://example.com/obj"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj2") ) );
statements.Add( new Statement( new UriRef("http://example.com/obj"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj3") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new Variable("subj"), new UriRef("http://example.com/property"), new Variable("obj") ) );
builder.AddPattern( new Pattern( new Variable("obj"), new UriRef("http://example.com/property"), new Variable("obj2") ) );
Query query = builder.GetQuery();
query.AddVariable( new Variable("obj") );
query.IsDistinct = true;
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/obj")), solution1["obj"] );
Assert.AreEqual( false, solutions.MoveNext(), "Should be only a single solution" );
statements.Clear();
}
[Test]
public void NoSolutions() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new UriRef("http://example.com/subj2"), new UriRef("http://example.com/property"), new Variable("var") ) );
Query query = builder.GetQuery();
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( false, solutions.MoveNext() );
statements.Clear();
}
[Test]
public void NoPatterns() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
Query query = builder.GetQuery();
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( false, solutions.MoveNext() );
statements.Clear();
}
[Test]
public void OptionalPatternMatchesIfPossible() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/other"), new UriRef("http://example.com/obj2") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new Variable("subj"), new UriRef("http://example.com/property"), new Variable("obj") ) );
builder.AddOptional( new Pattern( new Variable("subj"), new UriRef("http://example.com/other"), new Variable("obj2") ) );
Query query = builder.GetQuery();
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/subj") ), solution1["subj"] );
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/obj") ), solution1["obj"] );
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/obj2") ), solution1["obj2"] );
Assert.AreEqual( false, solutions.MoveNext() );
statements.Clear();
}
[Test]
public void OptionalPatternDoesntFailQueryIfNoMatch() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new Variable("subj"), new UriRef("http://example.com/property"), new Variable("obj") ) );
builder.AddOptional( new Pattern( new Variable("subj"), new UriRef("http://example.com/other"), new Variable("obj2") ) );
Query query = builder.GetQuery();
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/subj") ), solution1["subj"] );
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/obj") ), solution1["obj"] );
Assert.AreEqual( false, solution1.IsBound("obj2") );
Assert.AreEqual( false, solutions.MoveNext() );
statements.Clear();
}
[Test]
public void OnlySelectedVariablesAreAvailableInSolution() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/subj1"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
statements.Add( new Statement( new UriRef("http://example.com/subj2"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
statements.Add( new Statement( new UriRef("http://example.com/subj3"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new Variable("subj"), new UriRef("http://example.com/property"), new Variable("obj") ) );
Query query = builder.GetQuery();
query.AddVariable( new Variable("obj") );
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( false, solution1.IsBound("subj") );
statements.Clear();
}
[Test]
public void QueriesInvolvingUnknownResourcesGiveNoResults() {
TripleStore statements = MakeNewTripleStore();
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern(new Pattern(new Variable("sub"), new UriRef("ex:foo"), new Variable("obj")));
builder.AddPattern(new Pattern(new Variable("obj"), new UriRef("ex:foo2"), new Variable("obj2")));
Query query = builder.GetQuery();
IEnumerator solutions = GetSolutions(query, statements);
Assert.IsNotNull( solutions );
Assert.IsFalse( solutions.MoveNext() );
}
[Test]
public void SolverPopulatesNodesInQuerySolution() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new Variable("var") ) );
Query query = builder.GetQuery();
IEnumerator solutions = GetSolutions(query, statements);
solutions.MoveNext();
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( new UriRef("http://example.com/obj"), solution1.GetNode("var") );
}
[Test]
public void AllPatternsInOptionalGroupCanMatch() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property2"), new UriRef("http://example.com/obj2") ) );
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property3"), new UriRef("http://example.com/obj3") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new Variable("subj"), new UriRef("http://example.com/property"), new Variable("obj") ) );
builder.AddOptional( new Pattern( new Variable("subj"), new UriRef("http://example.com/property2"), new Variable("obj2") ) );
builder.AddOptional( new Pattern( new Variable("subj"), new UriRef("http://example.com/property3"), new Variable("obj3") ) );
Query query = builder.GetQuery();
IEnumerator solutions = GetSolutions(query, statements );
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( new UriRef("http://example.com/obj"), solution1.GetNode("obj") );
Assert.AreEqual( new UriRef("http://example.com/obj2"), solution1.GetNode("obj2") );
Assert.AreEqual( new UriRef("http://example.com/obj3"), solution1.GetNode("obj3") );
Assert.AreEqual( false, solutions.MoveNext() );
statements.Clear();
}
[Test]
public void AllOrNoPatternsInOptionalGroupMustMatch() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property2"), new UriRef("http://example.com/obj2") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new Variable("subj"), new UriRef("http://example.com/property"), new Variable("obj") ) );
builder.AddOptional( new Pattern( new Variable("subj"), new UriRef("http://example.com/property2"), new Variable("obj2") ) );
builder.AddOptional( new Pattern( new Variable("subj"), new UriRef("http://example.com/property3"), new Variable("obj3") ) );
Query query = builder.GetQuery();
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( new UriRef("http://example.com/obj"), solution1.GetNode("obj") );
Assert.AreEqual( false, solution1.IsBound("obj2") );
Assert.AreEqual( false, solution1.IsBound("obj3") );
Assert.AreEqual( false, solutions.MoveNext() );
statements.Clear();
}
[Test]
public void AlternatePatternMatchesIfPossible() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/other"), new UriRef("http://example.com/obj2") ) );
Query query = new Query();
QueryGroupOr alternates = new QueryGroupOr();
query.QueryGroup = alternates;
QueryGroupAnd and1 = new QueryGroupAnd();
alternates.Add( and1 );
QueryGroupPatterns patterns1 = new QueryGroupPatterns();
patterns1.Add( new Pattern( new Variable("subj"), new UriRef("http://example.com/property"), new Variable("obj") ) );
and1.Add( patterns1 );
QueryGroupAnd and2 = new QueryGroupAnd();
alternates.Add( and2 );
QueryGroupPatterns patterns2 = new QueryGroupPatterns();
patterns2.Add( new Pattern( new Variable("subj"), new UriRef("http://example.com/other"), new Variable("obj2") ) );
and2.Add( patterns2 );
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext(), "Should have first solution" );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/subj") ), solution1["subj"], "First solution should have binding for subj" );
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/obj") ), solution1["obj"], "First solution should have binding for obj" );
Assert.AreEqual( false, solution1.IsBound("obj2"), "First solution should not have binding for obj2" );
Assert.AreEqual( true, solutions.MoveNext(), "Should have second solution" );
QuerySolution solution2 = (QuerySolution)solutions.Current;
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/subj") ), solution2["subj"], "Second solution should have binding for subj" );
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/obj2") ), solution2["obj2"], "Second solution should have binding for obj2" );
Assert.AreEqual( false, solution2.IsBound("obj"), "Second solution should not have binding for obj" );
Assert.AreEqual( false, solutions.MoveNext(), "Should not have third solution" );
statements.Clear();
}
[Test]
public void AlternatePatternDoesntFailQueryIfNoMatch() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
Query query = new Query();
QueryGroupOr alternates = new QueryGroupOr();
query.QueryGroup = alternates;
QueryGroupAnd and1 = new QueryGroupAnd();
alternates.Add( and1 );
QueryGroupPatterns patterns1 = new QueryGroupPatterns();
patterns1.Add( new Pattern( new Variable("subj"), new UriRef("http://example.com/property"), new Variable("obj") ) );
and1.Add( patterns1 );
QueryGroupAnd and2 = new QueryGroupAnd();
alternates.Add( and2 );
QueryGroupPatterns patterns2 = new QueryGroupPatterns();
patterns2.Add( new Pattern( new Variable("subj"), new UriRef("http://example.com/other"), new Variable("obj2") ) );
and2.Add( patterns2 );
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/subj") ), solution1["subj"], "Should have binding for subj");
Assert.AreEqual( statements.GetResourceDenotedBy(new UriRef("http://example.com/obj") ), solution1["obj"], "Should have binding for obj" );
Assert.AreEqual( false, solution1.IsBound("obj2"), "Should not have binding for obj2" );
Assert.AreEqual( false, solutions.MoveNext(), "Should not have second solution" );
statements.Clear();
}
[Test]
public void SingleConstraint() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new UriRef("http://example.com/obj") ) );
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new PlainLiteral("foo") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new Variable("var") ) );
builder.AddConstraint( new Constraint( new IsLiteral( new VariableExpression( new Variable("var") ) ) ) );
Query query = builder.GetQuery();
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( statements.GetResourceDenotedBy(new PlainLiteral("foo")), solution1["var"] );
Assert.AreEqual( false, solutions.MoveNext() );
statements.Clear();
}
/**
<remarks>
Based on this example from the spec
PREFIX foaf: <http://xmlns.com/foaf/0.1/>
PREFIX vcard: <http://www.w3.org/2001/vcard-rdf/3.0#>
SELECT ?foafName ?mbox ?gname ?fname
WHERE
{ ?x foaf:name ?foafName .
OPTIONAL { ?x foaf:mbox ?mbox } .
OPTIONAL { ?x vcard:N ?vc .
?vc vcard:Given ?gname .
OPTIONAL { ?vc vcard:Family ?fname }
}
}
</remarks>
*/
[Test]
public void NestedOptionals() {
TripleStore statements = MakeNewTripleStore();
BlankNode nodeA = new BlankNode();
BlankNode nodeB = new BlankNode();
BlankNode nodeC = new BlankNode();
BlankNode nodeD = new BlankNode();
BlankNode nodeE = new BlankNode();
BlankNode nodeF = new BlankNode();
statements.Add( new Statement( nodeA, new UriRef("http://xmlns.com/foaf/0.1/name"), new PlainLiteral("Alice") ) );
statements.Add( new Statement( nodeA, new UriRef("http://xmlns.com/foaf/0.1/mbox"), new UriRef("mailto:[email protected]") ) );
statements.Add( new Statement( nodeA, new UriRef("http://www.w3.org/2001/vcard-rdf/3.0#N"), nodeB) );
statements.Add( new Statement( nodeB, new UriRef("http://www.w3.org/2001/vcard-rdf/3.0#Family"), new PlainLiteral("Hacker") ) );
statements.Add( new Statement( nodeB, new UriRef("http://www.w3.org/2001/vcard-rdf/3.0#Given"), new PlainLiteral("Alice") ) );
statements.Add( new Statement( nodeC, new UriRef("http://xmlns.com/foaf/0.1/name"), new PlainLiteral("Bob") ) );
statements.Add( new Statement( nodeC, new UriRef("http://xmlns.com/foaf/0.1/mbox"), new UriRef("mailto:[email protected]") ) );
statements.Add( new Statement( nodeC, new UriRef("http://www.w3.org/2001/vcard-rdf/3.0#N"), nodeD) );
statements.Add( new Statement( nodeD, new UriRef("http://www.w3.org/2001/vcard-rdf/3.0#Family"), new PlainLiteral("Hacker") ) );
statements.Add( new Statement( nodeE, new UriRef("http://xmlns.com/foaf/0.1/name"), new PlainLiteral("Ella") ) );
statements.Add( new Statement( nodeE, new UriRef("http://www.w3.org/2001/vcard-rdf/3.0#N"), nodeF) );
statements.Add( new Statement( nodeF, new UriRef("http://www.w3.org/2001/vcard-rdf/3.0#Given"), new PlainLiteral("Eleanor") ) );
Query query = new Query();
query.OrderBy = new VariableExpression( new Variable("foafName" ) );
QueryGroupAnd groupAnd = new QueryGroupAnd();
QueryGroupPatterns groupRequired = new QueryGroupPatterns();
groupRequired.Add( new Pattern( new Variable("x"), new UriRef("http://xmlns.com/foaf/0.1/name"), new Variable("foafName") ) );
QueryGroupPatterns groupOptional = new QueryGroupPatterns();
groupOptional.Add( new Pattern( new Variable("x"), new UriRef("http://xmlns.com/foaf/0.1/mbox"), new Variable("mbox") ) );
groupAnd.Add( groupRequired );
groupAnd.Add( new QueryGroupOptional( groupOptional ) );
QueryGroupPatterns groupOptional2 = new QueryGroupPatterns();
groupOptional2.Add( new Pattern( new Variable("x"), new UriRef("http://www.w3.org/2001/vcard-rdf/3.0#N"), new Variable("vc") ) );
groupOptional2.Add( new Pattern( new Variable("vc"), new UriRef("http://www.w3.org/2001/vcard-rdf/3.0#Given"), new Variable("gname") ) );
QueryGroupPatterns groupOptional3 = new QueryGroupPatterns();
groupOptional3.Add( new Pattern( new Variable("vc"), new UriRef("http://www.w3.org/2001/vcard-rdf/3.0#Family"), new Variable("fname") ) );
QueryGroupAnd groupAnd3 = new QueryGroupAnd();
groupAnd3.Add( groupOptional2 );
groupAnd3.Add( new QueryGroupOptional( groupOptional3 ) );
QueryGroupAnd groupAndRoot = new QueryGroupAnd();
groupAndRoot.Add( groupAnd );
groupAndRoot.Add( new QueryGroupOptional( groupAnd3 ) );
query.QueryGroup = groupAndRoot;
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( new PlainLiteral("Alice"), solution1.GetNode("foafName") );
Assert.AreEqual( new UriRef("mailto:[email protected]"), solution1.GetNode("mbox") );
Assert.AreEqual( new PlainLiteral("Alice"), solution1.GetNode("gname") );
Assert.AreEqual( new PlainLiteral("Hacker"), solution1.GetNode("fname") );
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution2 = (QuerySolution)solutions.Current;
Assert.AreEqual( new PlainLiteral("Bob"), solution2.GetNode("foafName") );
Assert.AreEqual( new UriRef("mailto:[email protected]"), solution2.GetNode("mbox") );
Assert.AreEqual( false, solution2.IsBound("gname") );
Assert.AreEqual( false, solution2.IsBound("fname") );
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution3 = (QuerySolution)solutions.Current;
Assert.AreEqual( new PlainLiteral("Ella"), solution3.GetNode("foafName") );
Assert.AreEqual( false, solution3.IsBound("mbox") );
Assert.AreEqual( new PlainLiteral("Eleanor"), solution3.GetNode("gname") );
Assert.AreEqual( false, solution3.IsBound("fname") );
Assert.AreEqual( false, solutions.MoveNext() );
statements.Clear();
}
[Test]
public void ConstraintAppliesToOptionalBlock() {
TripleStore statements = MakeNewTripleStore();
BlankNode nodeA = new BlankNode();
BlankNode nodeB = new BlankNode();
statements.Add( new Statement( nodeA, new UriRef("http://xmlns.com/foaf/0.1/givenName"), new PlainLiteral("Alice") ) );
statements.Add( new Statement( nodeB, new UriRef("http://xmlns.com/foaf/0.1/givenName"), new PlainLiteral("Bob") ) );
statements.Add( new Statement( nodeB, new UriRef("http://purl.org/dc/elements/1.1/date"), new PlainLiteral("2005-04-04T04:04:04Z") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
Query query = new Query();
QueryGroupAnd groupAnd = new QueryGroupAnd();
QueryGroupPatterns groupRequired = new QueryGroupPatterns();
QueryGroupPatterns groupOptional = new QueryGroupPatterns();
QueryGroupConstraints groupConstraints = new QueryGroupConstraints();
query.AddVariable( new Variable("name") );
groupRequired.Add( new Pattern( new Variable("x"), new UriRef("http://xmlns.com/foaf/0.1/givenName"), new Variable("name") ) );
groupOptional.Add( new Pattern( new Variable("x"), new UriRef("http://purl.org/dc/elements/1.1/date"), new Variable("date") ) );
groupConstraints.Add( new Constraint( new Bound( new Variable("date") ) ) );
groupAnd.Add( groupRequired );
groupAnd.Add( groupOptional );
groupAnd.Add( groupConstraints );
query.QueryGroup = groupAnd;
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( new PlainLiteral("Bob"), solution1.GetNode("name") );
Assert.AreEqual( false, solutions.MoveNext(), "Should be no further solutions" );
}
[Test]
public void OptionalBlockMatchesAllPossible() {
TripleStore statements = MakeNewTripleStore();
BlankNode nodeA = new BlankNode();
BlankNode nodeB = new BlankNode();
statements.Add( new Statement( nodeA, new UriRef("http://xmlns.com/foaf/0.1/givenName"), new PlainLiteral("Alice") ) );
statements.Add( new Statement( nodeB, new UriRef("http://xmlns.com/foaf/0.1/givenName"), new PlainLiteral("Bob") ) );
statements.Add( new Statement( nodeB, new UriRef("http://purl.org/dc/elements/1.1/date"), new PlainLiteral("2005-04-04T04:04:04Z") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new Variable("x"), new UriRef("http://xmlns.com/foaf/0.1/givenName"), new Variable("name") ) );
builder.AddOptional( new Pattern( new Variable("x"), new UriRef("http://purl.org/dc/elements/1.1/date"), new Variable("date") ) );
Query query = builder.GetQuery();
query.AddVariable( new Variable("name") );
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( true, solutions.MoveNext() , "Should have second solution");
QuerySolution solution2 = (QuerySolution)solutions.Current;
Assert.AreEqual( false, solutions.MoveNext(), "Should be no further solutions" );
Assert.IsTrue(
( solution1.GetNode("name").Equals( new PlainLiteral("Bob") ) && solution2.GetNode("name").Equals( new PlainLiteral("Alice") ) )
||
( solution1.GetNode("name").Equals( new PlainLiteral("Alice") ) && solution2.GetNode("name").Equals( new PlainLiteral("Bob") ) )
);
}
[Test]
public void OrderBySimpleVariableExpressionAscending() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new PlainLiteral("c") ) );
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new PlainLiteral("a") ) );
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new PlainLiteral("b") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new Variable("var") ) );
Query query = builder.GetQuery();
query.OrderBy = new VariableExpression( new Variable("var" ) );
query.OrderDirection = Query.SortOrder.Ascending;
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( "a", solution1.GetNode("var").GetLabel() );
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution2 = (QuerySolution)solutions.Current;
Assert.AreEqual( "b", solution2.GetNode("var").GetLabel() );
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution3 = (QuerySolution)solutions.Current;
Assert.AreEqual( "c", solution3.GetNode("var").GetLabel() );
statements.Clear();
}
[Test]
public void OrderBySimpleVariableExpressionDescending() {
TripleStore statements = MakeNewTripleStore();
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new PlainLiteral("c") ) );
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new PlainLiteral("a") ) );
statements.Add( new Statement( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new PlainLiteral("b") ) );
SimpleQueryBuilder builder = new SimpleQueryBuilder();
builder.AddPattern( new Pattern( new UriRef("http://example.com/subj"), new UriRef("http://example.com/property"), new Variable("var") ) );
Query query = builder.GetQuery();
query.OrderBy = new VariableExpression( new Variable("var" ) );
query.OrderDirection = Query.SortOrder.Descending;
IEnumerator solutions = GetSolutions(query, statements);
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution1 = (QuerySolution)solutions.Current;
Assert.AreEqual( "c", solution1.GetNode("var").GetLabel() );
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution2 = (QuerySolution)solutions.Current;
Assert.AreEqual( "b", solution2.GetNode("var").GetLabel() );
Assert.AreEqual( true, solutions.MoveNext() );
QuerySolution solution3 = (QuerySolution)solutions.Current;
Assert.AreEqual( "a", solution3.GetNode("var").GetLabel() );
statements.Clear();
}
}
}
using System;
using ChainUtils.BouncyCastle.Crypto.Modes;
using ChainUtils.BouncyCastle.Crypto.Paddings;
using ChainUtils.BouncyCastle.Crypto.Parameters;
namespace ChainUtils.BouncyCastle.Crypto.Macs
{
/**
* CMAC - as specified at www.nuee.nagoya-u.ac.jp/labs/tiwata/omac/omac.html
* <p>
* CMAC is analogous to OMAC1 - see also en.wikipedia.org/wiki/CMAC
* </p><p>
* CMAC is a NIST recommendation - see
* csrc.nist.gov/CryptoToolkit/modes/800-38_Series_Publications/SP800-38B.pdf
* </p><p>
* CMAC/OMAC1 is a blockcipher-based message authentication code designed and
* analyzed by Tetsu Iwata and Kaoru Kurosawa.
* </p><p>
* CMAC/OMAC1 is a simple variant of the CBC MAC (Cipher Block Chaining Message
* Authentication Code). OMAC stands for One-Key CBC MAC.
* </p><p>
* It supports 128- or 64-bit block ciphers, with any key size, and returns
* a MAC whose length is less than or equal to the block size of the underlying
* cipher.
* </p>
*/
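// Illustrative usage (a minimal sketch; AesEngine is assumed to be available in this port under
// Crypto.Engines, and keyBytes/message are placeholders):
//
//   IMac cmac = new CMac(new AesEngine(), 128);
//   cmac.Init(new KeyParameter(keyBytes));
//   cmac.BlockUpdate(message, 0, message.Length);
//   byte[] tag = new byte[cmac.GetMacSize()];
//   cmac.DoFinal(tag, 0);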
public class CMac
: IMac
{
private const byte CONSTANT_128 = (byte)0x87;
private const byte CONSTANT_64 = (byte)0x1b;
private byte[] ZEROES;
private byte[] mac;
private byte[] buf;
private int bufOff;
private IBlockCipher cipher;
private int macSize;
private byte[] L, Lu, Lu2;
/**
* create a standard MAC based on a CBC block cipher (64 or 128 bit block).
* This will produce an authentication code the length of the block size
* of the cipher.
*
* @param cipher the cipher to be used as the basis of the MAC generation.
*/
public CMac(
IBlockCipher cipher)
: this(cipher, cipher.GetBlockSize() * 8)
{
}
/**
* create a standard MAC based on a block cipher with the size of the
    * MAC being given in bits.
* <p/>
* Note: the size of the MAC must be at least 24 bits (FIPS Publication 81),
* or 16 bits if being used as a data authenticator (FIPS Publication 113),
* and in general should be less than the size of the block cipher as it reduces
* the chance of an exhaustive attack (see Handbook of Applied Cryptography).
*
* @param cipher the cipher to be used as the basis of the MAC generation.
    * @param macSizeInBits the size of the MAC in bits; must be a multiple of 8 and at most 128.
*/
public CMac(
IBlockCipher cipher,
int macSizeInBits)
{
if ((macSizeInBits % 8) != 0)
throw new ArgumentException("MAC size must be multiple of 8");
if (macSizeInBits > (cipher.GetBlockSize() * 8))
{
throw new ArgumentException(
"MAC size must be less or equal to "
+ (cipher.GetBlockSize() * 8));
}
if (cipher.GetBlockSize() != 8 && cipher.GetBlockSize() != 16)
{
throw new ArgumentException(
"Block size must be either 64 or 128 bits");
}
this.cipher = new CbcBlockCipher(cipher);
macSize = macSizeInBits / 8;
mac = new byte[cipher.GetBlockSize()];
buf = new byte[cipher.GetBlockSize()];
ZEROES = new byte[cipher.GetBlockSize()];
bufOff = 0;
}
public string AlgorithmName
{
get { return cipher.AlgorithmName; }
}
private static int ShiftLeft(byte[] block, byte[] output)
{
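            // Shift the whole block left by one bit (most-significant byte first) and
            // return the bit that falls off the front of block[0].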
var i = block.Length;
uint bit = 0;
while (--i >= 0)
{
uint b = block[i];
output[i] = (byte)((b << 1) | bit);
bit = (b >> 7) & 1;
}
return (int)bit;
}
private static byte[] DoubleLu(byte[] input)
{
var ret = new byte[input.Length];
var carry = ShiftLeft(input, ret);
int xor = input.Length == 16 ? CONSTANT_128 : CONSTANT_64;
/*
* NOTE: This construction is an attempt at a constant-time implementation.
*/
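            // When the shift produced no carry, (1 - carry) << 3 == 8 and the byte-sized constant
            // is shifted entirely away, so nothing is XORed in; when there was a carry the constant
            // is applied unchanged. This avoids a data-dependent branch on the carry bit.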
ret[input.Length - 1] ^= (byte)(xor >> ((1 - carry) << 3));
return ret;
}
public void Init(
ICipherParameters parameters)
{
if (parameters is KeyParameter)
{
cipher.Init(true, parameters);
//initializes the L, Lu, Lu2 numbers
L = new byte[ZEROES.Length];
cipher.ProcessBlock(ZEROES, 0, L, 0);
Lu = DoubleLu(L);
Lu2 = DoubleLu(Lu);
}
else if (parameters != null)
{
// CMAC mode does not permit IV to underlying CBC mode
throw new ArgumentException("CMac mode only permits key to be set.", "parameters");
}
Reset();
}
public int GetMacSize()
{
return macSize;
}
public void Update(
byte input)
{
if (bufOff == buf.Length)
{
cipher.ProcessBlock(buf, 0, mac, 0);
bufOff = 0;
}
buf[bufOff++] = input;
}
public void BlockUpdate(
byte[] inBytes,
int inOff,
int len)
{
if (len < 0)
throw new ArgumentException("Can't have a negative input length!");
var blockSize = cipher.GetBlockSize();
var gapLen = blockSize - bufOff;
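            // If the input overflows the internal buffer, flush the buffer through the cipher and
            // then process whole blocks directly from the caller's array, keeping at most one full
            // (or partial) block buffered for DoFinal.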
if (len > gapLen)
{
Array.Copy(inBytes, inOff, buf, bufOff, gapLen);
cipher.ProcessBlock(buf, 0, mac, 0);
bufOff = 0;
len -= gapLen;
inOff += gapLen;
while (len > blockSize)
{
cipher.ProcessBlock(inBytes, inOff, mac, 0);
len -= blockSize;
inOff += blockSize;
}
}
Array.Copy(inBytes, inOff, buf, bufOff, len);
bufOff += len;
}
public int DoFinal(
byte[] outBytes,
int outOff)
{
var blockSize = cipher.GetBlockSize();
byte[] lu;
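            // A complete final block is XORed with the Lu subkey; an incomplete block is first
            // padded (ISO/IEC 7816-4: a single 0x80 byte followed by zeros) and XORed with Lu2,
            // as specified for CMAC in NIST SP 800-38B.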
if (bufOff == blockSize)
{
lu = Lu;
}
else
{
new ISO7816d4Padding().AddPadding(buf, bufOff);
lu = Lu2;
}
for (var i = 0; i < mac.Length; i++)
{
buf[i] ^= lu[i];
}
cipher.ProcessBlock(buf, 0, mac, 0);
Array.Copy(mac, 0, outBytes, outOff, macSize);
Reset();
return macSize;
}
/**
* Reset the mac generator.
*/
public void Reset()
{
/*
* clean the buffer.
*/
Array.Clear(buf, 0, buf.Length);
bufOff = 0;
/*
* Reset the underlying cipher.
*/
cipher.Reset();
}
}
}
| |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Text;
using System.IO;
using System.Windows.Forms;
#region Google_Documents_List5 specific imports
using Google.GData.Client;
using Google.GData.Extensions;
using Google.GData.Documents;
using Google.GData.Tools;
using Google.Documents;
#endregion
namespace Google_DocumentsList
{
public partial class Form1 : Form
{
private List<Document> all = new List<Document>();
private DocumentsRequest request = null;
public Form1()
{
InitializeComponent();
GoogleClientLogin loginDialog = new GoogleClientLogin(new DocumentsService("GoogleDocumentsSample"), "[email protected]");
if (loginDialog.ShowDialog() == DialogResult.OK)
{
RequestSettings settings = new RequestSettings("GoogleDocumentsSample", loginDialog.Credentials);
settings.AutoPaging = true;
settings.PageSize = 100;
if (settings != null)
{
this.request = new DocumentsRequest(settings);
this.Text = "Successfully logged in";
Feed<Document> feed = this.request.GetEverything();
// this takes care of paging the results in
foreach (Document entry in feed.Entries)
{
all.Add(entry);
}
TreeNode noFolder = null;
noFolder = new TreeNode("Items with no folder");
this.documentsView.Nodes.Add(noFolder);
noFolder.SelectedImageIndex = 0;
noFolder.ImageIndex = 0;
foreach (Document entry in all)
{
                    // let's add those with no parents to the top level
if (entry.ParentFolders.Count == 0)
{
if (entry.Type != Document.DocumentType.Folder)
{
AddToTreeView(noFolder.Nodes, entry);
}
else
{
TreeNode n = AddToTreeView(this.documentsView.Nodes, entry);
AddAllChildren(n.Nodes, entry);
}
}
}
}
}
}
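        // Recursively adds to the tree every document whose parent-folder list contains the given entry.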
private void AddAllChildren(TreeNodeCollection col, Document entry)
{
foreach (Document d in this.all)
{
if (d.ParentFolders.Contains(entry.Self))
{
TreeNode n = AddToTreeView(col, d);
AddAllChildren(n.Nodes, d);
}
}
}
private TreeNode FindEntry(TreeNodeCollection coll, Document entry)
{
foreach (TreeNode n in coll)
{
// title is not specific enough
Document d = n.Tag as Document;
if (d.Id == entry.Id)
return n;
TreeNode x = FindEntry(n.Nodes, entry);
if (x != null)
return x;
}
return null;
}
private TreeNode AddToTreeView(TreeNodeCollection parent, Document doc)
{
TreeNode node = new TreeNode(doc.Title);
node.Tag = doc;
if (doc.Type != Document.DocumentType.Folder)
{
node.ImageIndex = 2;
node.SelectedImageIndex = 2;
}
else
{
node.ImageIndex = 0;
node.SelectedImageIndex = 0;
}
parent.Add(node);
return node;
}
private TreeNodeCollection FindParentTreeNode(TreeNodeCollection coll, Document doc)
{
foreach (TreeNode n in coll)
{
Document d = n.Tag as Document;
if (doc.ParentFolders.Contains(d.Self))
{
// found it.
return n.Nodes;
}
TreeNodeCollection x = FindParentTreeNode(n.Nodes, doc);
if (x != null)
return x;
}
return null;
}
private TreeNodeCollection CreateParentTreeNode(Document doc)
{
TreeNode ret = null;
foreach (Document d in this.all)
{
if (doc.ParentFolders.Contains(d.Self))
{
TreeNodeCollection parent = null;
if (d.ParentFolders.Count != 0)
{
parent = FindParentTreeNode(this.documentsView.Nodes, d);
}
ret = AddToTreeView(parent == null ? this.documentsView.Nodes : parent, d);
return ret.Nodes;
}
}
return this.documentsView.Nodes;
}
private void documentsView_AfterExpand(object sender, TreeViewEventArgs e)
{
TreeNode node = e.Node;
Document d = node.Tag as Document;
Document.DocumentType type = d == null ? Document.DocumentType.Folder : d.Type;
if (node.Nodes.Count > 0 && type == Document.DocumentType.Folder)
{
node.SelectedImageIndex = 1;
node.ImageIndex = 1;
}
}
private void documentsView_AfterCollapse(object sender, TreeViewEventArgs e)
{
TreeNode node = e.Node;
Document d = node.Tag as Document;
Document.DocumentType type = d == null ? Document.DocumentType.Folder : d.Type;
if (node.Nodes.Count > 0 && type == Document.DocumentType.Folder)
{
node.SelectedImageIndex = 0;
node.ImageIndex = 0;
}
}
private void documentsView_AfterSelect(object sender, TreeViewEventArgs e)
{
TreeNode node = e.Node;
Document d = node.Tag as Document;
if (d != null && d.Type != Document.DocumentType.Folder)
{
this.propertyGrid1.SelectedObject = d;
if (d.Type == Document.DocumentType.PDF)
{
this.Export.Enabled = this.ShowRevisions.Enabled = false;
}
else
{
this.Export.Enabled = this.ShowRevisions.Enabled = true;
}
}
else
{
this.propertyGrid1.SelectedObject = null;
this.Export.Enabled = false;
}
}
private void Export_Click(object sender, EventArgs e)
{
TreeNode node = this.documentsView.SelectedNode;
if (node == null)
return;
Document d = node.Tag as Document;
// fill the filter based on the document type
switch (d.Type)
{
case Document.DocumentType.Presentation:
this.exportDialog.Filter = "PDF|*.pdf|Flash|*.swf|Powerpoint|*.ppt";
break;
case Document.DocumentType.Spreadsheet:
                case Document.DocumentType.Spreadsheet:
                    this.exportDialog.Filter = "PDF|*.pdf|HTML|*.html|Excel|*.xls|Comma separated|*.csv|Open Document Spreadsheet|*.ods|Tab separated|*.tsv";
break;
case Document.DocumentType.PDF:
return;
default:
                    this.exportDialog.Filter = "PDF|*.pdf|HTML|*.html|Text|*.txt|Open Document|*.ods|Rich Text|*.rtf|Microsoft Word|*.doc|Portable Network Graphics|*.png";
break;
}
if (this.exportDialog.ShowDialog() == DialogResult.OK)
{
Document.DownloadType type = Document.DownloadType.pdf;
switch (d.Type)
{
case Document.DocumentType.Presentation:
switch (this.exportDialog.FilterIndex)
{
case 2:
type = Document.DownloadType.swf;
break;
case 3:
type = Document.DownloadType.ppt;
break;
}
break;
case Document.DocumentType.Spreadsheet:
switch (this.exportDialog.FilterIndex)
{
case 2:
type = Document.DownloadType.html;
break;
case 3:
type = Document.DownloadType.xls;
break;
case 4:
type = Document.DownloadType.csv;
break;
case 5:
type = Document.DownloadType.ods;
break;
case 6:
type = Document.DownloadType.tsv;
break;
}
break;
default:
switch (this.exportDialog.FilterIndex)
{
case 2:
type = Document.DownloadType.html;
break;
case 3:
type = Document.DownloadType.txt;
break;
case 4:
type = Document.DownloadType.ods;
break;
case 5:
type = Document.DownloadType.rtf;
break;
case 6:
type = Document.DownloadType.doc;
break;
case 7:
type = Document.DownloadType.png;
break;
}
break;
}
Stream stream = this.request.Download(d, type);
Stream file = this.exportDialog.OpenFile();
if (file != null)
{
int nBytes = 2048;
int count = 0;
Byte[] arr = new Byte[nBytes];
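                    // Copy the exported document stream to the chosen file in 2 KB chunks
                    // until the source stream is exhausted.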
do
{
count = stream.Read(arr, 0, nBytes);
file.Write(arr, 0, count);
} while (count > 0);
file.Flush();
file.Close();
}
stream.Close();
}
}
private void ShowRevisions_Click(object sender, EventArgs e)
{
TreeNode documentNode = this.documentsView.SelectedNode;
if (documentNode == null)
return;
Document d = documentNode.Tag as Document;
if (d.Type == Document.DocumentType.Folder)
return;
            // we have a document and a revision link, so get that feed
Feed<Document> revFeed = this.Request.Get<Document>(d.RevisionDocument);
foreach (Document doc in revFeed.Entries)
{
TreeNode node = new TreeNode(doc.Title);
node.Tag = doc;
node.ImageIndex = 2;
node.SelectedImageIndex = 2;
TreeNode author = new TreeNode(doc.Author + ": " + doc.Updated);
author.ImageIndex = 2;
author.SelectedImageIndex = 2;
node.Nodes.Add(author);
author.Tag = doc;
documentNode.Nodes.Add(node);
documentNode.ExpandAll();
}
}
public DocumentsRequest Request
{
get
{
return this.request;
}
}
private void showPDFs_Click(object sender, EventArgs e)
{
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.IO;
using System.Net;
using System.Web;
using ServiceStack.Text;
using ServiceStack.Common.Utils;
using ServiceStack.Common.Web;
using ServiceStack.DependencyInjection;
using ServiceStack.ServiceHost;
namespace ServiceStack.WebHost.Endpoints.Extensions
{
public class HttpRequestWrapper
: IHttpRequest
{
private static readonly string physicalFilePath;
public DependencyService DependencyService { get; set; }
private readonly HttpRequest request;
static HttpRequestWrapper()
{
physicalFilePath = "~".MapHostAbsolutePath();
}
public HttpRequest Request
{
get { return request; }
}
public object OriginalRequest
{
get { return request; }
}
public HttpRequestWrapper(HttpRequest request)
: this(null, request)
{
}
public HttpRequestWrapper(string operationName, HttpRequest request)
{
this.OperationName = operationName;
this.request = request;
this.DependencyService = DependencyService;
}
public T TryResolve<T>()
{
return DependencyService == null
? EndpointHost.AppHost.TryResolve<T>()
: DependencyService.TryResolve<T>();
}
public string OperationName { get; set; }
public string ContentType
{
get { return request.ContentType; }
}
private string httpMethod;
public string HttpMethod
{
get
{
return httpMethod
?? (httpMethod = Param(HttpHeaders.XHttpMethodOverride)
?? request.HttpMethod);
}
}
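        // Resolves a named value by checking the request headers first, then the query string,
        // and finally the posted form data.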
public string Param(string name)
{
return Headers[name]
?? QueryString[name]
?? FormData[name];
}
public bool IsLocal
{
get { return request.IsLocal; }
}
public string UserAgent
{
get { return request.UserAgent; }
}
private Dictionary<string, object> items;
public Dictionary<string, object> Items
{
get
{
if (items == null)
{
items = new Dictionary<string, object>();
}
return items;
}
}
private string responseContentType;
public string ResponseContentType
{
get
{
if (responseContentType == null)
{
responseContentType = this.GetResponseContentType();
}
return responseContentType;
}
set
{
this.responseContentType = value;
}
}
private Dictionary<string, Cookie> cookies;
public IDictionary<string, Cookie> Cookies
{
get
{
if (cookies == null)
{
cookies = new Dictionary<string, Cookie>();
for (var i = 0; i < this.request.Cookies.Count; i++)
{
var httpCookie = this.request.Cookies[i];
Cookie cookie = null;
// try-catch needed as malformed cookie names (e.g. '$Version') can be returned
// from Cookie.Name, but the Cookie constructor will throw for these names.
try
{
cookie = new Cookie(httpCookie.Name, httpCookie.Value, httpCookie.Path, httpCookie.Domain)
{
HttpOnly = httpCookie.HttpOnly,
Secure = httpCookie.Secure,
Expires = httpCookie.Expires,
};
}
catch
{
                            // I don't like this: application code now has access to less data than it would
                            // when using the raw HttpRequest. At least logging the failure here would be nice.
                            // Not sure - leaving it up to you, Demis.
}
if ( cookie != null )
cookies[httpCookie.Name] = cookie;
}
}
return cookies;
}
}
public NameValueCollection Headers
{
get { return request.Headers; }
}
public NameValueCollection QueryString
{
get { return request.QueryString; }
}
public NameValueCollection FormData
{
get { return request.Form; }
}
public string GetRawBody()
{
if (bufferedStream != null)
{
return bufferedStream.ToArray().FromUtf8Bytes();
}
using (var reader = new StreamReader(InputStream))
{
return reader.ReadToEnd();
}
}
public string RawUrl
{
get { return request.RawUrl; }
}
public string AbsoluteUri
{
get
{
try
{
return request.Url.AbsoluteUri.TrimEnd('/');
}
catch (Exception)
{
//fastcgi mono, do a 2nd rounds best efforts
return "http://" + request.UserHostName + request.RawUrl;
}
}
}
public string UserHostAddress
{
get { return request.UserHostAddress; }
}
public string XForwardedFor
{
get
{
return string.IsNullOrEmpty(request.Headers[HttpHeaders.XForwardedFor]) ? null : request.Headers[HttpHeaders.XForwardedFor];
}
}
public string XRealIp
{
get
{
return string.IsNullOrEmpty(request.Headers[HttpHeaders.XRealIp]) ? null : request.Headers[HttpHeaders.XRealIp];
}
}
private string remoteIp;
public string RemoteIp
{
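            // Client address resolution order: X-Forwarded-For header, then X-Real-IP, then the
            // socket-level UserHostAddress; the result is cached after the first lookup.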
get
{
return remoteIp ?? (remoteIp = XForwardedFor ?? (XRealIp ?? request.UserHostAddress));
}
}
public bool IsSecureConnection
{
get { return request.IsSecureConnection; }
}
public string[] AcceptTypes
{
get { return request.AcceptTypes; }
}
public string PathInfo
{
get { return request.GetPathInfo(); }
}
public string UrlHostName
{
get { return request.GetUrlHostName(); }
}
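        // When enabled, the request body is read fully into a MemoryStream so that GetRawBody()
        // and InputStream can be read more than once; setting this back to false discards the buffer.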
public bool UseBufferedStream
{
get { return bufferedStream != null; }
set
{
bufferedStream = value
? bufferedStream ?? new MemoryStream(request.InputStream.ReadFully())
: null;
}
}
private MemoryStream bufferedStream;
public Stream InputStream
{
get { return bufferedStream ?? request.InputStream; }
}
public long ContentLength
{
get { return request.ContentLength; }
}
private IFile[] files;
public IFile[] Files
{
get
{
if (files == null)
{
files = new IFile[request.Files.Count];
for (var i = 0; i < request.Files.Count; i++)
{
var reqFile = request.Files[i];
files[i] = new HttpFile
{
ContentType = reqFile.ContentType,
ContentLength = reqFile.ContentLength,
FileName = reqFile.FileName,
InputStream = reqFile.InputStream,
};
}
}
return files;
}
}
public string ApplicationFilePath
{
get { return physicalFilePath; }
}
public Uri UrlReferrer
{
get { return request.UrlReferrer; }
}
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using Microsoft.Azure.Commands.Compute.Automation.Models;
using Microsoft.Azure.Management.Compute;
using Microsoft.Azure.Management.Compute.Models;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Management.Automation;
namespace Microsoft.Azure.Commands.Compute.Automation
{
public partial class InvokeAzureComputeMethodCmdlet : ComputeAutomationBaseCmdlet
{
protected object CreateImageCreateOrUpdateDynamicParameters()
{
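            // Builds the runtime-defined parameters used by InvokeAzureComputeMethodCmdlet:
            // ResourceGroupName, ImageName and Image back the "InvokeByDynamicParameters" set,
            // while ArgumentList backs the "InvokeByStaticParameters" set.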
dynamicParameters = new RuntimeDefinedParameterDictionary();
var pResourceGroupName = new RuntimeDefinedParameter();
pResourceGroupName.Name = "ResourceGroupName";
pResourceGroupName.ParameterType = typeof(string);
pResourceGroupName.Attributes.Add(new ParameterAttribute
{
ParameterSetName = "InvokeByDynamicParameters",
Position = 1,
Mandatory = true
});
pResourceGroupName.Attributes.Add(new AllowNullAttribute());
dynamicParameters.Add("ResourceGroupName", pResourceGroupName);
var pImageName = new RuntimeDefinedParameter();
pImageName.Name = "ImageName";
pImageName.ParameterType = typeof(string);
pImageName.Attributes.Add(new ParameterAttribute
{
ParameterSetName = "InvokeByDynamicParameters",
Position = 2,
Mandatory = true
});
pImageName.Attributes.Add(new AllowNullAttribute());
dynamicParameters.Add("ImageName", pImageName);
var pParameters = new RuntimeDefinedParameter();
pParameters.Name = "Image";
pParameters.ParameterType = typeof(Image);
pParameters.Attributes.Add(new ParameterAttribute
{
ParameterSetName = "InvokeByDynamicParameters",
Position = 3,
Mandatory = true
});
pParameters.Attributes.Add(new AllowNullAttribute());
dynamicParameters.Add("Image", pParameters);
var pArgumentList = new RuntimeDefinedParameter();
pArgumentList.Name = "ArgumentList";
pArgumentList.ParameterType = typeof(object[]);
pArgumentList.Attributes.Add(new ParameterAttribute
{
ParameterSetName = "InvokeByStaticParameters",
Position = 4,
Mandatory = true
});
pArgumentList.Attributes.Add(new AllowNullAttribute());
dynamicParameters.Add("ArgumentList", pArgumentList);
return dynamicParameters;
}
protected void ExecuteImageCreateOrUpdateMethod(object[] invokeMethodInputParameters)
{
string resourceGroupName = (string)ParseParameter(invokeMethodInputParameters[0]);
string imageName = (string)ParseParameter(invokeMethodInputParameters[1]);
Image parameters = (Image)ParseParameter(invokeMethodInputParameters[2]);
var result = ImagesClient.CreateOrUpdate(resourceGroupName, imageName, parameters);
WriteObject(result);
}
}
public partial class NewAzureComputeArgumentListCmdlet : ComputeAutomationBaseCmdlet
{
protected PSArgument[] CreateImageCreateOrUpdateParameters()
{
string resourceGroupName = string.Empty;
string imageName = string.Empty;
Image parameters = new Image();
return ConvertFromObjectsToArguments(
new string[] { "ResourceGroupName", "ImageName", "Parameters" },
new object[] { resourceGroupName, imageName, parameters });
}
}
[Cmdlet(VerbsCommon.New, "AzureRmImage", DefaultParameterSetName = "DefaultParameter", SupportsShouldProcess = true)]
[OutputType(typeof(PSImage))]
public partial class NewAzureRmImage : ComputeAutomationBaseCmdlet
{
protected override void ProcessRecord()
{
ExecuteClientAction(() =>
{
if (ShouldProcess(this.ImageName, VerbsCommon.New))
{
string resourceGroupName = this.ResourceGroupName;
string imageName = this.ImageName;
Image parameters = new Image();
ComputeAutomationAutoMapperProfile.Mapper.Map<PSImage, Image>(this.Image, parameters);
var result = ImagesClient.CreateOrUpdate(resourceGroupName, imageName, parameters);
var psObject = new PSImage();
ComputeAutomationAutoMapperProfile.Mapper.Map<Image, PSImage>(result, psObject);
WriteObject(psObject);
}
});
}
[Parameter(
ParameterSetName = "DefaultParameter",
Position = 1,
Mandatory = true,
ValueFromPipelineByPropertyName = true,
ValueFromPipeline = false)]
[AllowNull]
public string ResourceGroupName { get; set; }
[Parameter(
ParameterSetName = "DefaultParameter",
Position = 2,
Mandatory = true,
ValueFromPipelineByPropertyName = true,
ValueFromPipeline = false)]
[Alias("Name")]
[AllowNull]
public string ImageName { get; set; }
[Parameter(
ParameterSetName = "DefaultParameter",
Position = 3,
Mandatory = true,
ValueFromPipelineByPropertyName = false,
ValueFromPipeline = true)]
[AllowNull]
public PSImage Image { get; set; }
}
[Cmdlet(VerbsData.Update, "AzureRmImage", DefaultParameterSetName = "DefaultParameter", SupportsShouldProcess = true)]
[OutputType(typeof(PSImage))]
public partial class UpdateAzureRmImage : ComputeAutomationBaseCmdlet
{
protected override void ProcessRecord()
{
ExecuteClientAction(() =>
{
if (ShouldProcess(this.ImageName, VerbsData.Update))
{
string resourceGroupName = this.ResourceGroupName;
string imageName = this.ImageName;
Image parameters = new Image();
ComputeAutomationAutoMapperProfile.Mapper.Map<PSImage, Image>(this.Image, parameters);
var result = ImagesClient.CreateOrUpdate(resourceGroupName, imageName, parameters);
var psObject = new PSImage();
ComputeAutomationAutoMapperProfile.Mapper.Map<Image, PSImage>(result, psObject);
WriteObject(psObject);
}
});
}
[Parameter(
ParameterSetName = "DefaultParameter",
Position = 1,
Mandatory = true,
ValueFromPipelineByPropertyName = true,
ValueFromPipeline = false)]
[AllowNull]
public string ResourceGroupName { get; set; }
[Parameter(
ParameterSetName = "DefaultParameter",
Position = 2,
Mandatory = true,
ValueFromPipelineByPropertyName = true,
ValueFromPipeline = false)]
[Alias("Name")]
[AllowNull]
public string ImageName { get; set; }
[Parameter(
ParameterSetName = "DefaultParameter",
Position = 3,
Mandatory = true,
ValueFromPipelineByPropertyName = false,
ValueFromPipeline = true)]
[AllowNull]
public PSImage Image { get; set; }
}
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using Ionic.Zip;
using NUnit.Framework;
using SIL.IO;
using SIL.Reporting;
using SIL.TestUtilities;
using SIL.Windows.Forms.ClearShare;
namespace SIL.Archiving.Tests
{
[TestFixture]
[Category("Archiving")]
public class RampArchivingDlgViewModelTests
{
private RampArchivingDlgViewModel _helper;
private Dictionary<string, Tuple<IEnumerable<string>, string>> _filesToAdd;
private bool? _isRampInstalled;
/// ------------------------------------------------------------------------------------
[SetUp]
public void Setup()
{
ErrorReport.IsOkToInteractWithUser = false;
_helper = new RampArchivingDlgViewModel("Test App", "Test Title", "tst", null,
SetFilesToArchive, GetFileDescription);
_helper.AppSpecificFilenameNormalization = CustomFilenameNormalization;
_filesToAdd = new Dictionary<string, Tuple<IEnumerable<string>, string>>();
}
/// ------------------------------------------------------------------------------------
[TearDown]
public void TearDown()
{
_helper.CleanUp();
try { File.Delete(_helper.PackagePath); }
// ReSharper disable once EmptyGeneralCatchClause
catch { }
}
/// ------------------------------------------------------------------------------------
[Test]
public void CreateMetsFile_CreatesFile()
{
var metsPath = _helper.CreateMetsFile();
Assert.IsNotNull(metsPath);
Assert.IsTrue(File.Exists(metsPath));
}
/// ------------------------------------------------------------------------------------
[Test]
[Category("SkipOnTeamCity")]
public void CreateRampPackageWithSessionArchiveAndMetsFile_CreatesRampPackage()
{
TemporaryFolder tmpFolder = new TemporaryFolder("ArchiveHelperTestFolder");
try
{
string fileName = Path.Combine(tmpFolder.Path, "ddo.session");
File.CreateText(fileName).Close();
var fileList = new[] { Path.Combine(tmpFolder.Path, "ddo.session") };
_filesToAdd.Add(string.Empty, new Tuple<IEnumerable<string>, string>(fileList, "Message to display."));
_helper.Initialize();
_helper.CreateMetsFile();
Assert.IsTrue(_helper.CreateRampPackage());
Assert.IsTrue(File.Exists(_helper.PackagePath));
}
finally
{
tmpFolder.Dispose();
}
}
/// ------------------------------------------------------------------------------------
[Test]
public void GetMode_NullList_ReturnsNull()
{
Assert.IsNull(_helper.GetMode(null));
}
/// ------------------------------------------------------------------------------------
[Test]
public void GetMode_EmptyList_ReturnsNull()
{
Assert.IsNull(_helper.GetMode(new string[0]));
}
/// ------------------------------------------------------------------------------------
[Test]
public void GetMode_SingleTypeInList_ReturnsCorrectMetsList()
{
Assert.AreEqual("\"" + RampArchivingDlgViewModel.kFileTypeModeList + "\":[\"" +
RampArchivingDlgViewModel.kModeVideo + "\"]", _helper.GetMode(new[] { "blah.mpg" }));
}
/// ------------------------------------------------------------------------------------
[Test]
public void GetMode_MultipleTypesInList_ReturnsCorrectMetsList()
{
var mode = _helper.GetMode(new[] { "blah.mp3", "blah.doc", "blah.mov" });
Assert.AreEqual("\"" + RampArchivingDlgViewModel.kFileTypeModeList + "\":[\"" +
RampArchivingDlgViewModel.kModeSpeech + "\",\"" +
RampArchivingDlgViewModel.kModeText + "\",\"" +
RampArchivingDlgViewModel.kModeVideo + "\"]", mode);
}
/// ------------------------------------------------------------------------------------
[Test]
public void GetMode_ZipFileWithMultipleTypesInList_ReturnsCorrectMetsList()
{
ZipFile zipFile = new ZipFile();
zipFile.AddEntry("blah.mp3", "whatever");
zipFile.AddEntry("blah.doc", "whatever");
zipFile.AddEntry("blah.niff", "whatever");
var tempFile = TempFile.WithExtension("zip");
try
{
zipFile.Save(tempFile.Path);
var mode = _helper.GetMode(new[] { zipFile.Name });
Assert.AreEqual("\"" + RampArchivingDlgViewModel.kFileTypeModeList + "\":[\"" +
RampArchivingDlgViewModel.kModeSpeech + "\",\"" +
RampArchivingDlgViewModel.kModeText + "\",\"" +
RampArchivingDlgViewModel.kModeMusicalNotation + "\"]", mode);
}
finally
{
zipFile.Dispose();
tempFile.Dispose();
}
}
/// ------------------------------------------------------------------------------------
[Test]
public void GetMode_FwbackupFileWithMultipleTypesInList_ReturnsCorrectMetsList()
{
ZipFile zipFile = new ZipFile();
zipFile.AddEntry("blah.fwdata", "whatever");
zipFile.AddEntry("fonts/blah.ttf", "whatever");
zipFile.AddEntry("images/blah.jpeg", "whatever");
var tempFile = TempFile.WithExtension("fwbackup");
try
{
zipFile.Save(tempFile.Path);
var mode = _helper.GetMode(new[] { zipFile.Name });
Assert.AreEqual("\"" + RampArchivingDlgViewModel.kFileTypeModeList + "\":[\"" +
RampArchivingDlgViewModel.kModeText + "\",\"" +
RampArchivingDlgViewModel.kModeDataset + "\",\"" +
RampArchivingDlgViewModel.kModeSoftwareOrFont + "\",\"" +
RampArchivingDlgViewModel.kModePhotograph + "\"]", mode);
}
finally
{
zipFile.Dispose();
tempFile.Dispose();
}
}
/// ------------------------------------------------------------------------------------
[Test]
public void GetMode_ListContainsMultiplesOfOneType_ReturnsOnlyOneTypeInList()
{
var mode = _helper.GetMode(new[] { "blah.mp3", "blah.wma", "blah.wav" });
Assert.AreEqual("\"" + RampArchivingDlgViewModel.kFileTypeModeList + "\":[\"" +
RampArchivingDlgViewModel.kModeSpeech + "\"]", mode);
}
#region GetSourceFilesForMetsData tests
/// ------------------------------------------------------------------------------------
[Test]
public void GetSourceFilesForMetsData_ListContainsOnlySessionMetaFile_ReturnsCorrectMetsData()
{
var fileLists = new Dictionary<string, Tuple<IEnumerable<string>, string>>();
fileLists[string.Empty] = new Tuple<IEnumerable<string>, string>(new[] { "blah.session" }, "Message to display.");
var expected = "\"" + RampArchivingDlgViewModel.kDefaultKey + "\":\"blah.session\"" + RampArchivingDlgViewModel.kSeparator + "\"" +
RampArchivingDlgViewModel.kFileDescription + "\":\"MyApp Session Metadata (XML)\"" + RampArchivingDlgViewModel.kSeparator + "\"" +
RampArchivingDlgViewModel.kFileRelationship + "\":\"" +
RampArchivingDlgViewModel.kRelationshipSource + "\"";
Assert.AreEqual(expected, _helper.GetSourceFilesForMetsData(fileLists).ElementAt(0));
}
/// ------------------------------------------------------------------------------------
[Test]
public void GetSourceFilesForMetsData_ListContainsOnlyPersonMetaFile_ReturnsCorrectMetsData()
{
var fileLists = new Dictionary<string, Tuple<IEnumerable<string>, string>>();
fileLists[string.Empty] = new Tuple<IEnumerable<string>, string>(new[] { "blah.person" }, "Message to display.");
var expected = "\"" + RampArchivingDlgViewModel.kDefaultKey + "\":\"blah.person\"" + RampArchivingDlgViewModel.kSeparator + "\"" +
RampArchivingDlgViewModel.kFileDescription + "\":\"MyApp Contributor Metadata (XML)\"" + RampArchivingDlgViewModel.kSeparator + "\"" +
RampArchivingDlgViewModel.kFileRelationship + "\":\"" +
RampArchivingDlgViewModel.kRelationshipSource + "\"";
Assert.AreEqual(expected, _helper.GetSourceFilesForMetsData(fileLists).ElementAt(0));
}
/// ------------------------------------------------------------------------------------
[Test]
public void GetSourceFilesForMetsData_ListContainsOnlyMetaFile_ReturnsCorrectMetsData()
{
var fileLists = new Dictionary<string, Tuple<IEnumerable<string>, string>>();
fileLists[string.Empty] = new Tuple<IEnumerable<string>, string>(new[] { "blah.meta" }, "Message to display.");
var expected = "\"" + RampArchivingDlgViewModel.kDefaultKey + "\":\"blah.meta\"" + RampArchivingDlgViewModel.kSeparator + "\"" +
RampArchivingDlgViewModel.kFileDescription + "\":\"MyApp File Metadata (XML)\"" + RampArchivingDlgViewModel.kSeparator + "\"" +
RampArchivingDlgViewModel.kFileRelationship + "\":\"" +
RampArchivingDlgViewModel.kRelationshipSource + "\"";
Assert.AreEqual(expected, _helper.GetSourceFilesForMetsData(fileLists).ElementAt(0));
}
/// ------------------------------------------------------------------------------------
[Test]
public void GetSourceFilesForMetsData_ListContainsGenericSessionFile_ReturnsCorrectMetsData()
{
var fileLists = new Dictionary<string, Tuple<IEnumerable<string>, string>>();
fileLists[string.Empty] = new Tuple<IEnumerable<string>, string>(new[] { "blah.wav" }, "Message to display.");
var expected = "\"" + RampArchivingDlgViewModel.kDefaultKey + "\":\"blah.wav\"" + RampArchivingDlgViewModel.kSeparator + "\"" +
RampArchivingDlgViewModel.kFileDescription + "\":\"MyApp Session File\"" + RampArchivingDlgViewModel.kSeparator + "\"" +
RampArchivingDlgViewModel.kFileRelationship + "\":\"" +
RampArchivingDlgViewModel.kRelationshipSource + "\"";
Assert.AreEqual(expected, _helper.GetSourceFilesForMetsData(fileLists).ElementAt(0));
}
/// ------------------------------------------------------------------------------------
[Test]
public void GetSourceFilesForMetsData_ListContainsGenericPersonFile_ReturnsCorrectMetsData()
{
var fileLists = new Dictionary<string, Tuple<IEnumerable<string>, string>>();
fileLists["Carmen"] = new Tuple<IEnumerable<string>, string>(new[] { "Carmen_blah.wav" }, "Message to display.");
var expected = "\"" + RampArchivingDlgViewModel.kDefaultKey + "\":\"__AppSpecific__Carmen_blah.wav\"" + RampArchivingDlgViewModel.kSeparator + "\"" +
RampArchivingDlgViewModel.kFileDescription + "\":\"MyApp Contributor File\"" + RampArchivingDlgViewModel.kSeparator + "\"" +
RampArchivingDlgViewModel.kFileRelationship + "\":\"" +
RampArchivingDlgViewModel.kRelationshipSource + "\"";
Assert.AreEqual(expected, _helper.GetSourceFilesForMetsData(fileLists).ElementAt(0));
}
/// ------------------------------------------------------------------------------------
[Test]
public void GetSourceFilesForMetsData_ListMultipleFiles_ReturnsCorrectMetsData()
{
var fileLists = new Dictionary<string, Tuple<IEnumerable<string>, string>>();
fileLists[string.Empty] = new Tuple<IEnumerable<string>, string>(new[] { "blah.session", "really cool.wav" }, "Message to display.");
fileLists["person id"] = new Tuple<IEnumerable<string>, string>(new[] { "person id_blah.person", "person id_baa.mpg", "person id_baa.mpg.meta" }, "Message to display.");
Assert.AreEqual("\"" + RampArchivingDlgViewModel.kDefaultKey + "\":\"blah.session\"" +
RampArchivingDlgViewModel.kSeparator + "\"" +
RampArchivingDlgViewModel.kFileDescription + "\":\"MyApp Session Metadata (XML)\"" +
RampArchivingDlgViewModel.kSeparator + "\"" +
RampArchivingDlgViewModel.kFileRelationship + "\":\"" +
RampArchivingDlgViewModel.kRelationshipSource + "\"",
_helper.GetSourceFilesForMetsData(fileLists).ElementAt(0));
Assert.AreEqual("\"" + RampArchivingDlgViewModel.kDefaultKey + "\":\"really-cool.wav\"" +
RampArchivingDlgViewModel.kSeparator + "\"" +
RampArchivingDlgViewModel.kFileDescription + "\":\"MyApp Session File\"" +
RampArchivingDlgViewModel.kSeparator + "\"" +
RampArchivingDlgViewModel.kFileRelationship + "\":\"" +
RampArchivingDlgViewModel.kRelationshipSource + "\"",
_helper.GetSourceFilesForMetsData(fileLists).ElementAt(1));
Assert.AreEqual("\"" + RampArchivingDlgViewModel.kDefaultKey + "\":\"__AppSpecific__person-id_blah.person\"" +
RampArchivingDlgViewModel.kSeparator + "\"" +
RampArchivingDlgViewModel.kFileDescription + "\":\"MyApp Contributor Metadata (XML)\"" +
RampArchivingDlgViewModel.kSeparator + "\"" +
RampArchivingDlgViewModel.kFileRelationship + "\":\"" +
RampArchivingDlgViewModel.kRelationshipSource + "\"",
_helper.GetSourceFilesForMetsData(fileLists).ElementAt(2));
Assert.AreEqual("\"" + RampArchivingDlgViewModel.kDefaultKey + "\":\"__AppSpecific__person-id_baa.mpg\"" +
RampArchivingDlgViewModel.kSeparator + "\"" +
RampArchivingDlgViewModel.kFileDescription + "\":\"MyApp Contributor File\"" +
RampArchivingDlgViewModel.kSeparator + "\"" +
RampArchivingDlgViewModel.kFileRelationship + "\":\"" +
RampArchivingDlgViewModel.kRelationshipSource + "\"",
_helper.GetSourceFilesForMetsData(fileLists).ElementAt(3));
Assert.AreEqual("\"" + RampArchivingDlgViewModel.kDefaultKey + "\":\"__AppSpecific__person-id_baa.mpg.meta\"" +
RampArchivingDlgViewModel.kSeparator + "\"" +
RampArchivingDlgViewModel.kFileDescription + "\":\"MyApp File Metadata (XML)\"" +
RampArchivingDlgViewModel.kSeparator + "\"" +
RampArchivingDlgViewModel.kFileRelationship + "\":\"" +
RampArchivingDlgViewModel.kRelationshipSource + "\"",
_helper.GetSourceFilesForMetsData(fileLists).ElementAt(4));
}
#endregion
#region SetAudience tests
/// ------------------------------------------------------------------------------------
[Test]
public void SetAudience_ChangeAudience_ThrowsInvalidOperationException()
{
_helper.SetAudience(AudienceType.Vernacular);
Assert.Throws<InvalidOperationException>(
() => _helper.SetAudience(AudienceType.Training)
);
}
#endregion
#region SetVernacularMaterialsAndContentType tests
/// ------------------------------------------------------------------------------------
[Test]
public void SetVernacularMaterialsAndContentType_IncompatibleWithAudience_ThrowsInvalidOperationException()
{
_helper.SetAudience(AudienceType.Training);
Assert.Throws<InvalidOperationException>(
() => _helper.SetVernacularMaterialsAndContentType(VernacularMaterialsType.BibleBackground)
);
}
/// ------------------------------------------------------------------------------------
[Test]
public void SetVernacularMaterialsAndContentType_CompatibleWithAudience_IncludedInMetsData()
{
_helper.SetAudience(AudienceType.Vernacular);
_helper.SetVernacularMaterialsAndContentType(VernacularMaterialsType.LiteracyEducation_Riddles);
var data = _helper.GetMetadata();
Assert.AreEqual("{\"dc.title\":\"Test Title\",\"" +
RampArchivingDlgViewModel.kAudience + "\":\"" + RampArchivingDlgViewModel.kAudienceVernacular + "\",\"" +
RampArchivingDlgViewModel.kVernacularMaterialsType + "\":\"" + RampArchivingDlgViewModel.kVernacularMaterialGeneral + "\",\"" +
RampArchivingDlgViewModel.kVernacularContent + "\":\"Riddles\"}",
data);
}
/// ------------------------------------------------------------------------------------
[Test]
public void SetVernacularMaterialsAndContentType_MixOfScriptureAndOther_ThrowsArgumentException()
{
Assert.Throws<ArgumentException>(
() => _helper.SetVernacularMaterialsAndContentType(VernacularMaterialsType.BibleStory | VernacularMaterialsType.CommunityAndCulture_Calendar)
);
}
#endregion
#region SetAbstract tests
/// ------------------------------------------------------------------------------------
[Test]
public void SetAbstract_SetSingleAbstractWithoutLanguage_IncludedInMetsData()
{
_helper.SetAbstract("SayMore doesn't let the user specify the language explicitly.", string.Empty);
var data = _helper.GetMetadata();
Assert.AreEqual("{\"dc.title\":\"Test Title\"," +
"\"description.abstract.has\":\"Y\",\"dc.description.abstract\":{" +
"\"0\":{\" \":\"SayMore doesn't let the user specify the language explicitly.\"}}}",
data);
//"{\"dc.title\":\"what\",\"broad_type\":\"wider_audience\",\"dc.type.mode\":[\"Text\"],\"dc.description.stage\":\"rough_draft\"," +
// "\"version.type\":\"first\",\"dc.type.scholarlyWork\":\"Data set\",\"dc.subject.subjectLanguage\":{\"0\":{\"dialect\":\"\"}}," +
// "\"dc.language.iso\":{\"0\":{\"dialect\":\"\"}},\"dc.subject.silDomain\":[\"LING:Linguistics\"],\"sil.sensitivity.metadata\":\"Public\"," +
// "\"files\":{\"0\":{\" \":\"gmreadme.txt\",\"description\":\"junk\"}},\"status\":\"ready\"," +
// "\"description.abstract.has\":\"Y\",\"dc.description.abstract\":{\"0\":{\" \":\"\"SayMore doesn't let the use specify the language explicitly.\"}}}"
}
[Test]
public void SetAbstract_SetTwice_ThrowsInvalidOperationException()
{
_helper.SetAbstract("This is pretty abstract", "eng");
Dictionary<string, string> foreignLanguageAbstracts = new Dictionary<string, string>();
foreignLanguageAbstracts["fra"] = "C'est assez abstrait";
foreignLanguageAbstracts["spa"] = "Esto es bastante abstracto";
Assert.Throws<InvalidOperationException>(
() => _helper.SetAbstract(foreignLanguageAbstracts)
);
}
/// ------------------------------------------------------------------------------------
[Test]
public void SetAbstract_Null_ThrowsArgumentNullException()
{
Assert.Throws<ArgumentNullException>(() => _helper.SetAbstract(null));
}
/// ------------------------------------------------------------------------------------
[Test]
public void SetAbstract_ThreeLanguages_IncludedInMetsData()
{
Dictionary<string, string> abstracts = new Dictionary<string, string>();
abstracts["eng"] = "This is pretty abstract";
abstracts["fra"] = "C'est assez abstrait";
abstracts["spa"] = "Esto es bastante abstracto";
_helper.SetAbstract(abstracts);
var data = _helper.GetMetadata();
Assert.AreEqual("{\"dc.title\":\"Test Title\"," +
"\"description.abstract.has\":\"Y\",\"dc.description.abstract\":{" +
"\"0\":{\" \":\"This is pretty abstract\",\"lang\":\"eng\"}," +
"\"1\":{\" \":\"C'est assez abstrait\",\"lang\":\"fra\"}," +
"\"2\":{\" \":\"Esto es bastante abstracto\",\"lang\":\"spa\"}}}",
data);
}
#endregion
#region SetAudioVideoExtent tests
/// ------------------------------------------------------------------------------------
[Test]
public void SetAudioVideoExtent_FreeFormString_IncludedInMetsData()
{
_helper.SetAudioVideoExtent("6 and a half seconds");
var data = _helper.GetMetadata();
Assert.AreEqual("{\"dc.title\":\"Test Title\",\"" +
RampArchivingDlgViewModel.kRecordingExtent + "\":\"6 and a half seconds\"}",
data);
}
/// ------------------------------------------------------------------------------------
[Test]
public void SetAudioVideoExtent_ValidTimeSpan_IncludedInMetsData()
{
TimeSpan duration = new TimeSpan(0, 2, 3, 4);
_helper.SetAudioVideoExtent(duration);
var data = _helper.GetMetadata();
Assert.AreEqual("{\"dc.title\":\"Test Title\",\"" +
RampArchivingDlgViewModel.kRecordingExtent + "\":\"02:03:04\"}",
data);
}
/// ------------------------------------------------------------------------------------
[Test]
public void SetAudioVideoExtent_SetTwice_ThrowsInvalidOperationException()
{
_helper.SetAudioVideoExtent("twelve years or more");
TimeSpan duration = new TimeSpan(0, 2, 3, 4);
Assert.Throws<InvalidOperationException>(() => _helper.SetAudioVideoExtent(duration));
}
#endregion
#region SetContentLanguages tests
/// ------------------------------------------------------------------------------------
[Test]
[Category("SkipOnTeamCity")]
[Category("RampRequired")]
public void SetContentLanguages_TwoLanguages_IncludedInMetsData()
{
IgnoreTestIfRampIsNotInstalled();
Assert.Ignore("This test is no longer valid because RAMP 3.0 does not have a languages file");
_helper.SetContentLanguages("eng", "fra");
var data = _helper.GetMetadata();
Assert.AreEqual("{\"dc.title\":\"Test Title\",\"" +
RampArchivingDlgViewModel.kContentLanguages + "\":{\"0\":{\" \":\"eng:English\"},\"1\":{\" \":\"fra:French\"}}}",
data);
}
/// ------------------------------------------------------------------------------------
[Test]
[Category("SkipOnTeamCity")]
[Category("RampRequired")]
public void SetContentLanguages_SetTwice_ThrowsInvalidOperationException()
{
IgnoreTestIfRampIsNotInstalled();
_helper.SetContentLanguages("eng", "fra");
Assert.Throws<InvalidOperationException>(() => _helper.SetContentLanguages("spa", "fra"));
}
#endregion
#region SetContributors tests
/// ------------------------------------------------------------------------------------
[Test]
public void SetContributors_Null_ThrowsArgumentNullException()
{
Assert.Throws<ArgumentNullException>(() => _helper.SetContributors(null));
}
/// ------------------------------------------------------------------------------------
[Test]
public void SetContributors_Empty_NoChangeToMetsData()
{
var dataBefore = _helper.GetMetadata();
var empty = new ContributionCollection();
_helper.SetContributors(empty);
var dataAfter = _helper.GetMetadata();
Assert.AreEqual(dataBefore, dataAfter);
}
/// ------------------------------------------------------------------------------------
[Test]
public void SetContributors_TwoContributors_IncludedInMetsData()
{
var contributors = new ContributionCollection();
OlacSystem olacSystem = new OlacSystem();
contributors.Add(new Contribution("Erkel", olacSystem.GetRoleByCodeOrThrow("author")));
contributors.Add(new Contribution("Sungfu", olacSystem.GetRoleByCodeOrThrow("recorder")));
_helper.SetContributors(contributors);
var data = _helper.GetMetadata();
Assert.AreEqual("{\"dc.title\":\"Test Title\",\"" +
RampArchivingDlgViewModel.kContributor + "\":{\"0\":{\" \":\"Erkel\",\"role\":\"author\"},\"1\":{\" \":\"Sungfu\",\"role\":\"recorder\"}}}",
data);
}
/// ------------------------------------------------------------------------------------
[Test]
public void SetContributors_SetTwice_ThrowsInvalidOperationException()
{
var contributors = new ContributionCollection();
OlacSystem olacSystem = new OlacSystem();
Role role = olacSystem.GetRoleByCodeOrThrow("author");
var contrib = new Contribution("Erkel", role);
contributors.Add(contrib);
_helper.SetContributors(contributors);
Assert.Throws<InvalidOperationException>(() => _helper.SetContributors(contributors));
}
#endregion
#region SetCreationDate tests
/// ------------------------------------------------------------------------------------
[Test]
public void SetCreationDate_FreeFormString_IncludedInMetsData()
{
_helper.SetCreationDate("four years ago");
var data = _helper.GetMetadata();
Assert.AreEqual("{\"dc.title\":\"Test Title\",\"" +
RampArchivingDlgViewModel.kDateCreated + "\":\"four years ago\"}",
data);
}
/// ------------------------------------------------------------------------------------
[Test]
public void SetCreationDate_ValidTimeSpan_IncludedInMetsData()
{
DateTime creationDate = new DateTime(2012, 4, 13);
_helper.SetCreationDate(creationDate);
var data = _helper.GetMetadata();
Assert.AreEqual("{\"dc.title\":\"Test Title\",\"" +
RampArchivingDlgViewModel.kDateCreated + "\":\"2012-04-13\"}",
data);
}
/// ------------------------------------------------------------------------------------
[Test]
public void SetCreationDate_SetTwice_ThrowsInvalidOperationException()
{
_helper.SetCreationDate("tomorrow");
Assert.Throws<InvalidOperationException>(() => _helper.SetCreationDate(new DateTime(2012, 4, 13)));
}
#endregion
#region SetDatasetExtent tests
/// ------------------------------------------------------------------------------------
[Test]
public void SetDatasetExtent_FreeFormString_IncludedInMetsData()
{
_helper.SetDatasetExtent("6 voice records and maybe an odd text file or two");
var data = _helper.GetMetadata();
Assert.AreEqual("{\"dc.title\":\"Test Title\",\"" +
RampArchivingDlgViewModel.kDatasetExtent + "\":\"6 voice records and maybe an odd text file or two\"}",
data);
}
/// ------------------------------------------------------------------------------------
[Test]
public void SetDatasetExtent_SetTwice_ThrowsInvalidOperationException()
{
_helper.SetDatasetExtent("practically nothing");
Assert.Throws<InvalidOperationException>(() => _helper.SetDatasetExtent("lots of data"));
}
#endregion
#region SetDescription tests
/// ------------------------------------------------------------------------------------
[Test]
public void SetDescription_Null_ThrowsArgumentNullException()
{
Assert.Throws<ArgumentNullException>(() => _helper.SetDescription(null));
}
/// ------------------------------------------------------------------------------------
[Test]
public void SetDescription_Empty_NoChangeToMetsData()
{
var dataBefore = _helper.GetMetadata();
_helper.SetDescription(new Dictionary<string, string>());
var dataAfter = _helper.GetMetadata();
Assert.AreEqual(dataBefore, dataAfter);
}
/// ------------------------------------------------------------------------------------
[Test]
public void SetDescription_TwoLanguages_IncludedInMetsData()
{
var descriptions = new Dictionary<string, string>();
descriptions["eng"] = "General data";
descriptions["spa"] = "Datos generales";
_helper.SetDescription(descriptions);
var data = _helper.GetMetadata();
Assert.AreEqual("{\"dc.title\":\"Test Title\",\"" + RampArchivingDlgViewModel.kFlagHasGeneralDescription + "\":\"Y\",\"" +
RampArchivingDlgViewModel.kGeneralDescription + "\":{\"0\":{\" \":\"General data\",\"lang\":\"eng\"},\"1\":{\" \":\"Datos generales\",\"lang\":\"spa\"}}}",
data);
}
/// ------------------------------------------------------------------------------------
[Test]
public void SetDescription_SetTwice_ThrowsInvalidOperationException()
{
var descriptions = new Dictionary<string, string>();
descriptions["eng"] = "General data";
_helper.SetDescription(descriptions);
Assert.Throws<InvalidOperationException>(() => _helper.SetDescription(descriptions));
}
#endregion
[Test]
public void GetEnglishName_GetFromCulture_ReturnsEnglishName()
{
var eng = new ArchivingLanguage("eng");
var fra = new ArchivingLanguage("fra");
var spa = new ArchivingLanguage("spa");
Assert.AreEqual("English", eng.EnglishName);
Assert.AreEqual("French", fra.EnglishName);
Assert.AreEqual("Spanish", spa.EnglishName);
}
/// ------------------------------------------------------------------------------------
[Test]
[Category("SkipOnTeamCity")]
[Category("RampRequired")]
public void GetLanguageName_English_ReturnsEnglish()
{
IgnoreTestIfRampIsNotInstalled();
Assert.Ignore("This test is no longer valid because RAMP 3.0 does not have a languages file");
var langName = _helper.GetLanguageName("eng");
Assert.AreEqual(langName, "English");
}
/// ------------------------------------------------------------------------------------
[Test]
[Category("SkipOnTeamCity")]
[Category("RampRequired")]
public void GetLanguageName_Gibberish_ReturnsNull()
{
IgnoreTestIfRampIsNotInstalled();
var langName = _helper.GetLanguageName("z23");
Assert.IsNull(langName);
}
[Test]
[Category("SkipOnTeamCity")]
[Category("RampRequired")]
public void GetLanguageName_ArchivingLanguage_ReturnsCorrectName()
{
IgnoreTestIfRampIsNotInstalled();
Assert.Ignore("This test is no longer valid because RAMP 3.0 does not have a languages file");
// FieldWorks associates the name "Chinese" with the ISO3 Code "cmn"
ArchivingLanguage lang = new ArchivingLanguage("cmn", "Chinese");
            // RAMP requires the name "Chinese, Mandarin"
Assert.AreEqual("Chinese, Mandarin", _helper.GetLanguageName(lang.Iso3Code));
}
/// ------------------------------------------------------------------------------------
[Test]
[Category("SkipOnTeamCity")]
[Category("RampRequired")]
public void GetRAMPFileLocation_RAMPInstalled_ReturnsFileLocation()
{
IgnoreTestIfRampIsNotInstalled();
var fileName = RampArchivingDlgViewModel.GetExeFileLocation();
Assert.IsTrue(File.Exists(fileName), "RAMP executable file not found.");
}
#region Private helper methods
/// ------------------------------------------------------------------------------------
private void SetFilesToArchive(ArchivingDlgViewModel model)
{
foreach (var kvp in _filesToAdd)
model.AddFileGroup(kvp.Key, kvp.Value.Item1, kvp.Value.Item2);
}
/// ------------------------------------------------------------------------------------
private string GetFileDescription(string key, string file)
{
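            // Mimics the host application's file-description callback: *.session, *.person and *.meta
            // files are described as XML metadata; anything else is a generic session or contributor file.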
var description = (key == string.Empty ? "MyApp Session File" : "MyApp Contributor File");
if (file.ToLower().EndsWith(".session"))
description = "MyApp Session Metadata (XML)";
else if (file.ToLower().EndsWith(".person"))
description = "MyApp Contributor Metadata (XML)";
else if (file.ToLower().EndsWith(".meta"))
description = "MyApp File Metadata (XML)";
return description;
}
/// ------------------------------------------------------------------------------------
private void CustomFilenameNormalization(string key, string file, StringBuilder bldr)
{
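            // Contributor-owned files (non-empty key) get an app-specific prefix so the tests can
            // verify that custom filename normalization is applied.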
if (key != string.Empty)
bldr.Insert(0, "__AppSpecific__");
}
private void IgnoreTestIfRampIsNotInstalled()
{
if (!_isRampInstalled.HasValue)
{
// we remember the value so that we check only once. This won't change within
// a test run.
_isRampInstalled = !string.IsNullOrEmpty(RampArchivingDlgViewModel.GetExeFileLocation());
}
if (!_isRampInstalled.Value)
Assert.Ignore("This test requires RAMP");
}
#endregion
}
}
| |
//------------------------------------------------------------------------------
//
// zf - A command line archiver using the ZipFile class from SharpZipLib
// for compression
//
// Copyright 2006 John Reilly
//
//------------------------------------------------------------------------------
// Version History
// 1 Initial version ported from the sz sample. Some functionality is still unused or commented out.
// 2 Display files during extract. --env Now shows .NET version information.
// 3 Add usezip64 option as a testing aid.
using System;
using System.IO;
using System.Collections;
using System.Text;
using System.Globalization;
using System.Diagnostics;
using System.Reflection;
using ICSharpCode.SharpZipLib.Zip;
using ICSharpCode.SharpZipLib.Core;
using ICSharpCode.SharpZipLib.Zip.Compression;
namespace SharpZip
{
/// <summary>
/// A command line archiver using the ZipFile class from SharpZipLib compression library
/// </summary>
public class ZipFileArchiver
{
#region Enumerations
/// <summary>
/// Options for handling overwriting of files.
/// </summary>
enum Overwrite
{
Prompt,
Never,
Always
}
/// <summary>
/// Kinds of thing we know how to do
/// </summary>
enum Operation
{
Create, // add files to new archive
Extract, // extract files from existing archive
List, // show contents of existing archive
Delete, // Delete from archive
Add, // Add to archive.
Test, // Test the archive for validity.
}
#endregion
#region Constructors
/// <summary>
/// Base constructor - initializes all fields to default values
/// </summary>
public ZipFileArchiver()
{
// Do nothing.
}
#endregion
#region Argument Parsing
/// <summary>
/// Parse command line arguments.
/// This is fairly flexible without using any custom classes. Arguments and options can appear
/// in any order and are case insensitive. Arguments for options are signalled with an '='
	/// as in -demo=argument; in some cases the '=' can also be omitted.
/// Grouping of single character options is supported.
/// </summary>
/// <returns>
/// true if arguments are valid such that processing should continue
/// </returns>
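		// Illustrative invocations this parser is intended to accept (not exhaustive):
		//   zf --create -e=9 archive.zip file1.cs file2.cs
		//   zf --extract=out --zip64=auto archive.zip
		//   zf -vq archive.zip        (grouped single-character options)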
bool SetArgs(string[] args)
{
bool result = true;
int argIndex = 0;
while (argIndex < args.Length)
{
if (args[argIndex][0] == '-' || args[argIndex][0] == '/')
{
string option = args[argIndex].Substring(1).ToLower();
string optArg = "";
int parameterIndex = option.IndexOf('=');
if (parameterIndex >= 0)
{
if (parameterIndex < option.Length - 1)
{
optArg = option.Substring(parameterIndex + 1);
}
option = option.Substring(0, parameterIndex);
}
#if OPTIONTEST
Console.WriteLine("args index [{0}] option [{1}] argument [{2}]", argIndex, option, optArg);
#endif
if (option.Length == 0)
{
						System.Console.Error.WriteLine("Invalid argument {0}", args[argIndex]);
result = false;
}
else
{
int optionIndex = 0;
while (optionIndex < option.Length)
{
#if OPTIONTEST
Console.WriteLine("optionIndex {0}", optionIndex);
#endif
switch(option[optionIndex])
{
case '-': // long option
optionIndex = option.Length;
switch (option)
{
case "-add":
operation_ = Operation.Add;
break;
case "-create":
operation_ = Operation.Create;
break;
case "-list":
operation_ = Operation.List;
break;
case "-extract":
operation_ = Operation.Extract;
if (optArg.Length > 0)
{
targetOutputDirectory_ = optArg;
}
break;
case "-delete":
operation_ = Operation.Delete;
break;
case "-test":
operation_ = Operation.Test;
break;
case "-env":
ShowEnvironment();
break;
case "-emptydirs":
addEmptyDirectoryEntries_ = true;
break;
case "-data":
testData_ = true;
break;
case "-zip64":
if ( optArg.Length > 0 )
{
switch ( optArg )
{
case "on":
useZip64_ = UseZip64.On;
break;
case "off":
useZip64_ = UseZip64.Off;
break;
case "auto":
useZip64_ = UseZip64.Dynamic;
break;
}
}
break;
case "-encoding":
if (optArg.Length > 0)
{
if (IsNumeric(optArg))
{
try
{
int enc = int.Parse(optArg);
if (Encoding.GetEncoding(enc) != null)
{
#if OPTIONTEST
Console.WriteLine("Encoding set to {0}", enc);
#endif
ZipConstants.DefaultCodePage = enc;
}
else
{
result = false;
System.Console.Error.WriteLine("Invalid encoding " + args[argIndex]);
}
}
catch (Exception)
{
result = false;
System.Console.Error.WriteLine("Invalid encoding " + args[argIndex]);
}
}
else
{
try
{
ZipConstants.DefaultCodePage = Encoding.GetEncoding(optArg).CodePage;
}
catch (Exception)
{
result = false;
System.Console.Error.WriteLine("Invalid encoding " + args[argIndex]);
}
}
}
else
{
result = false;
System.Console.Error.WriteLine("Missing encoding parameter");
}
break;
case "-version":
ShowVersion();
break;
case "-help":
ShowHelp();
break;
case "-restore-dates":
restoreDateTime_ = true;
break;
default:
System.Console.Error.WriteLine("Invalid long argument " + args[argIndex]);
result = false;
break;
}
break;
case '?':
ShowHelp();
break;
case 's':
if (optionIndex != 0)
{
result = false;
System.Console.Error.WriteLine("-s cannot be in a group");
}
else
{
if (optArg.Length > 0)
{
password_ = optArg;
}
else if (option.Length > 1)
{
password_ = option.Substring(1);
}
else
{
System.Console.Error.WriteLine("Missing argument to " + args[argIndex]);
}
}
optionIndex = option.Length;
break;
case 't':
operation_ = Operation.Test;
break;
case 'c':
operation_ = Operation.Create;
break;
case 'e':
if (optionIndex != 0)
{
result = false;
System.Console.Error.WriteLine("-e cannot be in a group");
}
else
{
optionIndex = option.Length;
if (optArg.Length > 0)
{
try
{
compressionLevel_ = int.Parse(optArg);
}
catch (Exception)
{
System.Console.Error.WriteLine("Level invalid");
}
}
}
optionIndex = option.Length;
break;
case 'o':
optionIndex += 1;
overwriteFiles = optionIndex < option.Length ? (option[optionIndex] == '+') ? Overwrite.Always : Overwrite.Never : Overwrite.Never;
break;
case 'q':
silent_ = true;
if (overwriteFiles == Overwrite.Prompt)
{
overwriteFiles = Overwrite.Never;
}
break;
case 'r':
recursive_ = true;
break;
case 'v':
operation_ = Operation.List;
break;
case 'x':
if (optionIndex != 0)
{
result = false;
System.Console.Error.WriteLine("-x cannot be in a group");
}
else
{
operation_ = Operation.Extract;
if (optArg.Length > 0)
{
targetOutputDirectory_ = optArg;
}
}
optionIndex = option.Length;
break;
default:
System.Console.Error.WriteLine("Invalid argument: " + args[argIndex]);
result = false;
break;
}
++optionIndex;
}
}
}
else
{
#if OPTIONTEST
Console.WriteLine("file spec {0} = '{1}'", argIndex, args[argIndex]);
#endif
fileSpecs_.Add(args[argIndex]);
}
++argIndex;
}
if (fileSpecs_.Count > 0)
{
string checkPath = (string)fileSpecs_[0];
int deviceCheck = checkPath.IndexOf(':');
#if NET_VER_1
if (checkPath.IndexOfAny(Path.InvalidPathChars) >= 0
#else
if (checkPath.IndexOfAny(Path.GetInvalidPathChars()) >= 0
#endif
|| checkPath.IndexOf('*') >= 0 || checkPath.IndexOf('?') >= 0
|| ((deviceCheck >= 0) && (deviceCheck != 1)))
{
Console.WriteLine("There are invalid characters in the specified zip file name");
result = false;
}
}
return result && (fileSpecs_.Count > 0);
}
#endregion
#region Show - Help/Environment/Version
/// <summary>
/// Show encoding/locale information
/// </summary>
void ShowEnvironment()
{
seenHelp_ = true;
Console.Out.WriteLine("");
System.Console.Out.WriteLine(
"Current encoding is {0}, code page {1}, windows code page {2}",
System.Console.Out.Encoding.EncodingName,
System.Console.Out.Encoding.CodePage,
System.Console.Out.Encoding.WindowsCodePage);
System.Console.WriteLine("Default code page is {0}",
Encoding.Default.CodePage);
Console.WriteLine( "Current culture LCID 0x{0:X}, {1}", CultureInfo.CurrentCulture.LCID, CultureInfo.CurrentCulture.EnglishName);
Console.WriteLine( "Current thread OEM codepage {0}", System.Threading.Thread.CurrentThread.CurrentCulture.TextInfo.OEMCodePage);
Console.WriteLine( "Current thread Mac codepage {0}", System.Threading.Thread.CurrentThread.CurrentCulture.TextInfo.MacCodePage);
Console.WriteLine( "Current thread Ansi codepage {0}", System.Threading.Thread.CurrentThread.CurrentCulture.TextInfo.ANSICodePage);
Console.WriteLine(".NET version {0}", Environment.Version);
}
/// <summary>
/// Display version information
/// </summary>
void ShowVersion()
{
seenHelp_ = true;
Console.Out.WriteLine("ZipFile Archiver v0.3 Copyright 2006 John Reilly");
Assembly[] assemblies = AppDomain.CurrentDomain.GetAssemblies();
foreach (Assembly assembly in assemblies)
{
if (assembly.GetName().Name == "ICSharpCode.SharpZipLib")
{
Console.Out.WriteLine("#ZipLib v{0} {1}", assembly.GetName().Version,
assembly.GlobalAssemblyCache == true ? "Running from GAC" : "Running from DLL"
);
}
}
Console.Out.WriteLine();
}
/// <summary>
/// Show help on possible options and arguments
/// </summary>
void ShowHelp()
{
if (seenHelp_ == true)
{
return;
}
seenHelp_ = true;
ShowVersion();
Console.Out.WriteLine("usage zf {options} archive files");
Console.Out.WriteLine("");
Console.Out.WriteLine("Options:");
Console.Out.WriteLine("--add Add files to archive");
Console.Out.WriteLine("--create Create new archive");
Console.Out.WriteLine("--data Test archive data"); Console.Out.WriteLine("--delete Delete files from archive");
Console.Out.WriteLine("--encoding=codepage|name Set code page for encoding by name or number");
Console.Out.WriteLine("--extract{=dir} Extract archive contents to dir(default .)");
Console.Out.WriteLine("--help Show this help");
Console.Out.WriteLine("--env Show current environment information" );
Console.Out.WriteLine("--list List archive contents extended format");
Console.Out.WriteLine("--test Test archive for validity");
Console.Out.WriteLine("--version Show version information");
Console.Out.WriteLine("-r Recurse sub-folders");
Console.Out.WriteLine("-s=password Set archive password");
Console.Out.WriteLine("--zip64=[on|off|auto] Zip64 extension handling to use");
/*
Console.Out.WriteLine("--store Store entries (default=deflate)");
Console.Out.WriteLine("--emptydirs Create entries for empty directories");
Console.Out.WriteLine("--restore-dates Restore dates on extraction");
Console.Out.WriteLine("-o+ Overwrite files without prompting");
Console.Out.WriteLine("-o- Never overwrite files");
Console.Out.WriteLine("-q Quiet mode");
*/
Console.Out.WriteLine("");
}
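// Illustrative invocations (a sketch, assuming the compiled executable is named "zf"
// as the usage line above suggests; archive and file names are placeholders):
//   zf --create backup.zip *.cs            create backup.zip from all .cs files
//   zf --list backup.zip                   list the archive contents
//   zf --extract=out backup.zip            extract into the "out" directory
//   zf -s=secret --add backup.zip new.txt  add a file, setting an archive password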
#endregion
#region Archive Listing
void ListArchiveContents(ZipFile zipFile, FileInfo fileInfo)
{
const string headerTitles = "Name Length Ratio Size Date & time CRC-32 Attr";
const string headerUnderline = "------------ ---------- ----- ---------- ------------------- -------- ------";
int entryCount = 0;
long totalCompressedSize = 0;
long totalSize = 0;
foreach (ZipEntry theEntry in zipFile)
{
if ( theEntry.IsDirectory )
{
Console.Out.WriteLine("Directory {0}", theEntry.Name);
}
else if ( !theEntry.IsFile )
{
Console.Out.WriteLine("Non file entry {0}", theEntry.Name);
continue;
}
else
{
if (entryCount == 0)
{
Console.Out.WriteLine(headerTitles);
Console.Out.WriteLine(headerUnderline);
}
++entryCount;
int ratio = GetCompressionRatio(theEntry.CompressedSize, theEntry.Size);
totalSize += theEntry.Size;
totalCompressedSize += theEntry.CompressedSize;
char cryptoDisplay = ( theEntry.IsCrypted ) ? '*' : ' ';
if (theEntry.Name.Length > 12)
{
Console.Out.WriteLine(theEntry.Name);
Console.Out.WriteLine(
"{0,-12}{7} {1,10:0} {2,3}% {3,10:0} {4,10:d} {4:hh:mm:ss} {5,8:x} {6,4}",
"", theEntry.Size, ratio, theEntry.CompressedSize, theEntry.DateTime, theEntry.Crc,
InterpretExternalAttributes(theEntry.HostSystem, theEntry.ExternalFileAttributes),
cryptoDisplay);
}
else
{
Console.Out.WriteLine(
"{0,-12}{7} {1,10:0} {2,3}% {3,10:0} {4,10:d} {4:hh:mm:ss} {5,8:x} {6,4}",
theEntry.Name, theEntry.Size, ratio, theEntry.CompressedSize, theEntry.DateTime, theEntry.Crc,
InterpretExternalAttributes(theEntry.HostSystem, theEntry.ExternalFileAttributes),
cryptoDisplay);
}
}
}
if (entryCount == 0)
{
Console.Out.WriteLine("Archive is empty!");
}
else
{
Console.Out.WriteLine(headerUnderline);
Console.Out.WriteLine(
"{0,-12} {1,10:0} {2,3}% {3,10:0} {4,10:d} {4:hh:mm:ss}",
entryCount.ToString() + " entries", totalSize, GetCompressionRatio(totalCompressedSize, totalSize), fileInfo.Length, fileInfo.LastWriteTime);
}
}
/// <summary>
/// List zip file contents using ZipFile class
/// </summary>
/// <param name="fileName">File to list contents of</param>
void ListArchiveContents(string fileName)
{
try
{
FileInfo fileInfo = new FileInfo(fileName);
if (!fileInfo.Exists)
{
Console.Error.WriteLine("No such file exists {0}", fileName);
}
else
{
Console.Out.WriteLine(fileName);
try
{
using (ZipFile zipFile = new ZipFile(fileName))
{
ListArchiveContents(zipFile, fileInfo);
}
}
catch(Exception ex)
{
Console.Out.WriteLine("Problem reading archive - '{0}'", ex.Message);
}
}
}
catch(Exception exception)
{
Console.Error.WriteLine("Exception during list operation: {0}", exception.Message);
}
}
/// <summary>
/// Execute List operation
/// Currently only Zip files are supported
/// </summary>
/// <param name="fileSpecs">Files to list</param>
void List(ArrayList fileSpecs)
{
foreach (string spec in fileSpecs)
{
string pathName = Path.GetDirectoryName(spec);
if ( (pathName == null) || (pathName.Length == 0) )
{
pathName = @".\";
}
string[] names = Directory.GetFiles(pathName, Path.GetFileName(spec));
if (names.Length == 0)
{
Console.Error.WriteLine("No files found matching {0}", spec);
}
else
{
foreach (string file in names)
{
ListArchiveContents(file);
}
Console.Out.WriteLine("");
}
}
}
#endregion
#region Creation
/// <summary>
/// Create archives based on specifications passed and internal state
/// </summary>
void Create(ArrayList fileSpecs)
{
string zipFileName = fileSpecs[0] as string;
if (Path.GetExtension(zipFileName).Length == 0)
{
zipFileName = Path.ChangeExtension(zipFileName, ".zip");
}
fileSpecs.RemoveAt(0);
if ( (overwriteFiles == Overwrite.Never) && File.Exists(zipFileName))
{
System.Console.Error.WriteLine("File {0} already exists", zipFileName);
return;
}
try
{
using (ZipFile zf = ZipFile.Create(zipFileName) )
{
zf.Password = password_;
zf.UseZip64 = useZip64_;
zf.BeginUpdate();
activeZipFile_ = zf;
foreach (string spec in fileSpecs)
{
// This can fail with wildcards in spec...
string path = Path.GetDirectoryName(Path.GetFullPath(spec));
string fileSpec = Path.GetFileName(spec);
zf.NameTransform = new ZipNameTransform(path);
FileSystemScanner scanner = new FileSystemScanner(WildcardToRegex(fileSpec));
scanner.ProcessFile = new ProcessFileHandler(ProcessFile);
scanner.ProcessDirectory = new ProcessDirectoryHandler(ProcessDirectory);
scanner.Scan(path, recursive_);
}
zf.CommitUpdate();
}
}
catch (Exception ex)
{
Console.WriteLine("Problem creating archive - '{0}'", ex.Message);
}
}
#endregion
#region Extraction
/// <summary>
/// Extract a file storing its contents.
/// </summary>
/// <param name="inputStream">The input stream to source fiel contents from.</param>
/// <param name="theEntry">The <see cref="ZipEntry"/> representing the stored file details </param>
/// <param name="targetDir">The directory to store the output.</param>
/// <returns>True iff successful; false otherwise.</returns>
bool ExtractFile(Stream inputStream, ZipEntry theEntry, string targetDir)
{
// try and sort out the correct place to save this entry
string entryFileName;
if (Path.IsPathRooted(theEntry.Name))
{
string workName = Path.GetPathRoot(theEntry.Name);
workName = theEntry.Name.Substring(workName.Length);
entryFileName = Path.Combine(Path.GetDirectoryName(workName), Path.GetFileName(theEntry.Name));
}
else
{
entryFileName = theEntry.Name;
}
string targetName = Path.Combine(targetDir, entryFileName);
string fullPath = Path.GetDirectoryName(Path.GetFullPath(targetName));
#if TEST
Console.WriteLine("Decompress targetfile name " + entryFileName);
Console.WriteLine("Decompress targetpath " + fullPath);
#endif
// Could be an option or parameter to allow failure or try creation
if (Directory.Exists(fullPath) == false)
{
try
{
Directory.CreateDirectory(fullPath);
}
catch
{
return false;
}
}
else if (overwriteFiles == Overwrite.Prompt)
{
if (File.Exists(targetName) == true)
{
Console.Write("File " + targetName + " already exists. Overwrite? ");
// TODO: sort out the complexities of Read so single key press can be used
string readValue;
try
{
readValue = Console.ReadLine();
}
catch
{
readValue = null;
}
if ( (readValue == null) || (readValue.ToLower() != "y") )
{
return true;
}
}
}
if (entryFileName.Length > 0)
{
if ( !silent_ )
{
Console.Write("{0}", targetName);
}
using (FileStream outputStream = File.Create(targetName))
{
StreamUtils.Copy(inputStream, outputStream, GetBuffer());
}
if (restoreDateTime_)
{
File.SetLastWriteTime(targetName, theEntry.DateTime);
}
if ( !silent_ )
{
Console.WriteLine(" OK");
}
}
return true;
}
/// <summary>
/// Decompress a file
/// </summary>
/// <param name="fileName">File to decompress</param>
/// <param name="targetDir">Directory to create output in</param>
/// <returns>true if everything was extracted successfully; false otherwise</returns>
bool DecompressArchive(string fileName, string targetDir)
{
bool result = true;
try
{
using (ZipFile zf = new ZipFile(fileName))
{
zf.Password = password_;
foreach ( ZipEntry entry in zf )
{
if ( entry.IsFile )
{
ExtractFile(zf.GetInputStream(entry), entry, targetDir);
}
else
{
if ( !silent_ )
{
Console.WriteLine("Skipping {0}", entry.Name);
}
}
}
if ( !silent_ )
{
Console.WriteLine("Done");
}
}
}
catch(Exception ex)
{
Console.WriteLine("Exception decompressing - '{0}'", ex);
result = false;
}
return result;
}
/// <summary>
/// Extract archives based on user input
/// Allows simple wildcards to specify multiple archives
/// </summary>
void Extract(ArrayList fileSpecs)
{
if ( (targetOutputDirectory_ == null) || (targetOutputDirectory_.Length == 0) )
{
targetOutputDirectory_ = @".\";
}
foreach(string spec in fileSpecs)
{
string [] names;
if ( (spec.IndexOf('*') >= 0) || (spec.IndexOf('?') >= 0) )
{
string pathName = Path.GetDirectoryName(spec);
if ( (pathName == null) || (pathName.Length == 0) )
{
pathName = @".\";
}
names = Directory.GetFiles(pathName, Path.GetFileName(spec));
}
else
{
names = new string[] { spec };
}
foreach (string fileName in names)
{
if (File.Exists(fileName) == false)
{
Console.Error.WriteLine("No such file exists {0}", fileName);
}
else
{
DecompressArchive(fileName, targetOutputDirectory_);
}
}
}
}
#endregion
#region Testing
/// <summary>
/// Handler for test result callbacks.
/// </summary>
/// <param name="status">The current <see cref="TestStatus"/>.</param>
/// <param name="message">The message applicable for this result.</param>
void TestResultHandler(TestStatus status, string message)
{
switch ( status.Operation )
{
case TestOperation.Initialising:
Console.WriteLine("Testing");
break;
case TestOperation.Complete:
Console.WriteLine("Testing complete");
break;
case TestOperation.EntryHeader:
// Not an error if message is null.
if ( message == null )
{
Console.Write("{0} - ", status.Entry.Name);
}
else
{
Console.WriteLine(message);
}
break;
case TestOperation.EntryData:
if ( message != null )
{
Console.WriteLine(message);
}
break;
case TestOperation.EntryComplete:
if ( status.EntryValid )
{
Console.WriteLine("OK");
}
break;
case TestOperation.MiscellaneousTests:
if ( message != null )
{
Console.WriteLine(message);
}
break;
}
}
/// <summary>
/// Test an archive to see if it's valid.
/// </summary>
/// <param name="fileSpecs">The files to test.</param>
void Test(ArrayList fileSpecs)
{
string zipFileName = fileSpecs[0] as string;
if (Path.GetExtension(zipFileName).Length == 0)
{
zipFileName = Path.ChangeExtension(zipFileName, ".zip");
}
try
{
using (ZipFile zipFile = new ZipFile(zipFileName))
{
zipFile.Password = password_;
if ( zipFile.TestArchive(testData_, TestStrategy.FindAllErrors,
new ZipTestResultHandler(TestResultHandler)) )
{
Console.Out.WriteLine("Archive test passed");
}
else
{
Console.Out.WriteLine("Archive test failure");
}
}
}
catch(Exception ex)
{
Console.Out.WriteLine("Error list files - '{0}'", ex.Message);
}
}
#endregion
#region Deleting
/// <summary>
/// Delete entries from an archive
/// </summary>
/// <param name="fileSpecs">The file specs to operate on.</param>
void Delete(ArrayList fileSpecs)
{
string zipFileName = fileSpecs[0] as string;
if (Path.GetExtension(zipFileName).Length == 0)
{
zipFileName = Path.ChangeExtension(zipFileName, ".zip");
}
try
{
using (ZipFile zipFile = new ZipFile(zipFileName))
{
zipFile.BeginUpdate();
for ( int i = 1; i < fileSpecs.Count; ++i )
{
zipFile.Delete((string)fileSpecs[i]);
}
zipFile.CommitUpdate();
}
}
catch(Exception ex)
{
Console.WriteLine("Problem deleting files - '{0}'", ex.Message);
}
}
#endregion
#region Adding
/// <summary>
/// Callback for adding a new file.
/// </summary>
/// <param name="sender">The scanner calling this delegate.</param>
/// <param name="args">The event arguments.</param>
void ProcessFile(object sender, ScanEventArgs args)
{
if ( !silent_ )
{
Console.WriteLine(args.Name);
}
activeZipFile_.Add(args.Name);
}
/// <summary>
/// Callback for adding a new directory.
/// </summary>
/// <param name="sender">The scanner calling this delegate.</param>
/// <param name="args">The event arguments.</param>
/// <remarks>Directories are only added if they are empty and
/// the user has specified that empty directories are to be added.</remarks>
void ProcessDirectory(object sender, DirectoryEventArgs args)
{
if ( !args.HasMatchingFiles && addEmptyDirectoryEntries_ )
{
activeZipFile_.AddDirectory(args.Name);
}
}
/// <summary>
/// Add files to an archive
/// </summary>
/// <param name="fileSpecs">The specification for files to add.</param>
void Add(ArrayList fileSpecs)
{
string zipFileName = fileSpecs[0] as string;
if (Path.GetExtension(zipFileName).Length == 0)
{
zipFileName = Path.ChangeExtension(zipFileName, ".zip");
}
fileSpecs.RemoveAt(0);
ZipFile zipFile;
try
{
if ( File.Exists(zipFileName) )
{
zipFile = new ZipFile(zipFileName);
}
else
{
zipFile = ZipFile.Create(zipFileName);
}
using (zipFile)
{
zipFile.Password = password_;
zipFile.UseZip64 = useZip64_;
zipFile.BeginUpdate();
activeZipFile_ = zipFile;
foreach (string spec in fileSpecs)
{
string path = Path.GetDirectoryName(Path.GetFullPath(spec));
string fileSpec = Path.GetFileName(spec);
zipFile.NameTransform = new ZipNameTransform(path);
FileSystemScanner scanner = new FileSystemScanner(WildcardToRegex(fileSpec));
scanner.ProcessFile = new ProcessFileHandler(ProcessFile);
scanner.ProcessDirectory = new ProcessDirectoryHandler(ProcessDirectory);
scanner.Scan(path, recursive_);
}
zipFile.CommitUpdate();
}
}
catch(Exception ex)
{
Console.WriteLine("Problem adding to archive - '{0}'", ex.Message);
}
}
#endregion
#region Class Execute Command
/// <summary>
/// Parse command line arguments and 'execute' them.
/// </summary>
void Execute(string[] args)
{
if (SetArgs(args))
{
if (fileSpecs_.Count == 0)
{
if (!silent_)
{
Console.Out.WriteLine("Nothing to do");
}
}
else
{
switch (operation_)
{
case Operation.List:
List(fileSpecs_);
break;
case Operation.Create:
Create(fileSpecs_);
break;
case Operation.Extract:
Extract(fileSpecs_);
break;
case Operation.Delete:
Delete(fileSpecs_);
break;
case Operation.Add:
Add(fileSpecs_);
break;
case Operation.Test:
Test(fileSpecs_);
break;
}
}
}
else
{
if ( !silent_ )
{
ShowHelp();
}
}
}
#endregion
#region Support Routines
byte[] GetBuffer()
{
if ( buffer_ == null )
{
buffer_ = new byte[bufferSize_];
}
return buffer_;
}
#endregion
#region Static support routines
///<summary>
/// Calculate compression ratio as a percentage
/// Doesn't allow for expansion (ratio > 100) as the resulting strings can get huge easily
/// </summary>
static int GetCompressionRatio(long packedSize, long unpackedSize)
{
int result = 0;
if ( (unpackedSize > 0) && (unpackedSize >= packedSize) )
{
result = (int) Math.Round((1.0 - ((double)packedSize / (double)unpackedSize)) * 100.0);
}
return result;
}
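// Worked example (illustrative): packedSize = 250, unpackedSize = 1000
// gives (1 - 250/1000) * 100 = 75, i.e. a 75% compression ratio.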
/// <summary>
/// Interpret attributes in conjunction with operatingSystem
/// </summary>
/// <param name="operatingSystem">The operating system.</param>
/// <param name="attributes">The external attributes.</param>
/// <returns>A string representation of the attributes passed.</returns>
static string InterpretExternalAttributes(int operatingSystem, int attributes)
{
string result = string.Empty;
if ((operatingSystem == 0) || (operatingSystem == 10))
{
if ((attributes & 0x10) != 0)
result = result + "D";
else
result = result + "-";
if ((attributes & 0x08) != 0)
result = result + "V";
else
result = result + "-";
if ((attributes & 0x01) != 0)
result = result + "r";
else
result = result + "-";
if ((attributes & 0x20) != 0)
result = result + "a";
else
result = result + "-";
if ((attributes & 0x04) != 0)
result = result + "s";
else
result = result + "-";
if ((attributes & 0x02) != 0)
result = result + "h";
else
result = result + "-";
// Device
if ((attributes & 0x40) != 0)
result = result + "d";
else
result = result + "-";
// OS is NTFS
if ( operatingSystem == 10 )
{
// Encrypted
if ( (attributes & 0x4000) != 0 )
{
result += "E";
}
else
{
result += "-";
}
// Not content indexed
if ( (attributes & 0x2000) != 0 )
{
result += "n";
}
else
{
result += "-";
}
// Offline
if ( (attributes & 0x1000) != 0 )
{
result += "O";
}
else
{
result += "-";
}
// Compressed
if ( (attributes & 0x0800) != 0 )
{
result += "C";
}
else
{
result += "-";
}
// Reparse point
if ( (attributes & 0x0400) != 0 )
{
result += "R";
}
else
{
result += "-";
}
// Sparse
if ( (attributes & 0x0200) != 0 )
{
result += "S";
}
else
{
result += "-";
}
// Temporary
if ( (attributes & 0x0100) != 0 )
{
result += "T";
}
else
{
result += "-";
}
}
}
return result;
}
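// Illustrative result (assuming an MS-DOS/Windows host, operatingSystem == 0):
// attributes 0x21 (read-only + archive) map to "--ra---".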
/// <summary>
/// Determine if string is numeric [0-9]+
/// </summary>
/// <param name="rhs">string to test</param>
/// <returns>true iff rhs is numeric</returns>
static bool IsNumeric(string rhs)
{
bool result;
if (rhs != null && rhs.Length > 0)
{
result = true;
for (int i = 0; i < rhs.Length; ++i)
{
if (!char.IsDigit(rhs[i]))
{
result = false;
break;
}
}
}
else
{
result = false;
}
return result;
}
/// <summary>
/// Make external attributes suitable for a <see cref="ZipEntry"/>
/// </summary>
/// <param name="info">The <see cref="FileInfo"/> to convert</param>
/// <returns>Returns External Attributes for Zip use</returns>
static int MakeExternalAttributes(FileInfo info)
{
return (int)info.Attributes;
}
/// <summary>
/// Convert a wildcard expression to a regular expression
/// </summary>
/// <param name="wildcard">The wildcard expression to convert.</param>
/// <returns>A regular expression representing the converted wildcard expression.</returns>
static string WildcardToRegex(string wildcard)
{
int dotPos = wildcard.IndexOf('.');
bool dotted = (dotPos >= 0) && (dotPos < wildcard.Length - 1);
string converted = wildcard.Replace(".", @"\.");
converted = converted.Replace("?", ".");
converted = converted.Replace("*", ".*");
converted = converted.Replace("(", @"\(");
converted = converted.Replace(")", @"\)");
if ( dotted )
{
converted += "$";
}
return converted;
}
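// Examples (illustrative): "*.txt" becomes ".*\.txt$" and "data?.csv" becomes "data.\.csv$";
// the trailing "$" is only appended when the wildcard contains a dot before its last character.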
#endregion
#region Main
/// <summary>
/// Entry point for program, creates archiver and runs it
/// </summary>
/// <param name="args">
/// Command line argument to process
/// </param>
public static void Main(string[] args)
{
ZipFileArchiver zf = new ZipFileArchiver();
zf.Execute(args);
}
#endregion
#region Instance Fields
/// <summary>
/// Has user already seen help output?
/// </summary>
bool seenHelp_;
/// <summary>
/// File specifications possibly with wildcards from command line
/// </summary>
ArrayList fileSpecs_ = new ArrayList();
/// <summary>
/// Deflate compression level
/// </summary>
int compressionLevel_ = Deflater.DEFAULT_COMPRESSION;
/// <summary>
/// Create entries for directories with no files
/// </summary>
bool addEmptyDirectoryEntries_;
/// <summary>
/// Apply operations recursively
/// </summary>
bool recursive_;
/// <summary>
/// Operate silently
/// </summary>
bool silent_;
/// <summary>
/// Restore file date and time to that stored in zip file on extraction
/// </summary>
bool restoreDateTime_;
/// <summary>
/// Overwrite files handling
/// </summary>
Overwrite overwriteFiles = Overwrite.Prompt;
/// <summary>
/// Optional password for archive
/// </summary>
string password_;
/// <summary>
/// Where things will go when decompressed.
/// </summary>
string targetOutputDirectory_;
/// <summary>
/// What to do based on parsed command line arguments
/// </summary>
Operation operation_ = Operation.List;
/// <summary>
/// Flag whose value is true if data should be tested; false if it should not.
/// </summary>
bool testData_;
/// <summary>
/// The currently active <see cref="ZipFile"/>.
/// </summary>
/// <remarks>Used for callbacks/delegates</remarks>
ZipFile activeZipFile_;
/// <summary>
/// Buffer used during some operations
/// </summary>
byte[] buffer_;
/// <summary>
/// The size of buffer to provide. <see cref="GetBuffer"></see>
/// </summary>
int bufferSize_ = 4096;
/// <summary>
/// The Zip64 extension use to apply.
/// </summary>
UseZip64 useZip64_ = UseZip64.Off;
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Globalization;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Versioning;
namespace System
{
[Serializable]
[StructLayout(LayoutKind.Sequential)]
[TypeForwardedFrom("mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")]
public readonly struct Int32 : IComparable, IConvertible, IFormattable, IComparable<int>, IEquatable<int>, ISpanFormattable
{
private readonly int m_value; // Do not rename (binary serialization)
public const int MaxValue = 0x7fffffff;
public const int MinValue = unchecked((int)0x80000000);
// Compares this object to another object, returning an integer that
// indicates the relationship.
// Returns :
// 0 if the values are equal
// Negative number if _value is less than value
// Positive number if _value is more than value
// null is considered to be less than any instance, hence returns positive number
// If object is not of type Int32, this method throws an ArgumentException.
//
public int CompareTo(object? value)
{
if (value == null)
{
return 1;
}
if (value is int)
{
// NOTE: Cannot use return (_value - value) as this causes a wrap
// around in cases where _value - value > MaxValue.
int i = (int)value;
if (m_value < i) return -1;
if (m_value > i) return 1;
return 0;
}
throw new ArgumentException(SR.Arg_MustBeInt32);
}
public int CompareTo(int value)
{
// NOTE: Cannot use return (_value - value) as this causes a wrap
// around in cases where _value - value > MaxValue.
if (m_value < value) return -1;
if (m_value > value) return 1;
return 0;
}
public override bool Equals(object? obj)
{
if (!(obj is int))
{
return false;
}
return m_value == ((int)obj).m_value;
}
[NonVersionable]
public bool Equals(int obj)
{
return m_value == obj;
}
// The int value is returned directly as the hash code.
public override int GetHashCode()
{
return m_value;
}
public override string ToString()
{
return Number.FormatInt32(m_value, null, null);
}
public string ToString(string? format)
{
return Number.FormatInt32(m_value, format, null);
}
public string ToString(IFormatProvider? provider)
{
return Number.FormatInt32(m_value, null, provider);
}
public string ToString(string? format, IFormatProvider? provider)
{
return Number.FormatInt32(m_value, format, provider);
}
public bool TryFormat(Span<char> destination, out int charsWritten, ReadOnlySpan<char> format = default, IFormatProvider? provider = null)
{
return Number.TryFormatInt32(m_value, format, provider, destination, out charsWritten);
}
public static int Parse(string s)
{
if (s == null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.s);
return Number.ParseInt32(s, NumberStyles.Integer, NumberFormatInfo.CurrentInfo);
}
public static int Parse(string s, NumberStyles style)
{
NumberFormatInfo.ValidateParseStyleInteger(style);
if (s == null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.s);
return Number.ParseInt32(s, style, NumberFormatInfo.CurrentInfo);
}
// Parses an integer from a String in the given style. If
// a NumberFormatInfo isn't specified, the current culture's
// NumberFormatInfo is assumed.
//
public static int Parse(string s, IFormatProvider? provider)
{
if (s == null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.s);
return Number.ParseInt32(s, NumberStyles.Integer, NumberFormatInfo.GetInstance(provider));
}
// Parses an integer from a String in the given style. If
// a NumberFormatInfo isn't specified, the current culture's
// NumberFormatInfo is assumed.
//
public static int Parse(string s, NumberStyles style, IFormatProvider? provider)
{
NumberFormatInfo.ValidateParseStyleInteger(style);
if (s == null) ThrowHelper.ThrowArgumentNullException(ExceptionArgument.s);
return Number.ParseInt32(s, style, NumberFormatInfo.GetInstance(provider));
}
public static int Parse(ReadOnlySpan<char> s, NumberStyles style = NumberStyles.Integer, IFormatProvider? provider = null)
{
NumberFormatInfo.ValidateParseStyleInteger(style);
return Number.ParseInt32(s, style, NumberFormatInfo.GetInstance(provider));
}
// Parses an integer from a String. Returns false rather
// than throwing an exception if input is invalid.
//
public static bool TryParse(string? s, out int result)
{
if (s == null)
{
result = 0;
return false;
}
return Number.TryParseInt32IntegerStyle(s, NumberStyles.Integer, NumberFormatInfo.CurrentInfo, out result) == Number.ParsingStatus.OK;
}
public static bool TryParse(ReadOnlySpan<char> s, out int result)
{
return Number.TryParseInt32IntegerStyle(s, NumberStyles.Integer, NumberFormatInfo.CurrentInfo, out result) == Number.ParsingStatus.OK;
}
// Parses an integer from a String in the given style. Returns false rather
// than throwing an exception if input is invalid.
//
public static bool TryParse(string? s, NumberStyles style, IFormatProvider? provider, out int result)
{
NumberFormatInfo.ValidateParseStyleInteger(style);
if (s == null)
{
result = 0;
return false;
}
return Number.TryParseInt32(s, style, NumberFormatInfo.GetInstance(provider), out result) == Number.ParsingStatus.OK;
}
public static bool TryParse(ReadOnlySpan<char> s, NumberStyles style, IFormatProvider? provider, out int result)
{
NumberFormatInfo.ValidateParseStyleInteger(style);
return Number.TryParseInt32(s, style, NumberFormatInfo.GetInstance(provider), out result) == Number.ParsingStatus.OK;
}
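// Illustrative usage (a sketch, not part of the type itself): int.Parse("123") returns 123,
// while int.TryParse("abc", out int n) returns false and leaves n == 0.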
//
// IConvertible implementation
//
public TypeCode GetTypeCode()
{
return TypeCode.Int32;
}
bool IConvertible.ToBoolean(IFormatProvider? provider)
{
return Convert.ToBoolean(m_value);
}
char IConvertible.ToChar(IFormatProvider? provider)
{
return Convert.ToChar(m_value);
}
sbyte IConvertible.ToSByte(IFormatProvider? provider)
{
return Convert.ToSByte(m_value);
}
byte IConvertible.ToByte(IFormatProvider? provider)
{
return Convert.ToByte(m_value);
}
short IConvertible.ToInt16(IFormatProvider? provider)
{
return Convert.ToInt16(m_value);
}
ushort IConvertible.ToUInt16(IFormatProvider? provider)
{
return Convert.ToUInt16(m_value);
}
int IConvertible.ToInt32(IFormatProvider? provider)
{
return m_value;
}
uint IConvertible.ToUInt32(IFormatProvider? provider)
{
return Convert.ToUInt32(m_value);
}
long IConvertible.ToInt64(IFormatProvider? provider)
{
return Convert.ToInt64(m_value);
}
ulong IConvertible.ToUInt64(IFormatProvider? provider)
{
return Convert.ToUInt64(m_value);
}
float IConvertible.ToSingle(IFormatProvider? provider)
{
return Convert.ToSingle(m_value);
}
double IConvertible.ToDouble(IFormatProvider? provider)
{
return Convert.ToDouble(m_value);
}
decimal IConvertible.ToDecimal(IFormatProvider? provider)
{
return Convert.ToDecimal(m_value);
}
DateTime IConvertible.ToDateTime(IFormatProvider? provider)
{
throw new InvalidCastException(SR.Format(SR.InvalidCast_FromTo, "Int32", "DateTime"));
}
object IConvertible.ToType(Type type, IFormatProvider? provider)
{
return Convert.DefaultToType((IConvertible)this, type, provider);
}
}
}
| |
/*
* Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
/*
* Do not modify this file. This file is generated from the ec2-2014-10-01.normal.json service model.
*/
using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Text;
using System.IO;
using Amazon.Runtime;
using Amazon.Runtime.Internal;
namespace Amazon.EC2.Model
{
/// <summary>
/// Container for the parameters to the CreateVolume operation.
/// Creates an Amazon EBS volume that can be attached to an instance in the same Availability
/// Zone. The volume is created in the regional endpoint that you send the HTTP request
/// to. For more information see <a href="http://docs.aws.amazon.com/general/latest/gr/rande.html">Regions
/// and Endpoints</a>.
///
///
/// <para>
/// You can create a new empty volume or restore a volume from an Amazon EBS snapshot.
/// Any AWS Marketplace product codes from the snapshot are propagated to the volume.
/// </para>
///
/// <para>
/// You can create encrypted volumes with the <code>Encrypted</code> parameter. Encrypted
/// volumes may only be attached to instances that support Amazon EBS encryption. Volumes
/// that are created from encrypted snapshots are also automatically encrypted. For more
/// information, see <a href="http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSEncryption.html">Amazon
/// EBS Encryption</a> in the <i>Amazon Elastic Compute Cloud User Guide for Linux</i>.
/// </para>
///
/// <para>
/// For more information, see <a href="http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ebs-creating-volume.html">Creating
/// or Restoring an Amazon EBS Volume</a> in the <i>Amazon Elastic Compute Cloud User
/// Guide for Linux</i>.
/// </para>
/// </summary>
public partial class CreateVolumeRequest : AmazonEC2Request
{
private string _availabilityZone;
private bool? _encrypted;
private int? _iops;
private string _kmsKeyId;
private int? _size;
private string _snapshotId;
private VolumeType _volumeType;
/// <summary>
/// Empty constructor used to set properties independently even when a simple constructor is available
/// </summary>
public CreateVolumeRequest() { }
/// <summary>
/// Instantiates CreateVolumeRequest with the parameterized properties
/// </summary>
/// <param name="availabilityZone">The Availability Zone in which to create the volume. Use <a>DescribeAvailabilityZones</a> to list the Availability Zones that are currently available to you.</param>
/// <param name="size">The size of the volume, in GiBs. Constraints: If the volume type is <code>io1</code>, the minimum size of the volume is 4 GiB; otherwise, the minimum size is 1 GiB. The maximum volume size is 1024 GiB. If you specify a snapshot, the volume size must be equal to or larger than the snapshot size. Default: If you're creating the volume from a snapshot and don't specify a volume size, the default is the snapshot size.</param>
public CreateVolumeRequest(string availabilityZone, int size)
{
_availabilityZone = availabilityZone;
_size = size;
}
/// <summary>
/// Instantiates CreateVolumeRequest with the parameterized properties
/// </summary>
/// <param name="availabilityZone">The Availability Zone in which to create the volume. Use <a>DescribeAvailabilityZones</a> to list the Availability Zones that are currently available to you.</param>
/// <param name="snapshotId">The snapshot from which to create the volume.</param>
public CreateVolumeRequest(string availabilityZone, string snapshotId)
{
_availabilityZone = availabilityZone;
_snapshotId = snapshotId;
}
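// Illustrative usage (a sketch; the Availability Zone, size and snapshot id are placeholder
// values, and VolumeType.Gp2 assumes the SDK's VolumeType constants):
//   var request = new CreateVolumeRequest("us-east-1a", 10) { VolumeType = VolumeType.Gp2 };
//   // or restore from a snapshot:
//   var fromSnapshot = new CreateVolumeRequest("us-east-1a", "snap-1234567890abcdef0");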
/// <summary>
/// Gets and sets the property AvailabilityZone.
/// <para>
/// The Availability Zone in which to create the volume. Use <a>DescribeAvailabilityZones</a>
/// to list the Availability Zones that are currently available to you.
/// </para>
/// </summary>
public string AvailabilityZone
{
get { return this._availabilityZone; }
set { this._availabilityZone = value; }
}
// Check to see if AvailabilityZone property is set
internal bool IsSetAvailabilityZone()
{
return this._availabilityZone != null;
}
/// <summary>
/// Gets and sets the property Encrypted.
/// <para>
/// Specifies whether the volume should be encrypted. Encrypted Amazon EBS volumes may
/// only be attached to instances that support Amazon EBS encryption. Volumes that are
/// created from encrypted snapshots are automatically encrypted. There is no way to create
/// an encrypted volume from an unencrypted snapshot or vice versa. If your AMI uses encrypted
/// volumes, you can only launch it on supported instance types. For more information,
/// see <a href="http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/EBSEncryption.html">Amazon
/// EBS Encryption</a> in the <i>Amazon Elastic Compute Cloud User Guide for Linux</i>.
///
/// </para>
/// </summary>
public bool Encrypted
{
get { return this._encrypted.GetValueOrDefault(); }
set { this._encrypted = value; }
}
// Check to see if Encrypted property is set
internal bool IsSetEncrypted()
{
return this._encrypted.HasValue;
}
/// <summary>
/// Gets and sets the property Iops.
/// <para>
/// Only valid for Provisioned IOPS (SSD) volumes. The number of I/O operations per second
/// (IOPS) to provision for the volume.
/// </para>
/// </summary>
public int Iops
{
get { return this._iops.GetValueOrDefault(); }
set { this._iops = value; }
}
// Check to see if Iops property is set
internal bool IsSetIops()
{
return this._iops.HasValue;
}
/// <summary>
/// Gets and sets the property KmsKeyId.
/// <para>
/// The full ARN of the AWS Key Management Service (KMS) master key to use when creating
/// the encrypted volume. This parameter is only required if you want to use a non-default
/// master key; if this parameter is not specified, the default master key is used. The
/// ARN contains the <code>arn:aws:kms</code> namespace, followed by the region of the
/// master key, the AWS account ID of the master key owner, the <code>key</code> namespace,
/// and then the master key ID. For example, arn:aws:kms:<i>us-east-1</i>:<i>012345678910</i>:key/<i>abcd1234-a123-456a-a12b-a123b4cd56ef</i>.
/// </para>
/// </summary>
public string KmsKeyId
{
get { return this._kmsKeyId; }
set { this._kmsKeyId = value; }
}
// Check to see if KmsKeyId property is set
internal bool IsSetKmsKeyId()
{
return this._kmsKeyId != null;
}
/// <summary>
/// Gets and sets the property Size.
/// <para>
/// The size of the volume, in GiBs.
/// </para>
///
/// <para>
/// Constraints: If the volume type is <code>io1</code>, the minimum size of the volume
/// is 4 GiB; otherwise, the minimum size is 1 GiB. The maximum volume size is 1024 GiB.
/// If you specify a snapshot, the volume size must be equal to or larger than the snapshot
/// size.
/// </para>
///
/// <para>
/// Default: If you're creating the volume from a snapshot and don't specify a volume
/// size, the default is the snapshot size.
/// </para>
/// </summary>
public int Size
{
get { return this._size.GetValueOrDefault(); }
set { this._size = value; }
}
// Check to see if Size property is set
internal bool IsSetSize()
{
return this._size.HasValue;
}
/// <summary>
/// Gets and sets the property SnapshotId.
/// <para>
/// The snapshot from which to create the volume.
/// </para>
/// </summary>
public string SnapshotId
{
get { return this._snapshotId; }
set { this._snapshotId = value; }
}
// Check to see if SnapshotId property is set
internal bool IsSetSnapshotId()
{
return this._snapshotId != null;
}
/// <summary>
/// Gets and sets the property VolumeType.
/// <para>
/// The volume type. This can be <code>gp2</code> for General Purpose (SSD) volumes, <code>io1</code>
/// for Provisioned IOPS (SSD) volumes, or <code>standard</code> for Magnetic volumes.
/// </para>
///
/// <para>
/// Default: <code>standard</code>
/// </para>
/// </summary>
public VolumeType VolumeType
{
get { return this._volumeType; }
set { this._volumeType = value; }
}
// Check to see if VolumeType property is set
internal bool IsSetVolumeType()
{
return this._volumeType != null;
}
}
}
| |
// Copyright (c) Ugo Lattanzi. All Rights Reserved. Licensed under the MIT license. See License.txt in the project root for license information.
using System;
using System.Net.Security;
using System.Security.Authentication;
using StackExchange.Redis.Extensions.Core.Abstractions;
using StackExchange.Redis.Extensions.Core.Implementations;
using StackExchange.Redis.Extensions.Core.Models;
using StackExchange.Redis.Profiling;
namespace StackExchange.Redis.Extensions.Core.Configuration;
/// <summary>
/// The redis configuration
/// </summary>
public class RedisConfiguration
{
private ConfigurationOptions? options;
private string keyPrefix = string.Empty;
private string? password;
private bool allowAdmin;
private bool ssl;
private int connectTimeout = 5000;
private int syncTimeout = 1000;
private bool abortOnConnectFail;
private int database;
private RedisHost[] hosts = Array.Empty<RedisHost>();
private ServerEnumerationStrategy serverEnumerationStrategy = new();
private uint maxValueLength;
private int poolSize = 5;
private string[]? excludeCommands;
private string? configurationChannel;
private string? connectionString;
private string? serviceName;
private SslProtocols? sslProtocols;
private Func<ProfilingSession>? profilingSessionProvider;
private int workCount = Environment.ProcessorCount * 2;
private ConnectionSelectionStrategy connectionSelectionStrategy = ConnectionSelectionStrategy.LeastLoaded;
/// <summary>
/// A RemoteCertificateValidationCallback delegate responsible for validating the certificate supplied by the remote party; note
/// that this cannot be specified in the configuration-string.
/// </summary>
public event RemoteCertificateValidationCallback? CertificateValidation;
/// <summary>
/// Indicate if the current configuration is the default;
/// </summary>
public bool IsDefault { get; set; }
/// <summary>
/// The identifier name for the connection
/// </summary>
public string? Name { get; set; }
/// <summary>
/// Gets or sets the ConnectionSelectionStrategy to use during connection selection.
/// </summary>
public ConnectionSelectionStrategy ConnectionSelectionStrategy
{
get => connectionSelectionStrategy;
set
{
connectionSelectionStrategy = value;
ResetConfigurationOptions();
}
}
/// <summary>
/// Gets or sets the SocketManager WorkCount used for every ConnectionMultiplexer.
/// </summary>
public int WorkCount
{
get => workCount;
set
{
workCount = value;
ResetConfigurationOptions();
}
}
/// <summary>
/// Gets or sets the service name used in case of Sentinel.
/// </summary>
public string? ServiceName
{
get => serviceName;
set
{
serviceName = value;
ResetConfigurationOptions();
}
}
/// <summary>
/// Gets a value indicating whether the cluster is configured for Sentinel or not.
/// </summary>
public bool IsSentinelCluster => !string.IsNullOrEmpty(ServiceName);
/// <summary>
/// Gets or sets the SSL protocols to use for the encrypted connection.
/// </summary>
public SslProtocols? SslProtocols
{
get => sslProtocols;
set
{
sslProtocols = value;
ResetConfigurationOptions();
}
}
/// <summary>
/// Gets or sets the connection string. It wins over the individual property configuration.
/// </summary>
public string? ConnectionString
{
get => connectionString;
set
{
connectionString = value;
ResetConfigurationOptions();
}
}
/// <summary>
/// Gets or sets the channel to use for broadcasting and listening for configuration change notification.
/// </summary>
public string? ConfigurationChannel
{
get => configurationChannel;
set
{
configurationChannel = value;
ResetConfigurationOptions();
}
}
/// <summary>
/// Gets or sets the key separation prefix used for all cache entries.
/// </summary>
public string KeyPrefix
{
get => keyPrefix;
set
{
keyPrefix = value;
ResetConfigurationOptions();
}
}
/// <summary>
/// Gets or sets the redis password.
/// </summary>
public string? Password
{
get => password;
set
{
password = value;
ResetConfigurationOptions();
}
}
/// <summary>
/// Gets or sets a value indicating whether admin operations should be allowed.
/// </summary>
public bool AllowAdmin
{
get => allowAdmin;
set
{
allowAdmin = value;
ResetConfigurationOptions();
}
}
/// <summary>
/// Gets or sets a value indicating whether the connection should be encrypted.
/// </summary>
public bool Ssl
{
get => ssl;
set
{
ssl = value;
ResetConfigurationOptions();
}
}
/// <summary>
/// Gets or sets the time in milliseconds that should be allowed for connection (defaults to 5 seconds unless SyncTimeout is higher).
/// </summary>
public int ConnectTimeout
{
get => connectTimeout;
set
{
connectTimeout = value;
ResetConfigurationOptions();
}
}
/// <summary>
/// Gets or sets the time in milliseconds that the system should allow for synchronous operations (defaults to 1000 milliseconds).
/// </summary>
public int SyncTimeout
{
get => syncTimeout;
set
{
syncTimeout = value;
ResetConfigurationOptions();
}
}
/// <summary>
/// Gets or sets a value indicating whether connect/configuration timeouts should be explicitly notified via a TimeoutException.
/// </summary>
public bool AbortOnConnectFail
{
get => abortOnConnectFail;
set
{
abortOnConnectFail = value;
ResetConfigurationOptions();
}
}
/// <summary>
/// Gets or sets the database Id.
/// </summary>
public int Database
{
get => database;
set
{
database = value;
ResetConfigurationOptions();
}
}
/// <summary>
/// Gets or sets the hosts of the Redis servers (the IPs or names).
/// </summary>
public RedisHost[] Hosts
{
get => hosts;
set
{
hosts = value;
ResetConfigurationOptions();
}
}
/// <summary>
/// Gets or sets the strategy to use when executing server wide commands.
/// </summary>
public ServerEnumerationStrategy ServerEnumerationStrategy
{
get => serverEnumerationStrategy;
set
{
serverEnumerationStrategy = value;
ResetConfigurationOptions();
}
}
/// <summary>
/// Gets or sets the maximal value length which can be set in the database.
/// </summary>
public uint MaxValueLength
{
get => maxValueLength;
set
{
maxValueLength = value;
ResetConfigurationOptions();
}
}
/// <summary>
/// Gets or sets the Redis connection pool size.
/// </summary>
public int PoolSize
{
get => poolSize;
set
{
poolSize = value;
ResetConfigurationOptions();
}
}
/// <summary>
/// Gets or sets the commands to exclude.
/// </summary>
public string[]? ExcludeCommands
{
get => excludeCommands;
set
{
excludeCommands = value;
ResetConfigurationOptions();
}
}
/// <summary>
/// Gets or sets the Redis profiler to attach to the ConnectionMultiplexer.
/// </summary>
public Func<ProfilingSession>? ProfilingSessionProvider
{
get => profilingSessionProvider;
set
{
profilingSessionProvider = value;
ResetConfigurationOptions();
}
}
/// <summary>
/// Gets or sets the factory for <see cref="IStateAwareConnection"/> creation
/// </summary>
/// <returns>If the property is not set, the default <see cref="IStateAwareConnection"/> will be resolved</returns>
/// <remarks>
/// The property is optional.
/// The property should be assigned by invocation code only once. (We are not doing additional checks in the property itself in order to prevent any possible issues during serialization)
/// </remarks>
public StateAwareConnectionResolver StateAwareConnectionFactory { get; set; } = (cm, logger) => new RedisConnectionPoolManager.StateAwareConnection(cm, logger);
/// <summary>
/// Gets the Redis configuration options
/// </summary>
/// <value>An instance of <see cref="ConfigurationOptions" />.</value>
public ConfigurationOptions ConfigurationOptions
{
get
{
if (options == null)
{
ConfigurationOptions newOptions;
if (!string.IsNullOrEmpty(ConnectionString))
{
newOptions = ConfigurationOptions.Parse(ConnectionString);
}
else
{
newOptions = new()
{
Ssl = Ssl,
AllowAdmin = AllowAdmin,
Password = Password,
ConnectTimeout = ConnectTimeout,
SyncTimeout = SyncTimeout,
AbortOnConnectFail = AbortOnConnectFail,
ConfigurationChannel = ConfigurationChannel,
SslProtocols = sslProtocols,
ChannelPrefix = KeyPrefix
};
if (IsSentinelCluster)
{
newOptions.ServiceName = ServiceName;
newOptions.CommandMap = CommandMap.Sentinel;
}
foreach (var redisHost in Hosts)
newOptions.EndPoints.Add(redisHost.Host, redisHost.Port);
}
if (ExcludeCommands != null)
{
newOptions.CommandMap = CommandMap.Create(
new(ExcludeCommands),
false);
}
if (WorkCount > 0)
newOptions.SocketManager = new(GetType().Name, WorkCount);
newOptions.CertificateValidation += CertificateValidation;
options = newOptions;
}
return options;
}
}
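// Illustrative configuration (a minimal sketch; host, port and password are placeholders,
// and RedisHost is assumed to expose settable Host and Port properties as used by this library):
//   var conf = new RedisConfiguration
//   {
//       Hosts = new[] { new RedisHost { Host = "localhost", Port = 6379 } },
//       Password = "my-password",
//       Database = 0,
//       PoolSize = 5
//   };
//   ConfigurationOptions options = conf.ConfigurationOptions;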
private void ResetConfigurationOptions()
{
// this is needed in order to cover this scenario
// https://github.com/imperugo/StackExchange.Redis.Extensions/issues/165
options = null;
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Linq;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
using Xunit;
namespace System.IO.Pipes.Tests
{
/// <summary>
/// The Simple NamedPipe tests cover common, everyday scenarios that are shared
/// by all NamedPipes whether they be Server/Client or In/Out/Inout.
/// </summary>
public abstract class NamedPipeTest_Simple : NamedPipeTestBase
{
/// <summary>
/// Yields every combination of testing options for the OneWayReadWrites test
/// </summary>
/// <returns></returns>
public static IEnumerable<object[]> OneWayReadWritesMemberData()
{
var options = new[] { PipeOptions.None, PipeOptions.Asynchronous };
var bools = new[] { false, true };
foreach (PipeOptions serverOption in options)
foreach (PipeOptions clientOption in options)
foreach (bool asyncServerOps in bools)
foreach (bool asyncClientOps in bools)
yield return new object[] { serverOption, clientOption, asyncServerOps, asyncClientOps };
}
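// For example, one yielded combination is { PipeOptions.Asynchronous, PipeOptions.None, true, false }:
// an asynchronous server paired with a synchronous client, driven by async server ops and sync client ops.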
[Theory]
[MemberData(nameof(OneWayReadWritesMemberData))]
public async Task OneWayReadWrites(PipeOptions serverOptions, PipeOptions clientOptions, bool asyncServerOps, bool asyncClientOps)
{
using (NamedPipePair pair = CreateNamedPipePair(serverOptions, clientOptions))
{
NamedPipeClientStream client = pair.clientStream;
NamedPipeServerStream server = pair.serverStream;
byte[] received = new byte[] { 0 };
Task clientTask = Task.Run(async () =>
{
if (asyncClientOps)
{
await client.ConnectAsync();
if (pair.writeToServer)
{
received = await ReadBytesAsync(client, sendBytes.Length);
}
else
{
await WriteBytesAsync(client, sendBytes);
}
}
else
{
client.Connect();
if (pair.writeToServer)
{
received = ReadBytes(client, sendBytes.Length);
}
else
{
WriteBytes(client, sendBytes);
}
}
});
if (asyncServerOps)
{
await server.WaitForConnectionAsync();
if (pair.writeToServer)
{
await WriteBytesAsync(server, sendBytes);
}
else
{
received = await ReadBytesAsync(server, sendBytes.Length);
}
}
else
{
server.WaitForConnection();
if (pair.writeToServer)
{
WriteBytes(server, sendBytes);
}
else
{
received = ReadBytes(server, sendBytes.Length);
}
}
await clientTask;
Assert.Equal(sendBytes, received);
server.Disconnect();
Assert.False(server.IsConnected);
}
}
[Fact]
public async Task ClonedServer_ActsAsOriginalServer()
{
byte[] msg1 = new byte[] { 5, 7, 9, 10 };
byte[] received1 = new byte[] { 0, 0, 0, 0 };
using (NamedPipePair pair = CreateNamedPipePair())
{
NamedPipeServerStream serverBase = pair.serverStream;
NamedPipeClientStream client = pair.clientStream;
pair.Connect();
if (pair.writeToServer)
{
Task<int> clientTask = client.ReadAsync(received1, 0, received1.Length);
using (NamedPipeServerStream server = new NamedPipeServerStream(PipeDirection.Out, false, true, serverBase.SafePipeHandle))
{
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
{
Assert.Equal(1, client.NumberOfServerInstances);
}
server.Write(msg1, 0, msg1.Length);
int receivedLength = await clientTask;
Assert.Equal(msg1.Length, receivedLength);
Assert.Equal(msg1, received1);
}
}
else
{
Task clientTask = client.WriteAsync(msg1, 0, msg1.Length);
using (NamedPipeServerStream server = new NamedPipeServerStream(PipeDirection.In, false, true, serverBase.SafePipeHandle))
{
int receivedLength = server.Read(received1, 0, msg1.Length);
Assert.Equal(msg1.Length, receivedLength);
Assert.Equal(msg1, received1);
await clientTask;
}
}
}
}
[Fact]
public async Task ClonedClient_ActsAsOriginalClient()
{
byte[] msg1 = new byte[] { 5, 7, 9, 10 };
byte[] received1 = new byte[] { 0, 0, 0, 0 };
using (NamedPipePair pair = CreateNamedPipePair())
{
pair.Connect();
NamedPipeServerStream server = pair.serverStream;
if (pair.writeToServer)
{
using (NamedPipeClientStream client = new NamedPipeClientStream(PipeDirection.In, false, true, pair.clientStream.SafePipeHandle))
{
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
{
Assert.Equal(1, client.NumberOfServerInstances);
}
Task<int> clientTask = client.ReadAsync(received1, 0, received1.Length);
server.Write(msg1, 0, msg1.Length);
int receivedLength = await clientTask;
Assert.Equal(msg1.Length, receivedLength);
Assert.Equal(msg1, received1);
}
}
else
{
using (NamedPipeClientStream client = new NamedPipeClientStream(PipeDirection.Out, false, true, pair.clientStream.SafePipeHandle))
{
Task clientTask = client.WriteAsync(msg1, 0, msg1.Length);
int receivedLength = server.Read(received1, 0, msg1.Length);
Assert.Equal(msg1.Length, receivedLength);
Assert.Equal(msg1, received1);
await clientTask;
}
}
}
}
[Fact]
public void ConnectOnAlreadyConnectedClient_Throws_InvalidOperationException()
{
using (NamedPipePair pair = CreateNamedPipePair())
{
NamedPipeServerStream server = pair.serverStream;
NamedPipeClientStream client = pair.clientStream;
byte[] buffer = new byte[] { 0, 0, 0, 0 };
pair.Connect();
Assert.True(client.IsConnected);
Assert.True(server.IsConnected);
Assert.Throws<InvalidOperationException>(() => client.Connect());
}
}
[Fact]
public void WaitForConnectionOnAlreadyConnectedServer_Throws_InvalidOperationException()
{
using (NamedPipePair pair = CreateNamedPipePair())
{
NamedPipeServerStream server = pair.serverStream;
NamedPipeClientStream client = pair.clientStream;
byte[] buffer = new byte[] { 0, 0, 0, 0 };
pair.Connect();
Assert.True(client.IsConnected);
Assert.True(server.IsConnected);
Assert.Throws<InvalidOperationException>(() => server.WaitForConnection());
}
}
[Fact]
public async Task CancelTokenOn_ServerWaitForConnectionAsync_Throws_OperationCanceledException()
{
using (NamedPipePair pair = CreateNamedPipePair())
{
NamedPipeServerStream server = pair.serverStream;
var ctx = new CancellationTokenSource();
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) // cancellation token after the operation has been initiated
{
Task serverWaitTimeout = server.WaitForConnectionAsync(ctx.Token);
ctx.Cancel();
await Assert.ThrowsAnyAsync<OperationCanceledException>(() => serverWaitTimeout);
}
ctx.Cancel();
Assert.True(server.WaitForConnectionAsync(ctx.Token).IsCanceled);
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // P/Invoking to Win32 functions
public async Task CancelTokenOff_ServerWaitForConnectionAsyncWithOuterCancellation_Throws_OperationCanceledException()
{
using (NamedPipePair pair = CreateNamedPipePair())
{
NamedPipeServerStream server = pair.serverStream;
Task waitForConnectionTask = server.WaitForConnectionAsync(CancellationToken.None);
Assert.True(Interop.CancelIoEx(server.SafePipeHandle), "Outer cancellation failed");
await Assert.ThrowsAnyAsync<OperationCanceledException>(() => waitForConnectionTask);
Assert.True(waitForConnectionTask.IsCanceled);
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // P/Invoking to Win32 functions
public async Task CancelTokenOn_ServerWaitForConnectionAsyncWithOuterCancellation_Throws_IOException()
{
using (NamedPipePair pair = CreateNamedPipePair())
{
var cts = new CancellationTokenSource();
NamedPipeServerStream server = pair.serverStream;
Task waitForConnectionTask = server.WaitForConnectionAsync(cts.Token);
Assert.True(Interop.CancelIoEx(server.SafePipeHandle), "Outer cancellation failed");
await Assert.ThrowsAsync<IOException>(() => waitForConnectionTask);
}
}
[Fact]
public async Task OperationsOnDisconnectedServer()
{
using (NamedPipePair pair = CreateNamedPipePair())
{
NamedPipeServerStream server = pair.serverStream;
pair.Connect();
Assert.Throws<InvalidOperationException>(() => server.IsMessageComplete);
Assert.Throws<InvalidOperationException>(() => server.WaitForConnection());
await Assert.ThrowsAsync<InvalidOperationException>(() => server.WaitForConnectionAsync()); // fails because allowed connections is set to 1
server.Disconnect();
Assert.Throws<InvalidOperationException>(() => server.Disconnect()); // double disconnect
byte[] buffer = new byte[] { 0, 0, 0, 0 };
if (pair.writeToServer)
{
Assert.Throws<InvalidOperationException>(() => server.Write(buffer, 0, buffer.Length));
Assert.Throws<InvalidOperationException>(() => server.WriteByte(5));
Assert.Throws<InvalidOperationException>(() => { server.WriteAsync(buffer, 0, buffer.Length); });
}
else
{
Assert.Throws<InvalidOperationException>(() => server.Read(buffer, 0, buffer.Length));
Assert.Throws<InvalidOperationException>(() => server.ReadByte());
Assert.Throws<InvalidOperationException>(() => { server.ReadAsync(buffer, 0, buffer.Length); });
}
Assert.Throws<InvalidOperationException>(() => server.Flush());
Assert.Throws<InvalidOperationException>(() => server.IsMessageComplete);
Assert.Throws<InvalidOperationException>(() => server.GetImpersonationUserName());
}
}
[Fact]
public virtual async Task OperationsOnDisconnectedClient()
{
using (NamedPipePair pair = CreateNamedPipePair())
{
NamedPipeServerStream server = pair.serverStream;
NamedPipeClientStream client = pair.clientStream;
pair.Connect();
Assert.Throws<InvalidOperationException>(() => client.IsMessageComplete);
Assert.Throws<InvalidOperationException>(() => client.Connect());
await Assert.ThrowsAsync<InvalidOperationException>(() => client.ConnectAsync());
server.Disconnect();
byte[] buffer = new byte[] { 0, 0, 0, 0 };
if (!pair.writeToServer)
{
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) // writes on Unix may still succeed after other end disconnects, due to socket being used
{
// Pipe is broken
Assert.Throws<IOException>(() => client.Write(buffer, 0, buffer.Length));
Assert.Throws<IOException>(() => client.WriteByte(5));
Assert.Throws<IOException>(() => { client.WriteAsync(buffer, 0, buffer.Length); });
Assert.Throws<IOException>(() => client.Flush());
Assert.Throws<IOException>(() => client.NumberOfServerInstances);
}
}
else
{
// Nothing for the client to read, but no exception throwing
Assert.Equal(0, client.Read(buffer, 0, buffer.Length));
Assert.Equal(-1, client.ReadByte());
if (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) // NumberOfServerInstances not supported on Unix
{
Assert.Throws<PlatformNotSupportedException>(() => client.NumberOfServerInstances);
}
}
Assert.Throws<InvalidOperationException>(() => client.IsMessageComplete);
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // Unix implemented on sockets, where disposal information doesn't propagate
public async Task Windows_OperationsOnNamedServerWithDisposedClient()
{
using (NamedPipePair pair = CreateNamedPipePair())
{
NamedPipeServerStream server = pair.serverStream;
pair.Connect();
pair.clientStream.Dispose();
Assert.Throws<IOException>(() => server.WaitForConnection());
await Assert.ThrowsAsync<IOException>(() => server.WaitForConnectionAsync());
Assert.Throws<IOException>(() => server.GetImpersonationUserName());
}
}
[ConditionalFact(nameof(PlatformDetection) + "." + nameof(PlatformDetection.IsNotWindowsSubsystemForLinux))] // https://github.com/Microsoft/BashOnWindows/issues/1011
[PlatformSpecific(TestPlatforms.AnyUnix)] // Unix implemented on sockets, where disposal information doesn't propagate
public async Task Unix_OperationsOnNamedServerWithDisposedClient()
{
using (NamedPipePair pair = CreateNamedPipePair())
{
NamedPipeServerStream server = pair.serverStream;
pair.Connect();
pair.clientStream.Dispose();
// On Unix, the server still thinks that it is connected after client Disposal.
Assert.Throws<InvalidOperationException>(() => server.WaitForConnection());
await Assert.ThrowsAsync<InvalidOperationException>(() => server.WaitForConnectionAsync());
Assert.NotNull(server.GetImpersonationUserName());
}
}
[Fact]
public void OperationsOnUnconnectedServer()
{
using (NamedPipePair pair = CreateNamedPipePair())
{
NamedPipeServerStream server = pair.serverStream;
// doesn't throw exceptions
PipeTransmissionMode transmitMode = server.TransmissionMode;
Assert.Throws<ArgumentOutOfRangeException>(() => server.ReadMode = (PipeTransmissionMode)999);
byte[] buffer = new byte[] { 0, 0, 0, 0 };
if (pair.writeToServer)
{
Assert.Equal(0, server.OutBufferSize);
Assert.Throws<InvalidOperationException>(() => server.Write(buffer, 0, buffer.Length));
Assert.Throws<InvalidOperationException>(() => server.WriteByte(5));
Assert.Throws<InvalidOperationException>(() => { server.WriteAsync(buffer, 0, buffer.Length); });
}
else
{
Assert.Equal(0, server.InBufferSize);
PipeTransmissionMode readMode = server.ReadMode;
Assert.Throws<InvalidOperationException>(() => server.Read(buffer, 0, buffer.Length));
Assert.Throws<InvalidOperationException>(() => server.ReadByte());
Assert.Throws<InvalidOperationException>(() => { server.ReadAsync(buffer, 0, buffer.Length); });
}
Assert.Throws<InvalidOperationException>(() => server.Disconnect()); // disconnect when not connected
Assert.Throws<InvalidOperationException>(() => server.IsMessageComplete);
}
}
[Fact]
public void OperationsOnUnconnectedClient()
{
using (NamedPipePair pair = CreateNamedPipePair())
{
NamedPipeClientStream client = pair.clientStream;
byte[] buffer = new byte[] { 0, 0, 0, 0 };
if (client.CanRead)
{
Assert.Throws<InvalidOperationException>(() => client.Read(buffer, 0, buffer.Length));
Assert.Throws<InvalidOperationException>(() => client.ReadByte());
Assert.Throws<InvalidOperationException>(() => { client.ReadAsync(buffer, 0, buffer.Length); });
Assert.Throws<InvalidOperationException>(() => client.ReadMode);
Assert.Throws<InvalidOperationException>(() => client.ReadMode = PipeTransmissionMode.Byte);
}
if (client.CanWrite)
{
Assert.Throws<InvalidOperationException>(() => client.Write(buffer, 0, buffer.Length));
Assert.Throws<InvalidOperationException>(() => client.WriteByte(5));
Assert.Throws<InvalidOperationException>(() => { client.WriteAsync(buffer, 0, buffer.Length); });
}
Assert.Throws<InvalidOperationException>(() => client.NumberOfServerInstances);
Assert.Throws<InvalidOperationException>(() => client.TransmissionMode);
Assert.Throws<InvalidOperationException>(() => client.InBufferSize);
Assert.Throws<InvalidOperationException>(() => client.OutBufferSize);
Assert.Throws<InvalidOperationException>(() => client.SafePipeHandle);
}
}
[Fact]
public async Task DisposedServerPipe_Throws_ObjectDisposedException()
{
using (NamedPipePair pair = CreateNamedPipePair())
{
NamedPipeServerStream pipe = pair.serverStream;
pipe.Dispose();
byte[] buffer = new byte[] { 0, 0, 0, 0 };
Assert.Throws<ObjectDisposedException>(() => pipe.Disconnect());
Assert.Throws<ObjectDisposedException>(() => pipe.GetImpersonationUserName());
Assert.Throws<ObjectDisposedException>(() => pipe.WaitForConnection());
await Assert.ThrowsAsync<ObjectDisposedException>(() => pipe.WaitForConnectionAsync());
}
}
[Fact]
public async Task DisposedClientPipe_Throws_ObjectDisposedException()
{
using (NamedPipePair pair = CreateNamedPipePair())
{
pair.Connect();
NamedPipeClientStream pipe = pair.clientStream;
pipe.Dispose();
byte[] buffer = new byte[] { 0, 0, 0, 0 };
Assert.Throws<ObjectDisposedException>(() => pipe.Connect());
await Assert.ThrowsAsync<ObjectDisposedException>(() => pipe.ConnectAsync());
Assert.Throws<ObjectDisposedException>(() => pipe.NumberOfServerInstances);
}
}
[Fact]
public async Task ReadAsync_DisconnectDuringRead_Returns0()
{
using (NamedPipePair pair = CreateNamedPipePair())
{
pair.Connect();
Task<int> readTask;
if (pair.clientStream.CanRead)
{
readTask = pair.clientStream.ReadAsync(new byte[1], 0, 1);
pair.serverStream.Dispose();
}
else
{
readTask = pair.serverStream.ReadAsync(new byte[1], 0, 1);
pair.clientStream.Dispose();
}
Assert.Equal(0, await readTask);
}
}
[PlatformSpecific(TestPlatforms.Windows)] // Unix named pipes are on sockets, where small writes with an empty buffer will succeed immediately
[Fact]
public async Task WriteAsync_DisconnectDuringWrite_Throws()
{
using (NamedPipePair pair = CreateNamedPipePair())
{
pair.Connect();
Task writeTask;
if (pair.clientStream.CanWrite)
{
writeTask = pair.clientStream.WriteAsync(new byte[1], 0, 1);
pair.serverStream.Dispose();
}
else
{
writeTask = pair.serverStream.WriteAsync(new byte[1], 0, 1);
pair.clientStream.Dispose();
}
await Assert.ThrowsAsync<IOException>(() => writeTask);
}
}
[Fact]
[ActiveIssue("dotnet/corefx #16934", TargetFrameworkMonikers.NetFramework)] //Hangs forever in desktop as it doesn't have cancellation support
public async Task Server_ReadWriteCancelledToken_Throws_OperationCanceledException()
{
using (NamedPipePair pair = CreateNamedPipePair())
{
NamedPipeServerStream server = pair.serverStream;
NamedPipeClientStream client = pair.clientStream;
byte[] buffer = new byte[] { 0, 0, 0, 0 };
pair.Connect();
if (server.CanRead && client.CanWrite)
{
var ctx1 = new CancellationTokenSource();
Task<int> serverReadToken = server.ReadAsync(buffer, 0, buffer.Length, ctx1.Token);
ctx1.Cancel();
await Assert.ThrowsAnyAsync<OperationCanceledException>(() => serverReadToken);
ctx1.Cancel();
Assert.True(server.ReadAsync(buffer, 0, buffer.Length, ctx1.Token).IsCanceled);
}
if (server.CanWrite)
{
var ctx1 = new CancellationTokenSource();
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) // On Unix, WriteAsync calls aren't cancelable once initiated
{
Task serverWriteToken = server.WriteAsync(buffer, 0, buffer.Length, ctx1.Token);
ctx1.Cancel();
await Assert.ThrowsAnyAsync<OperationCanceledException>(() => serverWriteToken);
}
ctx1.Cancel();
Assert.True(server.WriteAsync(buffer, 0, buffer.Length, ctx1.Token).IsCanceled);
}
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // P/Invoking to Win32 functions
public async Task CancelTokenOff_Server_ReadWriteCancelledToken_Throws_OperationCanceledException()
{
using (NamedPipePair pair = CreateNamedPipePair())
{
NamedPipeServerStream server = pair.serverStream;
byte[] buffer = new byte[] { 0, 0, 0, 0 };
pair.Connect();
if (server.CanRead)
{
Task serverReadToken = server.ReadAsync(buffer, 0, buffer.Length, CancellationToken.None);
Assert.True(Interop.CancelIoEx(server.SafePipeHandle), "Outer cancellation failed");
await Assert.ThrowsAnyAsync<OperationCanceledException>(() => serverReadToken);
Assert.True(serverReadToken.IsCanceled);
}
if (server.CanWrite)
{
Task serverWriteToken = server.WriteAsync(buffer, 0, buffer.Length, CancellationToken.None);
Assert.True(Interop.CancelIoEx(server.SafePipeHandle), "Outer cancellation failed");
await Assert.ThrowsAnyAsync<OperationCanceledException>(() => serverWriteToken);
Assert.True(serverWriteToken.IsCanceled);
}
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // P/Invoking to Win32 functions
public async Task CancelTokenOn_Server_ReadWriteCancelledToken_Throws_OperationCanceledException()
{
using (NamedPipePair pair = CreateNamedPipePair())
{
NamedPipeServerStream server = pair.serverStream;
byte[] buffer = new byte[] { 0, 0, 0, 0 };
pair.Connect();
if (server.CanRead)
{
var cts = new CancellationTokenSource();
Task serverReadToken = server.ReadAsync(buffer, 0, buffer.Length, cts.Token);
Assert.True(Interop.CancelIoEx(server.SafePipeHandle), "Outer cancellation failed");
await Assert.ThrowsAnyAsync<OperationCanceledException>(() => serverReadToken);
}
if (server.CanWrite)
{
var cts = new CancellationTokenSource();
Task serverWriteToken = server.WriteAsync(buffer, 0, buffer.Length, cts.Token);
Assert.True(Interop.CancelIoEx(server.SafePipeHandle), "Outer cancellation failed");
await Assert.ThrowsAnyAsync<OperationCanceledException>(() => serverWriteToken);
}
}
}
[Fact]
[ActiveIssue("dotnet/corefx #16934", TargetFrameworkMonikers.NetFramework)] //Hangs forever in desktop as it doesn't have cancellation support
public async Task Client_ReadWriteCancelledToken_Throws_OperationCanceledException()
{
using (NamedPipePair pair = CreateNamedPipePair())
{
NamedPipeClientStream client = pair.clientStream;
byte[] buffer = new byte[] { 0, 0, 0, 0 };
pair.Connect();
if (client.CanRead)
{
var ctx1 = new CancellationTokenSource();
Task serverReadToken = client.ReadAsync(buffer, 0, buffer.Length, ctx1.Token);
ctx1.Cancel();
await Assert.ThrowsAnyAsync<OperationCanceledException>(() => serverReadToken);
Assert.True(client.ReadAsync(buffer, 0, buffer.Length, ctx1.Token).IsCanceled);
}
if (client.CanWrite)
{
var ctx1 = new CancellationTokenSource();
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) // On Unix, WriteAsync calls aren't cancelable once initiated
{
Task serverWriteToken = client.WriteAsync(buffer, 0, buffer.Length, ctx1.Token);
ctx1.Cancel();
await Assert.ThrowsAnyAsync<OperationCanceledException>(() => serverWriteToken);
}
ctx1.Cancel();
Assert.True(client.WriteAsync(buffer, 0, buffer.Length, ctx1.Token).IsCanceled);
}
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // P/Invoking to Win32 functions
public async Task CancelTokenOff_Client_ReadWriteCancelledToken_Throws_OperationCanceledException()
{
using (NamedPipePair pair = CreateNamedPipePair())
{
NamedPipeClientStream client = pair.clientStream;
byte[] buffer = new byte[] { 0, 0, 0, 0 };
pair.Connect();
if (client.CanRead)
{
Task clientReadToken = client.ReadAsync(buffer, 0, buffer.Length, CancellationToken.None);
Assert.True(Interop.CancelIoEx(client.SafePipeHandle), "Outer cancellation failed");
await Assert.ThrowsAnyAsync<OperationCanceledException>(() => clientReadToken);
Assert.True(clientReadToken.IsCanceled);
}
if (client.CanWrite)
{
Task clientWriteToken = client.WriteAsync(buffer, 0, buffer.Length, CancellationToken.None);
Assert.True(Interop.CancelIoEx(client.SafePipeHandle), "Outer cancellation failed");
await Assert.ThrowsAnyAsync<OperationCanceledException>(() => clientWriteToken);
Assert.True(clientWriteToken.IsCanceled);
}
}
}
[Fact]
[PlatformSpecific(TestPlatforms.Windows)] // P/Invoking to Win32 functions
public async Task CancelTokenOn_Client_ReadWriteCancelledToken_Throws_OperationCanceledException()
{
using (NamedPipePair pair = CreateNamedPipePair())
{
NamedPipeClientStream client = pair.clientStream;
byte[] buffer = new byte[] { 0, 0, 0, 0 };
pair.Connect();
if (client.CanRead)
{
var cts = new CancellationTokenSource();
Task clientReadToken = client.ReadAsync(buffer, 0, buffer.Length, cts.Token);
Assert.True(Interop.CancelIoEx(client.SafePipeHandle), "Outer cancellation failed");
await Assert.ThrowsAnyAsync<OperationCanceledException>(() => clientReadToken);
}
if (client.CanWrite)
{
var cts = new CancellationTokenSource();
Task clientWriteToken = client.WriteAsync(buffer, 0, buffer.Length, cts.Token);
Assert.True(Interop.CancelIoEx(client.SafePipeHandle), "Outer cancellation failed");
await Assert.ThrowsAnyAsync<OperationCanceledException>(() => clientWriteToken);
}
}
}
[Theory]
[InlineData(true)]
[InlineData(false)]
public async Task ManyConcurrentOperations(bool cancelable)
{
using (NamedPipePair pair = CreateNamedPipePair(PipeOptions.Asynchronous, PipeOptions.Asynchronous))
{
await Task.WhenAll(pair.serverStream.WaitForConnectionAsync(), pair.clientStream.ConnectAsync());
const int NumOps = 100;
const int DataPerOp = 512;
byte[] sendingData = new byte[NumOps * DataPerOp];
byte[] readingData = new byte[sendingData.Length];
new Random().NextBytes(sendingData);
var cancellationToken = cancelable ? new CancellationTokenSource().Token : CancellationToken.None;
Stream reader = pair.writeToServer ? (Stream)pair.clientStream : pair.serverStream;
Stream writer = pair.writeToServer ? (Stream)pair.serverStream : pair.clientStream;
var reads = new Task<int>[NumOps];
var writes = new Task[NumOps];
for (int i = 0; i < reads.Length; i++)
reads[i] = reader.ReadAsync(readingData, i * DataPerOp, DataPerOp, cancellationToken);
for (int i = 0; i < reads.Length; i++)
writes[i] = writer.WriteAsync(sendingData, i * DataPerOp, DataPerOp, cancellationToken);
const int WaitTimeout = 30000;
Assert.True(Task.WaitAll(writes, WaitTimeout));
Assert.True(Task.WaitAll(reads, WaitTimeout));
// The data of each write may not be written atomically, and as such some of the data may be
// interleaved rather than entirely in the order written.
Assert.Equal(sendingData.OrderBy(b => b), readingData.OrderBy(b => b));
}
}
}
[ActiveIssue(22271, TargetFrameworkMonikers.UapNotUapAot)]
public class NamedPipeTest_Simple_ServerInOutRead_ClientInOutWrite : NamedPipeTest_Simple
{
protected override NamedPipePair CreateNamedPipePair(PipeOptions serverOptions, PipeOptions clientOptions)
{
NamedPipePair ret = new NamedPipePair();
string pipeName = GetUniquePipeName();
ret.serverStream = new NamedPipeServerStream(pipeName, PipeDirection.InOut, 1, PipeTransmissionMode.Byte, serverOptions);
ret.clientStream = new NamedPipeClientStream(".", pipeName, PipeDirection.InOut, clientOptions);
ret.writeToServer = false;
return ret;
}
}
[ActiveIssue(22271, TargetFrameworkMonikers.UapNotUapAot)]
public class NamedPipeTest_Simple_ServerInOutWrite_ClientInOutRead : NamedPipeTest_Simple
{
protected override NamedPipePair CreateNamedPipePair(PipeOptions serverOptions, PipeOptions clientOptions)
{
NamedPipePair ret = new NamedPipePair();
string pipeName = GetUniquePipeName();
ret.serverStream = new NamedPipeServerStream(pipeName, PipeDirection.InOut, 1, PipeTransmissionMode.Byte, serverOptions);
ret.clientStream = new NamedPipeClientStream(".", pipeName, PipeDirection.InOut, clientOptions);
ret.writeToServer = true;
return ret;
}
}
[ActiveIssue(22271, TargetFrameworkMonikers.UapNotUapAot)]
public class NamedPipeTest_Simple_ServerInOut_ClientIn : NamedPipeTest_Simple
{
protected override NamedPipePair CreateNamedPipePair(PipeOptions serverOptions, PipeOptions clientOptions)
{
NamedPipePair ret = new NamedPipePair();
string pipeName = GetUniquePipeName();
ret.serverStream = new NamedPipeServerStream(pipeName, PipeDirection.InOut, 1, PipeTransmissionMode.Byte, serverOptions);
ret.clientStream = new NamedPipeClientStream(".", pipeName, PipeDirection.In, clientOptions);
ret.writeToServer = true;
return ret;
}
}
[ActiveIssue(22271, TargetFrameworkMonikers.UapNotUapAot)]
public class NamedPipeTest_Simple_ServerInOut_ClientOut : NamedPipeTest_Simple
{
protected override NamedPipePair CreateNamedPipePair(PipeOptions serverOptions, PipeOptions clientOptions)
{
NamedPipePair ret = new NamedPipePair();
string pipeName = GetUniquePipeName();
ret.serverStream = new NamedPipeServerStream(pipeName, PipeDirection.InOut, 1, PipeTransmissionMode.Byte, serverOptions);
ret.clientStream = new NamedPipeClientStream(".", pipeName, PipeDirection.Out, clientOptions);
ret.writeToServer = false;
return ret;
}
}
[ActiveIssue(22271, TargetFrameworkMonikers.UapNotUapAot)]
public class NamedPipeTest_Simple_ServerOut_ClientIn : NamedPipeTest_Simple
{
protected override NamedPipePair CreateNamedPipePair(PipeOptions serverOptions, PipeOptions clientOptions)
{
NamedPipePair ret = new NamedPipePair();
string pipeName = GetUniquePipeName();
ret.serverStream = new NamedPipeServerStream(pipeName, PipeDirection.Out, 1, PipeTransmissionMode.Byte, serverOptions);
ret.clientStream = new NamedPipeClientStream(".", pipeName, PipeDirection.In, clientOptions);
ret.writeToServer = true;
return ret;
}
}
[ActiveIssue(22271, TargetFrameworkMonikers.UapNotUapAot)]
public class NamedPipeTest_Simple_ServerIn_ClientOut : NamedPipeTest_Simple
{
protected override NamedPipePair CreateNamedPipePair(PipeOptions serverOptions, PipeOptions clientOptions)
{
NamedPipePair ret = new NamedPipePair();
string pipeName = GetUniquePipeName();
ret.serverStream = new NamedPipeServerStream(pipeName, PipeDirection.In, 1, PipeTransmissionMode.Byte, serverOptions);
ret.clientStream = new NamedPipeClientStream(".", pipeName, PipeDirection.Out, clientOptions);
ret.writeToServer = false;
return ret;
}
}
}
| |
using System;
using System.Data;
using System.Data.SqlClient;
using Csla;
using Csla.Data;
namespace Invoices.Business
{
/// <summary>
/// ProductTypeItem (editable child object).<br/>
/// This is a generated <see cref="ProductTypeItem"/> business object.
/// </summary>
/// <remarks>
/// This class is an item of <see cref="ProductTypeColl"/> collection.
/// </remarks>
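/// <example>
/// A minimal usage sketch, assuming the usual generated CSLA factory pattern on the parent
/// collection (the factory and save calls below are illustrative; the exact members depend
/// on the generated <see cref="ProductTypeColl"/> API):
/// <code>
/// var types = ProductTypeColl.GetProductTypeColl(); // fetch the editable collection
/// var item = types.AddNew();                        // runs Child_Create for the new child
/// item.Name = "Hardware";
/// types = types.Save();                             // persists inserts/updates/deletes
/// </code>
/// </example>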
[Serializable]
public partial class ProductTypeItem : BusinessBase<ProductTypeItem>
{
#region Static Fields
private static int _lastId;
#endregion
#region Business Properties
/// <summary>
/// Maintains metadata about <see cref="ProductTypeId"/> property.
/// </summary>
[NotUndoable]
public static readonly PropertyInfo<int> ProductTypeIdProperty = RegisterProperty<int>(p => p.ProductTypeId, "Product Type Id");
/// <summary>
/// Gets the Product Type Id.
/// </summary>
/// <value>The Product Type Id.</value>
public int ProductTypeId
{
get { return GetProperty(ProductTypeIdProperty); }
}
/// <summary>
/// Maintains metadata about <see cref="Name"/> property.
/// </summary>
public static readonly PropertyInfo<string> NameProperty = RegisterProperty<string>(p => p.Name, "Name");
/// <summary>
/// Gets or sets the Name.
/// </summary>
/// <value>The Name.</value>
public string Name
{
get { return GetProperty(NameProperty); }
set { SetProperty(NameProperty, value); }
}
#endregion
#region Constructor
/// <summary>
/// Initializes a new instance of the <see cref="ProductTypeItem"/> class.
/// </summary>
/// <remarks> Do not use to create a Csla object. Use factory methods instead.</remarks>
[System.ComponentModel.EditorBrowsable(System.ComponentModel.EditorBrowsableState.Never)]
public ProductTypeItem()
{
// Use factory methods and do not use direct creation.
Saved += OnProductTypeItemSaved;
// show the framework that this is a child object
MarkAsChild();
}
#endregion
#region Data Access
/// <summary>
/// Loads default values for the <see cref="ProductTypeItem"/> object properties.
/// </summary>
[RunLocal]
protected override void Child_Create()
{
LoadProperty(ProductTypeIdProperty, System.Threading.Interlocked.Decrement(ref _lastId));
var args = new DataPortalHookArgs();
OnCreate(args);
base.Child_Create();
}
/// <summary>
/// Loads a <see cref="ProductTypeItem"/> object from the given SafeDataReader.
/// </summary>
/// <param name="dr">The SafeDataReader to use.</param>
private void Child_Fetch(SafeDataReader dr)
{
// Value properties
LoadProperty(ProductTypeIdProperty, dr.GetInt32("ProductTypeId"));
LoadProperty(NameProperty, dr.GetString("Name"));
var args = new DataPortalHookArgs(dr);
OnFetchRead(args);
// check all object rules and property rules
BusinessRules.CheckRules();
}
/// <summary>
/// Inserts a new <see cref="ProductTypeItem"/> object in the database.
/// </summary>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_Insert()
{
using (var ctx = ConnectionManager<SqlConnection>.GetManager(Database.InvoicesConnection, false))
{
using (var cmd = new SqlCommand("dbo.AddProductTypeItem", ctx.Connection))
{
cmd.CommandType = CommandType.StoredProcedure;
cmd.Parameters.AddWithValue("@ProductTypeId", ReadProperty(ProductTypeIdProperty)).Direction = ParameterDirection.Output;
cmd.Parameters.AddWithValue("@Name", ReadProperty(NameProperty)).DbType = DbType.String;
var args = new DataPortalHookArgs(cmd);
OnInsertPre(args);
cmd.ExecuteNonQuery();
OnInsertPost(args);
LoadProperty(ProductTypeIdProperty, (int) cmd.Parameters["@ProductTypeId"].Value);
}
}
}
/// <summary>
/// Updates in the database all changes made to the <see cref="ProductTypeItem"/> object.
/// </summary>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_Update()
{
if (!IsDirty)
return;
using (var ctx = ConnectionManager<SqlConnection>.GetManager(Database.InvoicesConnection, false))
{
using (var cmd = new SqlCommand("dbo.UpdateProductTypeItem", ctx.Connection))
{
cmd.CommandType = CommandType.StoredProcedure;
cmd.Parameters.AddWithValue("@ProductTypeId", ReadProperty(ProductTypeIdProperty)).DbType = DbType.Int32;
cmd.Parameters.AddWithValue("@Name", ReadProperty(NameProperty)).DbType = DbType.String;
var args = new DataPortalHookArgs(cmd);
OnUpdatePre(args);
cmd.ExecuteNonQuery();
OnUpdatePost(args);
}
}
}
/// <summary>
/// Self deletes the <see cref="ProductTypeItem"/> object from database.
/// </summary>
[Transactional(TransactionalTypes.TransactionScope)]
private void Child_DeleteSelf()
{
using (var ctx = ConnectionManager<SqlConnection>.GetManager(Database.InvoicesConnection, false))
{
using (var cmd = new SqlCommand("dbo.DeleteProductTypeItem", ctx.Connection))
{
cmd.CommandType = CommandType.StoredProcedure;
cmd.Parameters.AddWithValue("@ProductTypeId", ReadProperty(ProductTypeIdProperty)).DbType = DbType.Int32;
var args = new DataPortalHookArgs(cmd);
OnDeletePre(args);
cmd.ExecuteNonQuery();
OnDeletePost(args);
}
}
}
#endregion
#region Saved Event
// TODO: edit "ProductTypeItem.cs", uncomment the "OnDeserialized" method and add the following line:
// TODO: Saved += OnProductTypeItemSaved;
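// A hedged sketch of that hook, kept commented out; the exact OnDeserialized signature
// depends on the Csla version in use:
// protected override void OnDeserialized(System.Runtime.Serialization.StreamingContext context)
// {
//     base.OnDeserialized(context);
//     Saved += OnProductTypeItemSaved;
// }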
private void OnProductTypeItemSaved(object sender, Csla.Core.SavedEventArgs e)
{
if (ProductTypeItemSaved != null)
ProductTypeItemSaved(sender, e);
}
/// <summary> Use this event to signal a <see cref="ProductTypeItem"/> object was saved.</summary>
public static event EventHandler<Csla.Core.SavedEventArgs> ProductTypeItemSaved;
#endregion
#region DataPortal Hooks
/// <summary>
/// Occurs after setting all defaults for object creation.
/// </summary>
partial void OnCreate(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Delete, after setting query parameters and before the delete operation.
/// </summary>
partial void OnDeletePre(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Delete, after the delete operation, before Commit().
/// </summary>
partial void OnDeletePost(DataPortalHookArgs args);
/// <summary>
/// Occurs after setting query parameters and before the fetch operation.
/// </summary>
partial void OnFetchPre(DataPortalHookArgs args);
/// <summary>
/// Occurs after the fetch operation (object or collection is fully loaded and set up).
/// </summary>
partial void OnFetchPost(DataPortalHookArgs args);
/// <summary>
/// Occurs after the low level fetch operation, before the data reader is destroyed.
/// </summary>
partial void OnFetchRead(DataPortalHookArgs args);
/// <summary>
/// Occurs after setting query parameters and before the update operation.
/// </summary>
partial void OnUpdatePre(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Update, after the update operation, before setting back row identifiers (RowVersion) and Commit().
/// </summary>
partial void OnUpdatePost(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Insert, after setting query parameters and before the insert operation.
/// </summary>
partial void OnInsertPre(DataPortalHookArgs args);
/// <summary>
/// Occurs in DataPortal_Insert, after the insert operation, before setting back row identifiers (ID and RowVersion) and Commit().
/// </summary>
partial void OnInsertPost(DataPortalHookArgs args);
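// A hedged sketch of implementing one of these partial hooks in a hand-written partial class
// (the file name and body are illustrative only):
//
// // ProductTypeItem.Custom.cs
// public partial class ProductTypeItem
// {
//     partial void OnInsertPre(DataPortalHookArgs args)
//     {
//         // e.g. inspect or adjust the command before dbo.AddProductTypeItem executes
//     }
// }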
#endregion
}
}
| |
//Copyright © 2014 Sony Computer Entertainment America LLC. See License.txt.
using System;
using System.Collections.Generic;
using System.ComponentModel.Composition;
using System.Drawing;
using System.IO;
using System.Windows.Forms;
using Sce.Atf;
using Sce.Atf.Applications;
using Sce.Atf.Controls.PropertyEditing;
namespace FileExplorerSample
{
/// <summary>
/// The FileViewer displays the contents of a folder using a ListView control. It uses
/// the ATF ListViewAdapter to synchronize the contents of the control to the folder. In
/// order to use ListViewAdapter, it uses a private class, FileListView, to implement
/// the required IListView/IItemView interfaces on a file folder. IObservableContext is
/// also implemented and could be hooked up to file system events using file watcher
/// support in .NET.</summary>
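/// <example>
/// A minimal usage sketch, assuming the component has been composed by MEF and the folder
/// exists (the path below is illustrative only):
/// <code>
/// fileViewer.Path = @"C:\Temp";               // point the viewer at a folder; the list reloads
/// string state = fileViewer.ListViewSettings; // column layout persisted via the settings service
/// </code>
/// </example>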
[Export(typeof(IInitializable))]
[Export(typeof(FileViewer))]
[PartCreationPolicy(CreationPolicy.Shared)]
public class FileViewer : IInitializable
{
/// <summary>
/// Constructor that creates a standard WinForms ListView control</summary>
/// <param name="mainForm">Main form</param>
[ImportingConstructor]
public FileViewer(MainForm mainForm)
{
m_mainForm = mainForm;
// create a standard WinForms ListView control
m_listView = new ListView();
m_listView.Dock = DockStyle.Fill;
m_listView.Text = "File Viewer";
m_listView.BackColor = SystemColors.Window;
m_listView.SmallImageList = ResourceUtil.GetImageList16();
m_listView.AllowColumnReorder = true;
// create an adapter to drive the ListView control
m_listViewAdapter = new ListViewAdapter(m_listView);
m_fileListView = new FileListView();
}
[Import(AllowDefault=true)] // optional service
private ISettingsService m_settingsService = null;
[ImportMany] // gets all file data extensions
private IEnumerable<Lazy<IFileDataExtension>> m_extensions = null;
#region IInitializable Members
/// <summary>
/// Finishes initializing the component by creating the data extension list and setting control parameters</summary>
void IInitializable.Initialize()
{
// pass all file data extensions to adapter
List<IFileDataExtension> list = new List<IFileDataExtension>();
foreach (Lazy<IFileDataExtension> extension in m_extensions)
list.Add(extension.Value);
m_fileListView.FileDataExtensions = list.ToArray();
// set the adapter's ListView to an adapter that returns directory contents
m_listViewAdapter.ListView = m_fileListView;
m_mainForm.SplitContainer.Panel2.Controls.Add(m_listView);
// the settings service is an optional import (AllowDefault = true), so guard against its absence
if (m_settingsService != null)
    m_settingsService.RegisterSettings(this, new BoundPropertyDescriptor(this, () => ListViewSettings, "ListViewSettings", null, null));
}
#endregion
/// <summary>
/// Gets or sets the path to the folder whose contents are displayed</summary>
public string Path
{
get { return m_fileListView.Path; }
set
{
m_fileListView.Path = value;
m_mainForm.Text = value;
}
}
/// <summary>
/// Gets or sets the ListView's persisted state so that it can be saved and restored. This property
/// is registered with the SettingsService.</summary>
public string ListViewSettings
{
get { return m_listViewAdapter.Settings; }
set { m_listViewAdapter.Settings = value; }
}
/// <summary>
/// Adapts a directory to an observable list of items</summary>
private class FileListView : IListView, IItemView, IObservableContext
{
public FileListView()
{
// inhibit compiler warnings; we never raise these events, though it would be
// possible, using the file watcher support in .Net
if (ItemInserted == null) return;
if (ItemRemoved == null) return;
if (ItemChanged == null) return;
}
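// A hedged sketch of how the file watcher support mentioned above could raise these
// events (kept as a comment; the exact ItemInsertedEventArgs/ItemRemovedEventArgs
// constructor signatures depend on the ATF version in use):
//
// var watcher = new FileSystemWatcher(m_path) { EnableRaisingEvents = true };
// watcher.Created += (s, e) => ItemInserted.Raise(this,
//     new ItemInsertedEventArgs<object>(-1, new FileInfo(e.FullPath)));
// watcher.Deleted += (s, e) => ItemRemoved.Raise(this,
//     new ItemRemovedEventArgs<object>(-1, new FileInfo(e.FullPath)));
// watcher.Changed += (s, e) => ItemChanged.Raise(this,
//     new ItemChangedEventArgs<object>(new FileInfo(e.FullPath)));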
/// <summary>
/// Gets or sets the directory path to adapt to a list</summary>
public string Path
{
get { return m_path; }
set
{
if (m_path != value)
{
m_path = value;
Reloaded.Raise(this, EventArgs.Empty);
}
}
}
private string m_path;
public IFileDataExtension[] FileDataExtensions
{
get { return m_fileDataExtensions; }
set { m_fileDataExtensions = value; }
}
private IFileDataExtension[] m_fileDataExtensions;
#region IListView Members
/// <summary>
/// Gets names for file list view columns</summary>
public string[] ColumnNames
{
get
{
string[] result = new string[m_fileDataExtensions.Length];
for (int i = 0; i < result.Length; i++)
result[i] = m_fileDataExtensions[i].ColumnName;
return result;
}
}
/// <summary>
/// Gets the items in the list</summary>
public IEnumerable<object> Items
{
get
{
if (m_path == null ||
!Directory.Exists(m_path))
{
return EmptyEnumerable<object>.Instance;
}
DirectoryInfo directory = new DirectoryInfo(m_path);
DirectoryInfo[] subDirectories = null;
try
{
subDirectories = directory.GetDirectories();
}
catch
{
}
if (subDirectories == null)
subDirectories = new DirectoryInfo[0];
FileInfo[] files = null;
try
{
files = directory.GetFiles();
}
catch
{
}
if (files == null)
files = new FileInfo[0];
List<object> children = new List<object>(subDirectories.Length + files.Length);
children.AddRange(subDirectories);
children.AddRange(files);
return children;
}
}
#endregion
#region IItemView Members
/// <summary>
/// Gets display info for the item</summary>
/// <param name="item">Item</param>
/// <param name="info">Display info for item</param>
public void GetInfo(object item, ItemInfo info)
{
// set the first column info (name)
FileSystemInfo fileSystemInfo = item as FileSystemInfo;
if (fileSystemInfo is DirectoryInfo)
{
info.Label = fileSystemInfo.Name;
info.ImageIndex = info.GetImageList().Images.IndexOfKey(Resources.FolderImage);
}
else if (fileSystemInfo is FileInfo)
{
info.Label = fileSystemInfo.Name;
info.ImageIndex = info.GetImageList().Images.IndexOfKey(Resources.DocumentImage);
info.IsLeaf = true;
}
// set the 2nd and 3rd column info (size and creation time)
info.Properties = new string[m_fileDataExtensions.Length-1];
for (int i = 0; i < info.Properties.Length; i++)
info.Properties[i] = m_fileDataExtensions[i+1].GetValue(fileSystemInfo);
}
#endregion
#region IObservableContext Members
/// <summary>
/// Event that is raised when an item is inserted</summary>
public event EventHandler<ItemInsertedEventArgs<object>> ItemInserted;
/// <summary>
/// Event that is raised when an item is removed</summary>
public event EventHandler<ItemRemovedEventArgs<object>> ItemRemoved;
/// <summary>
/// Event that is raised when an item is changed</summary>
public event EventHandler<ItemChangedEventArgs<object>> ItemChanged;
/// <summary>
/// Event that is raised when the collection has been reloaded</summary>
public event EventHandler Reloaded;
#endregion
static FileListView()
{
#pragma warning disable 0219
string dummy = Resources.FolderImage; // force initialization of image resources
}
}
private MainForm m_mainForm;
private ListView m_listView;
private FileListView m_fileListView;
private ListViewAdapter m_listViewAdapter;
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Text;
using System.Threading;
using Apache.Geode.Client.Tests;
using Apache.Geode.DUnitFramework;
namespace Apache.Geode.Client.FwkLib
{
using Apache.Geode.Client;
//using Region = Apache.Geode.Client.IRegion<Object, Object>;
public class InitPerfStat : ClientTask
{
public Int32 m_cnt;
public static PerfStat[] perfstat = new PerfStat[10];
public InitPerfStat()
: base()
{
m_cnt = 0;
}
public override void DoTask(int iters, object data)
{
Int32 localcnt = m_cnt;
Interlocked.Increment(ref m_cnt);
perfstat[localcnt] = new PerfStat(Thread.CurrentThread.ManagedThreadId);
}
}
public class PutAllTask<TKey, TVal> : ClientTask
{
#region Private members
private IRegion<TKey, TVal> m_region;
private TKey[] m_keys;
private TVal[] m_values;
#endregion
#region Public accessors
public TVal[] Values
{
get
{
return m_values;
}
set
{
m_values = value;
}
}
#endregion
public PutAllTask(IRegion<TKey, TVal> region, TKey[] keys,
TVal[] values)
: base()
{
m_region = region as IRegion<TKey, TVal>;
m_keys = keys as TKey[];
m_values = values as TVal[];
}
public override void DoTask(int iters, object data)
{
if (m_keys != null && m_keys.Length > 0)
{
int numKeys = m_keys.Length;
int offset = Util.Rand(numKeys);
int count = offset;
IDictionary<TKey, TVal> map = new Dictionary<TKey, TVal>();
//CacheableHashMap map = new CacheableHashMap();
map.Clear();
Util.Log("PutAllTask::DoTask: starting {0} iterations.", iters);
while (Running && (iters-- != 0))
{
int idx = count % numKeys;
try
{
map.Add(m_keys[idx], m_values[idx]);
}
catch (Exception ex)
{
Util.Log(Util.LogLevel.Error,
"Exception while putting key[{0}] for region {1} in iteration " +
"{2}: {3}", idx, m_region.Name, (count - offset), ex);
throw;
}
count++;
}
DateTime startTime = DateTime.Now;
m_region.PutAll(map, 60);
DateTime endTime = DateTime.Now;
TimeSpan elapsedTime = endTime - startTime;
FwkTest<TKey, TVal>.CurrentTest.FwkInfo("Time Taken to execute putAll for {0}" +
" is {1}ms", numKeys, elapsedTime.TotalMilliseconds);
Interlocked.Add(ref m_iters, count - offset);
}
}
}
public class GetTask<TKey, TVal> : ClientTask
{
#region Private members
private IRegion<TKey, TVal> m_region;
private TKey[] m_keys;
bool m_isMainWorkLoad;
public Int32 m_cnt;
#endregion
public GetTask(IRegion<TKey, TVal> region, TKey[] keys, bool isMainWorkLoad)
: base()
{
m_region = region as IRegion<TKey, TVal>;
m_keys = keys as TKey[];
m_isMainWorkLoad = isMainWorkLoad;
m_cnt = 0;
}
public override void DoTask(int iters, object data)
{
if (m_keys != null && m_keys.Length > 0)
{
Int32 localcnt = m_cnt;
Interlocked.Increment(ref m_cnt);
int numKeys = m_keys.Length;
int offset = Util.Rand(numKeys);
int count = offset;
long startTime;
while (Running && (iters-- != 0))
{
object val = null;
int idx = count % numKeys;
try
{
startTime = InitPerfStat.perfstat[localcnt].StartGet();
val = m_region[m_keys[idx]];
//val = m_region.Get(m_keys[idx],null);
InitPerfStat.perfstat[localcnt].EndGet(startTime, m_isMainWorkLoad);
}
catch (Exception ex)
{
Util.Log(Util.LogLevel.Error,
"Exception while getting key[{0}] for region {1} in iteration " +
"{2}: {3}", idx, m_region.Name, (count - offset), ex);
throw;
}
if (val == null)
{
string exStr = string.Format("Key[{0}] not found in region {1}",
m_keys[idx], m_region.Name);
Util.Log(Util.LogLevel.Error, exStr);
throw new EntryNotFoundException(exStr);
}
count++;
}
Interlocked.Add(ref m_iters, count - offset);
}
}
}
public class CreateTasks<TKey, TVal> : ClientTask
{
#region Private members
private IRegion<TKey, TVal> m_region;
private TKey[] m_keys;
private TVal[] m_values;
private Int32 m_cnt;
private Int32 m_size;
private string m_objectType;
private bool m_encodeKey;
private bool m_encodeTimestamp;
private bool m_isMainWorkLoad;
private Int32 m_assetAcSize;
private Int32 m_assetmaxVal;
#endregion
#region Public accessors
public TVal[] Values
{
get
{
return m_values;
}
set
{
m_values = value;
}
}
#endregion
public CreateTasks(IRegion<TKey, TVal> region, TKey[] keys, Int32 size, string objectType,
bool encodeKey, bool encodeTimestamp, bool isMainWorkLoad, Int32 assetACsize, Int32 assetMaxVal)
: base()
{
m_region = region as IRegion<TKey, TVal>;
m_keys = keys as TKey[];
m_values = null;
m_cnt = 0;
m_size = size;
m_objectType = objectType;
m_encodeKey = encodeKey;
m_encodeTimestamp = encodeTimestamp;
m_isMainWorkLoad = isMainWorkLoad;
m_assetAcSize = assetACsize;
m_assetmaxVal = assetMaxVal;
}
public override void DoTask(int iters, object data)
{
if (m_keys != null && m_keys.Length > 0)
{
Int32 localcnt = m_cnt;
Interlocked.Increment(ref m_cnt);
int numKeys = m_keys.Length;
int offset = Util.Rand(numKeys);
int count = offset;
long startTime;
Util.Log("CreateTasks::DoTask: starting {0} iterations.", iters);
while (Running && (iters-- != 0))
{
int idx = count % numKeys;
try
{
TVal obj = ObjectHelper<TKey, TVal>.CreateObject(m_objectType, m_size, m_encodeKey,
m_encodeTimestamp, m_assetAcSize, m_assetmaxVal, idx);
startTime = InitPerfStat.perfstat[localcnt].StartCreate();
//Util.Log("Create Keys is {0} object is {1}", m_keys[idx],obj.ToString());
m_region.Add(m_keys[idx], obj);
InitPerfStat.perfstat[localcnt].EndCreate(startTime, m_isMainWorkLoad);
}
catch (Exception ex)
{
Util.Log(Util.LogLevel.Error,
"Exception while putting key[{0}] for region {1} in iteration " +
"{2}: {3}", idx, m_region.Name, (count - offset), ex);
throw;
}
count++;
}
Interlocked.Add(ref m_iters, count - offset);
}
}
}
public class PutTasks<TKey, TVal> : ClientTask
{
#region Private members
private IRegion<TKey, TVal> m_region;
private TKey[] m_keys;
private Int32 m_cnt;
private Int32 m_size;
private string m_objectType;
private bool m_encodeKey;
private bool m_encodeTimestamp;
private bool m_isMainWorkLoad;
#endregion
public PutTasks(IRegion<TKey, TVal> region, TKey[] keys, Int32 size, string objectType,
bool encodeKey, bool encodeTimestamp, bool isMainWorkLoad)
: base()
{
m_region = region as IRegion<TKey, TVal>;
m_keys = keys as TKey[];
m_cnt = 0;
m_size = size;
m_objectType = objectType;
m_encodeKey = encodeKey;
m_encodeTimestamp = encodeTimestamp;
m_isMainWorkLoad = isMainWorkLoad;
}
public override void DoTask(int iters, object data)
{
if (m_keys != null && m_keys.Length > 0)
{
Int32 localcnt = m_cnt;
Interlocked.Increment(ref m_cnt);
int numKeys = m_keys.Length;
int offset = Util.Rand(numKeys);
int count = offset;
long startTime;
Util.Log("PutTasks::DoTask: starting {0} iterations.", iters);
while (Running && (iters-- != 0))
{
int idx = count % numKeys;
try
{
TVal obj = ObjectHelper<TKey, TVal>.CreateObject(m_objectType, m_size, m_encodeKey,
m_encodeTimestamp, 0, 0, 0);
startTime = InitPerfStat.perfstat[localcnt].StartPut();
m_region[m_keys[idx]] = obj;//.Put(m_keys[idx], obj);
InitPerfStat.perfstat[localcnt].EndPut(startTime, m_isMainWorkLoad);
}
catch (Exception ex)
{
Util.Log(Util.LogLevel.Error,
"Exception while putting key[{0}] for region {1} in iteration " +
"{2}: {3}", idx, m_region.Name, (count - offset), ex);
throw;
}
count++;
}
Interlocked.Add(ref m_iters, count - offset);
}
}
}
public class MeteredPutTask<TKey, TVal> : ClientTask
{
#region Private members
private IRegion<TKey, TVal> m_region;
private TKey[] m_keys;
private int m_opsSec;
private Int32 m_cnt;
private Int32 m_size;
private string m_objectType;
private bool m_encodeKey;
private bool m_encodeTimestamp;
private bool m_isMainWorkLoad;
#endregion
public MeteredPutTask(IRegion<TKey, TVal> region, TKey[] keys, Int32 size, string objectType,
bool encodeKey, bool encodeTimestamp, bool isMainWorkLoad, int opsSec)
: base()
{
m_region = region as IRegion<TKey, TVal>;
m_keys = keys as TKey[];
m_opsSec = opsSec;
m_cnt = 0;
m_size = size;
m_objectType = objectType;
m_encodeKey = encodeKey;
m_encodeTimestamp = encodeTimestamp;
m_isMainWorkLoad = isMainWorkLoad;
}
public override void DoTask(int iters, object data)
{
if (m_keys != null && m_keys.Length > 0)
{
Int32 localcnt = m_cnt;
Interlocked.Increment(ref m_cnt);
int numKeys = m_keys.Length;
int offset = Util.Rand(numKeys);
int count = offset;
long startTime;
int idx;
PaceMeter pm = new PaceMeter(m_opsSec);
while (Running && (iters-- != 0))
{
idx = count % numKeys;
try
{
TVal obj = ObjectHelper<TKey, TVal>.CreateObject(m_objectType, m_size, m_encodeKey,
m_encodeTimestamp, 0, 0, 0);
startTime = InitPerfStat.perfstat[localcnt].StartPut();
m_region[m_keys[idx]] = obj;//.Put(m_keys[idx], obj);
InitPerfStat.perfstat[localcnt].EndPut(startTime, m_isMainWorkLoad);
pm.CheckPace();
}
catch (Exception ex)
{
Util.Log(Util.LogLevel.Error,
"Exception while putting key[{0}] for region {1} in iteration " +
"{2}: {3}", idx, m_region.Name, (count - offset), ex);
throw;
}
count++;
}
Interlocked.Add(ref m_iters, count - offset);
}
}
}
public class PutGetMixTask<TKey, TVal> : ClientTask
{
#region Private members
private IRegion<TKey, TVal> m_region;
private TKey[] m_keys;
private Int32 m_cnt;
private Int32 m_size;
private string m_objectType;
private bool m_encodeKey;
private bool m_encodeTimestamp;
private bool m_isMainWorkLoad;
private Int32 m_putPercentage;
#endregion
public PutGetMixTask(IRegion<TKey, TVal> region, TKey[] keys, Int32 size, string objectType,
bool encodeKey, bool encodeTimestamp, bool isMainWorkLoad,Int32 putpercentage)
: base()
{
m_region = region as IRegion<TKey, TVal>;
m_keys = keys as TKey[];
m_cnt = 0;
m_size = size;
m_objectType = objectType;
m_encodeKey = encodeKey;
m_encodeTimestamp = encodeTimestamp;
m_isMainWorkLoad = isMainWorkLoad;
m_putPercentage = putpercentage;
}
public override void DoTask(int iters, object data)
{
if (m_keys != null && m_keys.Length > 0)
{
Int32 localcnt = m_cnt;
Interlocked.Increment(ref m_cnt);
int numKeys = m_keys.Length;
int offset = Util.Rand(numKeys);
int count = offset;
long startTime;
Util.Log("PutGetMixTask::DoTask: starting {0} iterations.", iters);
while (Running && (iters-- != 0))
{
int n = Util.Rand(1, 100);
int idx = count % numKeys;
if (n < m_putPercentage)
{
TVal obj = ObjectHelper<TKey, TVal>.CreateObject(m_objectType, m_size, m_encodeKey,
m_encodeTimestamp, 0, 0, 0);
startTime = InitPerfStat.perfstat[localcnt].StartPut();
m_region[m_keys[idx]] = obj;//.Put(m_keys[idx], obj);
InitPerfStat.perfstat[localcnt].EndPut(startTime, m_isMainWorkLoad);
}
else
{
TVal val = default(TVal);
startTime = InitPerfStat.perfstat[localcnt].StartGet();
val = m_region[m_keys[idx]];
InitPerfStat.perfstat[localcnt].EndGet(startTime, m_isMainWorkLoad);
if (val == null)
{
string exStr = string.Format("Key[{0}] not found in region {1}",
m_keys[idx], m_region.Name);
Util.Log(Util.LogLevel.Error, exStr);
throw new EntryNotFoundException(exStr);
}
}
count++;
}
Interlocked.Add(ref m_iters, count - offset);
}
}
}
public class RegionQueryTask<TKey, TVal> : ClientTask
{
private IRegion<TKey, TVal> m_region;
private Int32 m_cnt;
private string m_queryString;
public RegionQueryTask(IRegion<TKey, TVal> region, string queryString)
: base()
{
m_region = region as IRegion<TKey, TVal>;
m_cnt = 0;
m_queryString = queryString;
}
public override void DoTask(int iters, object data)
{
Int32 localcnt = m_cnt;
Interlocked.Increment(ref m_cnt);
int offset = Util.Rand(100);
int count = offset;
long startTime;
while (Running && (iters-- != 0))
{
startTime = InitPerfStat.perfstat[localcnt].StartQuery();
ISelectResults<object> sptr = m_region.Query<object>(m_queryString, 600);
InitPerfStat.perfstat[localcnt].EndQuery(startTime, false);
count++;
}
Interlocked.Add(ref m_iters, count - offset);
}
}
public class PutBatchObjectTask<TKey, TVal> : ClientTask
{
#region Private members
private IRegion<TKey, TVal> m_region;
private TKey[] m_keys;
private Int32 m_cnt;
private Int32 m_size;
private string m_objectType;
private bool m_encodeKey;
private bool m_encodeTimestamp;
private bool m_isMainWorkLoad;
private Int32 m_batchSize;
private Int32 m_batchObjSize;
#endregion
public PutBatchObjectTask(IRegion<TKey, TVal> region, TKey[] keys, Int32 size, string objectType,
bool encodeKey, bool encodeTimestamp, bool isMainWorkLoad,Int32 batchSize, Int32 objsize)
: base()
{
m_region = region as IRegion<TKey, TVal>;
m_keys = keys as TKey[];
m_cnt = 0;
m_size = size;
m_objectType = objectType;
m_encodeKey = encodeKey;
m_encodeTimestamp = encodeTimestamp;
m_isMainWorkLoad = isMainWorkLoad;
m_batchSize = batchSize;
m_batchObjSize = objsize;
}
public override void DoTask(int iters, object data)
{
if (m_keys != null && m_keys.Length > 0)
{
Int32 localcnt = m_cnt;
Interlocked.Increment(ref m_cnt);
int numKeys = m_keys.Length;
int offset = Util.Rand(numKeys);
int count = offset;
long startTime;
Util.Log("PutBatchObjectTask::DoTask: starting {0} iterations.", iters);
while (Running && (iters-- != 0))
{
int idx = count % numKeys;
try
{
TVal obj = ObjectHelper<TKey, TVal>.CreateObject(m_objectType, m_size, m_encodeKey, m_encodeTimestamp,
m_batchSize, m_batchObjSize, idx);
startTime = InitPerfStat.perfstat[localcnt].StartPut();
m_region[m_keys[idx]] = obj;
InitPerfStat.perfstat[localcnt].EndPut(startTime, m_isMainWorkLoad);
}
catch (Exception ex)
{
Util.Log(Util.LogLevel.Error,
"Exception while putting key[{0}] for region {1} in iteration " +
"{2}: {3}", idx, m_region.Name, (count - offset), ex);
throw;
}
count++;
}
Interlocked.Add(ref m_iters, count - offset);
}
}
}
public class CreatePutAllMap<TKey, TVal> : ClientTask
{
#region Private members
private IRegion<TKey, TVal> m_region;
private TKey[] m_keys;
private Int32 m_cnt;
private Int32 m_size;
private string m_objectType;
private List<IDictionary<TKey, TVal>> m_maps;
private bool m_encodeKey;
private bool m_encodeTimestamp;
private bool m_isMainWorkLoad;
#endregion
public CreatePutAllMap(IRegion<TKey, TVal> region, TKey[] keys, Int32 size, string objectType,
List<IDictionary<TKey, TVal>> maps, bool encodeKey, bool encodeTimestamp, bool isMainWorkLoad)
: base()
{
m_region = region as IRegion<TKey, TVal>;
m_keys = keys as TKey[];
m_cnt = 0;
m_size = size;
m_objectType = objectType;
m_maps = maps;
m_encodeKey = encodeKey;
m_encodeTimestamp = encodeTimestamp;
m_isMainWorkLoad = isMainWorkLoad;
}
public override void DoTask(int iters, object data)
{
if (m_keys != null && m_keys.Length > 0)
{
Int32 localcnt = m_cnt;
Interlocked.Increment(ref m_cnt);
int numKeys = m_keys.Length;
int offset = Util.Rand(numKeys);
int count = offset;
IDictionary<TKey,TVal> hmoc = new Dictionary<TKey,TVal>();
lock (m_maps)
{
m_maps.Add(hmoc);
}
Util.Log("CreatePutAllMap::DoTask: starting {0} iterations. size of map list {1}", iters,m_maps.Count);
while (Running && (iters-- != 0))
{
int idx = count % numKeys;
try
{
TVal obj = ObjectHelper<TKey, TVal>.CreateObject(m_objectType, m_size, m_encodeKey, m_encodeTimestamp, 0, 0, 0);
//Util.Log("rjk CreatePutAllMap key[{0}] is {1}", idx, m_keys[idx]);
((IDictionary<object,object>)(m_maps[localcnt])).Add(m_keys[idx], obj);
}
catch (Exception ex)
{
Util.Log(Util.LogLevel.Error,
"Exception while putting key[{0}] for region {1} in iteration " +
"{2}: {3}", idx, m_region.Name, (count - offset), ex);
throw;
}
count++;
}
Interlocked.Add(ref m_iters, count - offset);
}
}
}
public class PutAllMap<TKey, TVal> : ClientTask
{
#region Private members
private IRegion<TKey, TVal> m_region;
private TKey[] m_keys;
private Int32 m_cnt;
private Int32 m_size;
private string m_objectType;
private List<IDictionary<TKey, TVal>> m_maps;
private bool m_encodeKey;
private bool m_encodeTimestamp;
private bool m_isMainWorkLoad;
#endregion
public PutAllMap(IRegion<TKey, TVal> region, TKey[] keys, Int32 size, string objectType,
List<IDictionary<TKey, TVal>> maps, bool encodeKey, bool encodeTimestamp, bool isMainWorkLoad)
: base()
{
m_region = region as IRegion<TKey, TVal>;
m_keys = keys as TKey[];
m_cnt = 0;
m_size = size;
m_objectType = objectType;
m_maps = maps as List<IDictionary<TKey, TVal>>;
m_encodeKey = encodeKey;
m_encodeTimestamp = encodeTimestamp;
m_isMainWorkLoad = isMainWorkLoad;
}
public override void DoTask(int iters, object data)
{
if (m_keys != null && m_keys.Length > 0)
{
Int32 localcnt = m_cnt;
Interlocked.Increment(ref m_cnt);
int numKeys = m_keys.Length;
int offset = Util.Rand(numKeys);
int count = offset;
long startTime;
Util.Log("PutAllMap::DoTask: starting {0} iterations. size of map list {1}", iters,m_maps.Count);
while (Running && (iters-- != 0))
{
try
{
startTime = InitPerfStat.perfstat[localcnt].StartPut();
/*
foreach (CacheableHashMap map in m_maps)
{
Util.Log("PutAllMap:: mape keys = {0} size ={1}", map.Keys,map.Count);
}
CacheableHashMap putAllmap;
lock (m_maps)
{
putAllmap = m_maps[localcnt];
}
foreach (ICacheableKey key in putAllmap.Keys)
{
Util.Log("PutAllMap:: key = {0} ", key);
}
foreach (IGeodeSerializable val in putAllmap.Values)
{
Util.Log("PutAllMap:: value = {0} ", val);
}
foreach (KeyValuePair<ICacheableKey, IGeodeSerializable> item in putAllmap)
{
Util.Log("PutAllMap:: key = {0} value = {1} localcont = {2}", item.Key, item.Value, localcnt);
}
*/
m_region.PutAll(m_maps[localcnt], 60);
InitPerfStat.perfstat[localcnt].EndPut(startTime, m_isMainWorkLoad);
}
catch (Exception ex)
{
Util.Log(Util.LogLevel.Error,
"Exception while putAll map[{0}] for region {1} in iteration " +
"{2}: {3}", localcnt, m_region.Name, (count - offset), ex);
throw;
}
count++;
}
Interlocked.Add(ref m_iters, count - offset);
}
}
}
public class UpdateDeltaTask<TKey, TVal> : ClientTask
{
#region Private members
private IRegion<TKey, TVal> m_region;
private TKey[] m_keys;
private Int32 m_cnt;
private Int32 m_size;
private string m_objectType;
private bool m_encodeKey;
private bool m_encodeTimestamp;
private bool m_isMainWorkLoad;
private Int32 m_assetAcSize;
private Int32 m_assetmaxVal;
#endregion
public UpdateDeltaTask(IRegion<TKey, TVal> region, TKey[] keys, Int32 size, string objectType,
bool encodeKey, bool encodeTimestamp, bool isMainWorkLoad, Int32 assetACsize, Int32 assetMaxVal)
: base()
{
m_region = region as IRegion<TKey, TVal>;
m_keys = keys as TKey[];
m_cnt = 0;
m_size = size;
m_objectType = objectType;
m_encodeKey = encodeKey;
m_encodeTimestamp = encodeTimestamp;
m_isMainWorkLoad = isMainWorkLoad;
m_assetAcSize = assetACsize;
m_assetmaxVal = assetMaxVal;
}
public override void DoTask(int iters, object data)
{
if (m_keys != null && m_keys.Length > 0)
{
Int32 localcnt = m_cnt;
Interlocked.Increment(ref m_cnt);
int numKeys = m_keys.Length;
int offset = Util.Rand(numKeys);
int count = offset;
long startTime;
TVal obj = default(TVal);
Util.Log("UpdateDeltaTask::DoTask: starting {0} iterations.", iters);
while (Running && (iters-- != 0))
{
int idx = count % numKeys;
startTime = InitPerfStat.perfstat[localcnt].StartUpdate();
if (m_encodeKey)
{
obj = m_region[m_keys[idx]];
if (obj == null)
{
string exStr = string.Format("Key[{0}] has not been created in region {1}",
m_keys[idx], m_region.Name);
Util.Log(Util.LogLevel.Error, exStr);
throw new EntryNotFoundException(exStr);
}
}
else {
obj = ObjectHelper<TKey, TVal>.CreateObject(m_objectType, m_size, m_encodeKey, m_encodeTimestamp, m_assetAcSize, m_assetmaxVal, idx);
}
DeltaFastAssetAccount obj1 = obj as DeltaFastAssetAccount;
if(obj1 == null)
{
DeltaPSTObject obj2 = obj as DeltaPSTObject;
if (obj2 == null)
{
m_region[m_keys[idx]] = obj;
}
else{
obj2.Update();
}
}
else
{
obj1.Update();
}
InitPerfStat.perfstat[localcnt].EndUpdate(startTime, m_isMainWorkLoad);
count++;
}
Interlocked.Add(ref m_iters, count - offset);
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Diagnostics;
using Xunit;
namespace System.Collections.ObjectModel.Tests
{
/// <summary>
/// Tests the public properties and constructor of ReadOnlyObservableCollection<T>.
/// </summary>
public class ReadOnlyObservableCollectionTests
{
[Fact]
public static void Ctor_Tests()
{
string[] anArray = new string[] { "one", "two", "three", "four", "five" };
ReadOnlyObservableCollection<string> readOnlyCol =
new ReadOnlyObservableCollection<string>(new ObservableCollection<string>(anArray));
IReadOnlyList_T_Test<string> helper = new IReadOnlyList_T_Test<string>(readOnlyCol, anArray);
helper.InitialItems_Tests();
IList<string> readOnlyColAsIList = readOnlyCol;
Assert.True(readOnlyColAsIList.IsReadOnly, "ReadOnlyObservableCollection should be readOnly.");
}
[Fact]
public static void Ctor_Tests_Negative()
{
ReadOnlyObservableCollection<string> collection;
Assert.Throws<ArgumentNullException>(() => collection = new ReadOnlyObservableCollection<string>(null));
}
[Fact]
public static void GetItemTests()
{
string[] anArray = new string[] { "one", "two", "three", "four", "five" };
ReadOnlyObservableCollection<string> readOnlyCol =
new ReadOnlyObservableCollection<string>(new ObservableCollection<string>(anArray));
IReadOnlyList_T_Test<string> helper = new IReadOnlyList_T_Test<string>(readOnlyCol, anArray);
helper.Item_get_Tests();
}
[Fact]
public static void GetItemTests_Negative()
{
string[] anArray = new string[] { "one", "two", "three", "four", "five" };
ReadOnlyObservableCollection<string> readOnlyCol =
new ReadOnlyObservableCollection<string>(new ObservableCollection<string>(anArray));
IReadOnlyList_T_Test<string> helper = new IReadOnlyList_T_Test<string>(readOnlyCol, anArray);
helper.Item_get_Tests_Negative();
}
/// <summary>
/// Tests that Contains returns true when the item is in the collection
/// and false otherwise.
/// </summary>
[Fact]
public static void ContainsTests()
{
string[] anArray = new string[] { "one", "two", "three", "four", "five" };
ReadOnlyObservableCollection<string> readOnlyCol =
new ReadOnlyObservableCollection<string>(new ObservableCollection<string>(anArray));
for (int i = 0; i < anArray.Length; i++)
{
string item = anArray[i];
Assert.True(readOnlyCol.Contains(item), "ReadOnlyCol did not contain item: " + anArray[i] + " at index: " + i);
}
Assert.False(readOnlyCol.Contains("randomItem"), "ReadOnlyCol should not have contained non-existent item");
Assert.False(readOnlyCol.Contains(null), "ReadOnlyCol should not have contained null");
}
/// <summary>
/// Tests that the collection can be copied into a destination array.
/// </summary>
[Fact]
public static void CopyToTest()
{
string[] anArray = new string[] { "one", "two", "three", "four" };
ReadOnlyObservableCollection<string> readOnlyCol =
new ReadOnlyObservableCollection<string>(new ObservableCollection<string>(anArray));
string[] aCopy = new string[anArray.Length];
readOnlyCol.CopyTo(aCopy, 0);
for (int i = 0; i < anArray.Length; ++i)
Assert.Equal(anArray[i], aCopy[i]);
// copy observable collection starting in middle, where array is larger than source.
aCopy = new string[anArray.Length + 2];
int offsetIndex = 1;
readOnlyCol.CopyTo(aCopy, offsetIndex);
for (int i = 0; i < aCopy.Length; i++)
{
string value = aCopy[i];
if (i == 0)
Assert.True(null == value, "Should not have a value since we did not start copying there.");
else if (i == (aCopy.Length - 1))
Assert.True(null == value, "Should not have a value since the collection is shorter than the copy array..");
else
{
int indexInCollection = i - offsetIndex;
Assert.Equal(readOnlyCol[indexInCollection], aCopy[i]);
}
}
}
/// <summary>
/// Tests that:
/// ArgumentOutOfRangeException is thrown when the Index is >= collection.Count
/// or Index < 0.
/// ArgumentException when the destination array does not have enough space to
/// contain the source Collection.
/// ArgumentNullException when the destination array is null.
/// </summary>
[Fact]
public static void CopyToTest_Negative()
{
string[] anArray = new string[] { "one", "two", "three", "four" };
ReadOnlyObservableCollection<string> readOnlyCol =
new ReadOnlyObservableCollection<string>(new ObservableCollection<string>(anArray));
int[] iArrInvalidValues = new Int32[] { -1, -2, -100, -1000, -10000, -100000, -1000000, -10000000, -100000000, -1000000000, Int32.MinValue };
foreach (var index in iArrInvalidValues)
{
string[] aCopy = new string[anArray.Length];
Assert.Throws<ArgumentOutOfRangeException>(() => readOnlyCol.CopyTo(aCopy, index));
}
int[] iArrLargeValues = new Int32[] { anArray.Length, Int32.MaxValue, Int32.MaxValue / 2, Int32.MaxValue / 10 };
foreach (var index in iArrLargeValues)
{
string[] aCopy = new string[anArray.Length];
AssertExtensions.Throws<ArgumentException>("destinationArray", null, () => readOnlyCol.CopyTo(aCopy, index));
}
Assert.Throws<ArgumentNullException>(() => readOnlyCol.CopyTo(null, 1));
string[] copy = new string[anArray.Length - 1];
AssertExtensions.Throws<ArgumentException>("destinationArray", "", () => readOnlyCol.CopyTo(copy, 0));
copy = new string[0];
AssertExtensions.Throws<ArgumentException>("destinationArray", "", () => readOnlyCol.CopyTo(copy, 0));
}
/// <summary>
/// Tests that the index of an item can be retrieved when the item is
/// in the collection and -1 otherwise.
/// </summary>
[Fact]
public static void IndexOfTest()
{
string[] anArray = new string[] { "one", "two", "three", "four" };
ReadOnlyObservableCollection<string> readOnlyCollection =
new ReadOnlyObservableCollection<string>(new ObservableCollection<string>(anArray));
for (int i = 0; i < anArray.Length; ++i)
Assert.Equal(i, readOnlyCollection.IndexOf(anArray[i]));
Assert.Equal(-1, readOnlyCollection.IndexOf("seven"));
Assert.Equal(-1, readOnlyCollection.IndexOf(null));
// testing that the first occurrence is the index returned.
ObservableCollection<int> intCol = new ObservableCollection<int>();
for (int i = 0; i < 4; ++i)
intCol.Add(i % 2);
ReadOnlyObservableCollection<int> intReadOnlyCol = new ReadOnlyObservableCollection<int>(intCol);
Assert.Equal(0, intReadOnlyCol.IndexOf(0));
Assert.Equal(1, intReadOnlyCol.IndexOf(1));
IList colAsIList = (IList)intReadOnlyCol;
var index = colAsIList.IndexOf("stringObj");
Assert.Equal(-1, index);
}
/// <summary>
/// Tests that a ReadOnlyObservableCollection cannot be modified. That is, that
/// Add, Insert, Remove, RemoveAt and Clear throw NotSupportedException.
/// </summary>
[Fact]
public static void CannotModifyDictionaryTests_Negative()
{
string[] anArray = new string[] { "one", "two", "three", "four", "five" };
ReadOnlyObservableCollection<string> readOnlyCol =
new ReadOnlyObservableCollection<string>(new ObservableCollection<string>(anArray));
IReadOnlyList_T_Test<string> helper = new IReadOnlyList_T_Test<string>();
IList<string> readOnlyColAsIList = readOnlyCol;
Assert.Throws<NotSupportedException>(() => readOnlyColAsIList.Add("seven"));
Assert.Throws<NotSupportedException>(() => readOnlyColAsIList.Insert(0, "nine"));
Assert.Throws<NotSupportedException>(() => readOnlyColAsIList.Remove("one"));
Assert.Throws<NotSupportedException>(() => readOnlyColAsIList.RemoveAt(0));
Assert.Throws<NotSupportedException>(() => readOnlyColAsIList.Clear());
helper.VerifyReadOnlyCollection(readOnlyCol, anArray);
}
[Fact]
// Skip the test on desktop as "new ObservableCollection<int>()" returns a 0-length collection.
// Skip the test on UapAot as it requires Reflection on internal framework types.
[SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework | TargetFrameworkMonikers.UapAot)]
public static void DebuggerAttribute_Tests()
{
DebuggerAttributes.ValidateDebuggerDisplayReferences(new ReadOnlyObservableCollection<int>(new ObservableCollection<int>()));
DebuggerAttributes.ValidateDebuggerTypeProxyProperties(new ReadOnlyObservableCollection<int>(new ObservableCollection<int>()));
}
}
internal class IReadOnlyList_T_Test<T>
{
private readonly IReadOnlyList<T> _collection;
private readonly T[] _expectedItems;
/// <summary>
/// Initializes a new instance of the IReadOnlyList_T_Test.
/// </summary>
/// <param name="collection">The collection to run the tests on.</param>
/// <param name="expectedItems">The items expected to be in the collection.</param>
public IReadOnlyList_T_Test(IReadOnlyList<T> collection, T[] expectedItems)
{
_collection = collection;
_expectedItems = expectedItems;
}
public IReadOnlyList_T_Test()
{
}
/// <summary>
/// This verifies that the collection contains the expected items.
/// </summary>
public void InitialItems_Tests()
{
// Verify Count returns the expected value
Assert.Equal(_expectedItems.Length, _collection.Count);
// Verify the initial items in the collection
VerifyReadOnlyCollection(_collection, _expectedItems);
}
/// <summary>
/// Runs all of the valid tests on get Item.
/// </summary>
public void Item_get_Tests()
{
// Verify get_Item with valid item on Collection
Verify_get(_collection, _expectedItems);
}
/// <summary>
/// Runs all of the argument checking(invalid) tests on get Item.
/// </summary>
public void Item_get_Tests_Negative()
{
// Verify get_Item with index=Int32.MinValue
Assert.Throws<ArgumentOutOfRangeException>(() => { T item = _collection[Int32.MinValue]; });
// Verify that the collection was not mutated
VerifyReadOnlyCollection(_collection, _expectedItems);
// Verify get_Item with index=-1
Assert.Throws<ArgumentOutOfRangeException>(() => { T item = _collection[-1]; });
// Verify that the collection was not mutated
VerifyReadOnlyCollection(_collection, _expectedItems);
if (_expectedItems.Length == 0)
{
// Verify get_Item with index=0 on Empty collection
Assert.Throws<ArgumentOutOfRangeException>(() => { T item = _collection[0]; });
// Verify that the collection was not mutated
VerifyReadOnlyCollection(_collection, _expectedItems);
}
else
{
// Verify get_Item with index=Count on Empty collection
Assert.Throws<ArgumentOutOfRangeException>(() => { T item = _collection[_expectedItems.Length]; });
// Verify that the collection was not mutated
VerifyReadOnlyCollection(_collection, _expectedItems);
}
}
#region Helper Methods
/// <summary>
/// Verifies that the items in the collection match the expected items.
/// </summary>
internal void VerifyReadOnlyCollection(IReadOnlyList<T> collection, T[] items)
{
Verify_get(collection, items);
VerifyGenericEnumerator(collection, items);
VerifyEnumerator(collection, items);
}
/// <summary>
/// Verifies that you can get all items that should be in the collection.
/// </summary>
private void Verify_get(IReadOnlyList<T> collection, T[] items)
{
Assert.Equal(items.Length, collection.Count);
for (int i = 0; i < items.Length; i++)
{
int itemsIndex = i;
Assert.Equal(items[itemsIndex], collection[i]);
}
}
/// <summary>
/// Verifies that the generic enumerator retrieves the correct items.
/// </summary>
private void VerifyGenericEnumerator(IReadOnlyList<T> collection, T[] expectedItems)
{
IEnumerator<T> enumerator = collection.GetEnumerator();
int iterations = 0;
int expectedCount = expectedItems.Length;
// There is a sequential order to the collection, so we're testing for that.
while ((iterations < expectedCount) && enumerator.MoveNext())
{
T currentItem = enumerator.Current;
T tempItem;
// Verify we have not gotten more items than we expected
Assert.True(iterations < expectedCount,
"Err_9844awpa More items have been returned from the enumerator(" + iterations + " items) than are in the expectedElements(" + expectedCount + " items)");
// Verify Current returned the correct value
Assert.Equal(currentItem, expectedItems[iterations]);
// Verify Current always returns the same value every time it is called
for (int i = 0; i < 3; i++)
{
tempItem = enumerator.Current;
Assert.Equal(currentItem, tempItem);
}
iterations++;
}
Assert.Equal(expectedCount, iterations);
for (int i = 0; i < 3; i++)
{
Assert.False(enumerator.MoveNext(), "Err_2929ahiea Expected MoveNext to return false after" + iterations + " iterations");
}
enumerator.Dispose();
}
/// <summary>
/// Verifies that the non-generic enumerator retrieves the correct items.
/// </summary>
private void VerifyEnumerator(IReadOnlyList<T> collection, T[] expectedItems)
{
IEnumerator enumerator = collection.GetEnumerator();
int iterations = 0;
int expectedCount = expectedItems.Length;
// There is no guaranteed order from the non-generic enumerator, so we're testing that all
// the items in the read-only collection exist in the expected array.
bool[] itemsVisited = new bool[expectedCount];
bool itemFound;
while ((iterations < expectedCount) && enumerator.MoveNext())
{
object currentItem = enumerator.Current;
object tempItem;
// Verify we have not gotten more items than we expected
Assert.True(iterations < expectedCount,
"Err_9844awpa More items have been returned from the enumerator(" + iterations + " items) than are in the expectedElements(" + expectedCount + " items)");
// Verify Current returned the correct value
itemFound = false;
for (int i = 0; i < itemsVisited.Length; ++i)
{
if (!itemsVisited[i] && expectedItems[i].Equals(currentItem))
{
itemsVisited[i] = true;
itemFound = true;
break;
}
}
Assert.True(itemFound, "Err_1432pauy Current returned unexpected value=" + currentItem);
// Verify Current always returns the same value every time it is called
for (int i = 0; i < 3; i++)
{
tempItem = enumerator.Current;
Assert.Equal(currentItem, tempItem);
}
iterations++;
}
for (int i = 0; i < expectedCount; ++i)
{
Assert.True(itemsVisited[i], "Err_052848ahiedoi Expected the enumerator to return item: " + expectedItems[i] + " at index: " + i);
}
Assert.Equal(expectedCount, iterations);
for (int i = 0; i < 3; i++)
{
Assert.False(enumerator.MoveNext(), "Err_2929ahiea Expected MoveNext to return false after" + iterations + " iterations");
}
}
#endregion
}
}
| |
//---------------------------------------------------------------------------
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// File: HitTestWithPointDrawingContextWalker.cs
//
// Description: The implementation of HitTestWithPointDrawingContextWalker,
// used to perform hit tests with a point on renderdata.
//
// History:
// 06/20/2005 : michka - Carved it out of HitTestDrawingWalker.cs
//
//---------------------------------------------------------------------------
using MS.Internal;
using System;
using System.ComponentModel;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel.Design.Serialization;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.Windows;
using System.Windows.Media;
using System.Windows.Media.Composition;
using System.Windows.Media.Effects;
using System.Windows.Media.Imaging;
namespace System.Windows.Media
{
/// <summary>
/// HitTestDrawingContextWalker - a DrawingContextWalker which performs a hit test with a point
/// </summary>
internal class HitTestWithPointDrawingContextWalker: HitTestDrawingContextWalker
{
/// <summary>
/// Constructor
/// </summary>
/// <param name="point"> Point - the point to hit test, in local coordinates. </param>
internal HitTestWithPointDrawingContextWalker(Point point)
{
_point = point;
}
/// <summary>
/// IsHit Property - Returns whether the point hit the drawing instructions.
/// </summary>
internal override bool IsHit
{
get
{
return _contains;
}
}
internal override IntersectionDetail IntersectionDetail
{
get
{
return _contains ? IntersectionDetail.FullyInside : IntersectionDetail.Empty;
}
}
#region Static Drawing Context Methods
/// <summary>
/// DrawGeometry -
/// Draw a Geometry with the provided Brush and/or Pen.
/// If both the Brush and Pen are null this call is a no-op.
/// </summary>
/// <param name="brush">
/// The Brush with which to fill the Geometry.
/// This is optional, and can be null, in which case no fill is performed.
/// </param>
/// <param name="pen">
/// The Pen with which to stroke the Geometry.
/// This is optional, and can be null, in which case no stroke is performed.
/// </param>
/// <param name="geometry"> The Geometry to fill and/or stroke. </param>
public override void DrawGeometry(
Brush brush,
Pen pen,
Geometry geometry)
{
if (IsCurrentLayerNoOp || (geometry == null) || geometry.IsEmpty())
{
return;
}
if (brush != null)
{
_contains |= geometry.FillContains(_point);
}
// If we have a pen and we haven't yet hit, try the widened geometry.
if ((pen != null) && !_contains)
{
_contains |= geometry.StrokeContains(pen, _point);
}
// If we've hit, stop walking.
if (_contains)
{
StopWalking();
}
}
/// <summary>
/// Draw a GlyphRun.
/// </summary>
/// <param name="foregroundBrush">Foreground brush to draw GlyphRun with. </param>
/// <param name="glyphRun"> The GlyphRun to draw. </param>
/// <exception cref="ObjectDisposedException">
/// This call is illegal if this object has already been closed or disposed.
/// </exception>
public override void DrawGlyphRun(Brush foregroundBrush, GlyphRun glyphRun)
{
if (!IsCurrentLayerNoOp && (glyphRun != null))
{
// The InkBoundingBox + the Origin produce the true InkBoundingBox.
Rect rectangle = glyphRun.ComputeInkBoundingBox();
if (!rectangle.IsEmpty)
{
rectangle.Offset((Vector)glyphRun.BaselineOrigin);
_contains |= rectangle.Contains(_point);
// If we've hit, stop walking.
if (_contains)
{
StopWalking();
}
}
}
}
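// Note that glyph runs are hit tested against their ink bounding box (offset by the
// baseline origin) rather than against individual glyph outlines, so a point inside the
// empty space of that rectangle still counts as a hit.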
/// <summary>
/// PushClip -
/// Push a clip region, which will apply to all drawing primitives until the
/// corresponding Pop call.
/// </summary>
/// <param name="clipGeometry"> The Geometry to which we will clip. </param>
public override void PushClip(
Geometry clipGeometry)
{
if (!IsPushNoOp())
{
PushPointStack(_point);
// If the clip being pushed doesn't contain the hit test point,
// then we don't need to consider any of the subsequent Drawing
// operations in this layer.
if ((clipGeometry != null) && !clipGeometry.FillContains(_point))
{
IsCurrentLayerNoOp = true;
}
}
}
/// <summary>
/// PushOpacityMask -
/// Push an opacity mask
/// </summary>
/// <param name="brush">
/// The opacity mask brush
/// </param>
public override void PushOpacityMask(Brush brush)
{
if (!IsPushNoOp())
{
// This Push doesn't affect the hit test, so just push the current point
PushPointStack(_point);
}
}
/// <summary>
/// PushOpacity -
/// Push an opacity which will blend the composite of all drawing primitives added
/// until the corresponding Pop call.
/// </summary>
/// <param name="opacity">
/// The opacity with which to blend - 0 is transparent, 1 is opaque.
/// </param>
public override void PushOpacity(
Double opacity)
{
if (!IsPushNoOp())
{
// This Push doesn't affect the hit test, so just push the current point
PushPointStack(_point);
}
}
/// <summary>
/// PushTransform -
/// Push a Transform which will apply to all drawing operations until the corresponding
/// Pop.
/// </summary>
/// <param name="transform"> The Transform to push. </param>
public override void PushTransform(
Transform transform)
{
if (!IsPushNoOp())
{
if (transform == null || transform.IsIdentity)
{
PushPointStack(_point);
}
else
{
Matrix matrix = transform.Value;
if (matrix.HasInverse)
{
// Invert the transform. The inverse will be applied to the point
// so that hit testing is done in the original geometry's coordinates
matrix.Invert();
// Push the transformed point on the stack. This also updates _point.
PushPointStack(_point * matrix);
}
else
{
// If this transform doesn't have an inverse, then we don't need to consider any
// of the subsequent Drawing operations in this layer.
IsCurrentLayerNoOp = true;
}
}
}
}
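// Worked example of the inversion above: pushing a ScaleTransform(2, 2) yields the matrix
// [2 0; 0 2], whose inverse is [0.5 0; 0 0.5]. A hit test point of (10, 10) in the outer
// coordinate space therefore becomes (5, 5) on the point stack, and that is the value
// compared against geometry drawn inside the transformed layer.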
/// <summary>
/// PushGuidelineSet -
/// Push a set of guidelines which should be applied
/// to all drawing operations until the
/// corresponding Pop.
/// </summary>
/// <param name="guidelines"> The GuidelineSet to push. </param>
public override void PushGuidelineSet(
GuidelineSet guidelines)
{
if (!IsPushNoOp())
{
// This Push doesn't affect the hit test, so just push the current point
PushPointStack(_point);
}
}
/// <summary>
/// PushGuidelineY1 -
/// Explicitly push one horizontal guideline.
/// </summary>
/// <param name="coordinate"> The coordinate of leading guideline. </param>
internal override void PushGuidelineY1(
Double coordinate)
{
if (!IsPushNoOp())
{
// This Push doesn't affect the hit test, so just push the current point
PushPointStack(_point);
}
}
/// <summary>
/// PushGuidelineY2 -
/// Explicitly push a pair of horizontal guidelines.
/// </summary>
/// <param name="leadingCoordinate">
/// The coordinate of leading guideline.
/// </param>
/// <param name="offsetToDrivenCoordinate">
/// The offset from leading guideline to driven guideline.
/// </param>
internal override void PushGuidelineY2(
Double leadingCoordinate,
Double offsetToDrivenCoordinate)
{
if (!IsPushNoOp())
{
// This Push doesn't affect the hit test, so just push the current point
PushPointStack(_point);
}
}
/// <summary>
/// PushEffect -
/// Push a BitmapEffect which will apply to all drawing operations until the
/// corresponding Pop.
/// </summary>
/// <param name="effect"> The BitmapEffect to push. </param>
/// <param name="effectInput"> The BitmapEffectInput. </param>
[Obsolete(MS.Internal.Media.VisualTreeUtils.BitmapEffectObsoleteMessage)]
public override void PushEffect(
BitmapEffect effect,
BitmapEffectInput effectInput)
{
if (!IsPushNoOp())
{
// This API has been deprecated, so any BitmapEffect is ignored.
PushPointStack(_point);
}
}
/// <summary>
/// Pop
/// </summary>
public override void Pop(
)
{
if (!IsPopNoOp())
{
PopPointStack();
}
}
#endregion Static Drawing Context Methods
#region Private Methods
/// <summary>
/// PushPointStack - push a point onto the stack and update _point with it.
/// </summary>
/// <param name="point"> The new Point to push. </param>
private void PushPointStack(Point point)
{
if (_pointStack == null)
{
_pointStack = new Stack<Point>(2);
}
// Push the old point.
_pointStack.Push(_point);
// update current point
_point = point;
}
/// <summary>
/// PopPointStack - pop a point off of the point stack and update _point.
/// </summary>
private void PopPointStack()
{
// We must have a point stack and it must not be empty.
Debug.Assert(_pointStack != null);
Debug.Assert(_pointStack.Count > 0);
// Retrieve the previous point from the stack.
_point = _pointStack.Pop();
}
/// <summary>
/// Called by every Push operation, this method returns whether or not
/// the operation should be a no-op. If the current subgraph layer
/// is being no-op'd, it also increments the no-op depth.
/// </summary>
private bool IsPushNoOp()
{
if (IsCurrentLayerNoOp)
{
// Increment the depth so that the no-op status isn't reset
// when this layer's corresponding Pop is called.
_noOpLayerDepth++;
return true;
}
else
{
return false;
}
}
/// <summary>
/// Called by Pop, this method returns whether or not the Pop should be
/// a no-op. If the current subgraph layer is being no-op'd, it also
/// decrements the no-op depth, then resets the no-op status if this
/// is the last Pop in the no-op layer.
/// </summary>
private bool IsPopNoOp()
{
if (IsCurrentLayerNoOp)
{
Debug.Assert(_noOpLayerDepth >= 1);
_noOpLayerDepth--;
// If this Pop corresponds to the Push that created
// the no-op layer, then reset the no-op status.
if (_noOpLayerDepth == 0)
{
IsCurrentLayerNoOp = false;
}
return true;
}
else
{
return false;
}
}
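// Depth bookkeeping example: a PushClip whose clip misses the hit point sets the no-op
// status with _noOpLayerDepth = 1; a nested Push/Pop pair inside that layer raises the
// depth to 2 and lowers it back to 1 without affecting the status; the Pop that matches
// the original PushClip takes the depth to 0 and clears IsCurrentLayerNoOp.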
/// <summary>
/// Set/resets and gets whether or not the current subgraph layer is a no-op.
/// Currently, all subsequent instructions are skipped (no-op'd) when a non-invertible
/// transform is pushed (because we have to invert the matrix to perform
/// a hit-test), or during a point hit-test when a clip is pushed that
/// doesn't contain the point.
/// </summary>
private bool IsCurrentLayerNoOp
{
set
{
if (value == true)
{
// Guard that we aren't already in a no-op layer
//
// Instructions that can cause the layer to be no-op'd should be
// no-op'd themselves, and thus can't call this method, if we
// are already in a no-op layer
Debug.Assert(!_currentLayerIsNoOp);
Debug.Assert(_noOpLayerDepth == 0);
// Set the no-op status & initial depth
_currentLayerIsNoOp = true;
_noOpLayerDepth++;
}
else
{
// Guard that we are in a no-op layer, and that the correct corresponding
// Pop has been called.
Debug.Assert(_currentLayerIsNoOp);
Debug.Assert(_noOpLayerDepth == 0);
// Reset the no-op status
_currentLayerIsNoOp = false;
}
}
get
{
return _currentLayerIsNoOp;
}
}
#endregion Private Methods
#region Private Fields
// The hit test point, expressed in the coordinate space of the layer currently being walked.
private Point _point;
// Previously pushed hit test points for the enclosing layers; _point is restored from here on Pop.
private Stack<Point> _pointStack;
// When true, all instructions should perform no logic until the
// layer is exited via a Pop()
private bool _currentLayerIsNoOp;
// Number of Pop() calls until _currentLayerIsNoOp should be reset.
private int _noOpLayerDepth;
#endregion Private Fields
}
}
| |
using System;
using System.Text;
using System.Data;
using System.Data.SqlClient;
using System.Data.Common;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Xml;
using System.Xml.Serialization;
using SubSonic;
using SubSonic.Utilities;
// <auto-generated />
namespace SAEON.Observations.Data
{
/// <summary>
/// Strongly-typed collection for the Sensor class.
/// </summary>
[Serializable]
public partial class SensorCollection : ActiveList<Sensor, SensorCollection>
{
public SensorCollection() {}
/// <summary>
/// Filters an existing collection based on the set criteria. This is an in-memory filter.
/// Thanks to developingchris for this!
/// </summary>
/// <returns>SensorCollection</returns>
public SensorCollection Filter()
{
for (int i = this.Count - 1; i > -1; i--)
{
Sensor o = this[i];
foreach (SubSonic.Where w in this.wheres)
{
bool remove = false;
System.Reflection.PropertyInfo pi = o.GetType().GetProperty(w.ColumnName);
if (pi.CanRead)
{
object val = pi.GetValue(o, null);
switch (w.Comparison)
{
case SubSonic.Comparison.Equals:
if (!val.Equals(w.ParameterValue))
{
remove = true;
}
break;
}
}
if (remove)
{
this.Remove(o);
break;
}
}
}
return this;
}
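// Hypothetical usage sketch (column value invented for illustration): criteria added via
// Where(...) are normally applied by Load(), while Filter() re-applies them in memory to a
// collection that is already populated, e.g.
// SensorCollection sensors = new SensorCollection().Where(Sensor.Columns.Name, "Rain Gauge").Load();
// sensors.Filter(); // drops any item whose Name is not exactly "Rain Gauge"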
}
/// <summary>
/// This is an ActiveRecord class which wraps the Sensor table.
/// </summary>
[Serializable]
public partial class Sensor : ActiveRecord<Sensor>, IActiveRecord
{
#region .ctors and Default Settings
public Sensor()
{
SetSQLProps();
InitSetDefaults();
MarkNew();
}
private void InitSetDefaults() { SetDefaults(); }
public Sensor(bool useDatabaseDefaults)
{
SetSQLProps();
if(useDatabaseDefaults)
ForceDefaults();
MarkNew();
}
public Sensor(object keyID)
{
SetSQLProps();
InitSetDefaults();
LoadByKey(keyID);
}
public Sensor(string columnName, object columnValue)
{
SetSQLProps();
InitSetDefaults();
LoadByParam(columnName,columnValue);
}
protected static void SetSQLProps() { GetTableSchema(); }
#endregion
#region Schema and Query Accessor
public static Query CreateQuery() { return new Query(Schema); }
public static TableSchema.Table Schema
{
get
{
if (BaseSchema == null)
SetSQLProps();
return BaseSchema;
}
}
private static void GetTableSchema()
{
if(!IsSchemaInitialized)
{
//Schema declaration
TableSchema.Table schema = new TableSchema.Table("Sensor", TableType.Table, DataService.GetInstance("ObservationsDB"));
schema.Columns = new TableSchema.TableColumnCollection();
schema.SchemaName = @"dbo";
//columns
TableSchema.TableColumn colvarId = new TableSchema.TableColumn(schema);
colvarId.ColumnName = "ID";
colvarId.DataType = DbType.Guid;
colvarId.MaxLength = 0;
colvarId.AutoIncrement = false;
colvarId.IsNullable = false;
colvarId.IsPrimaryKey = true;
colvarId.IsForeignKey = false;
colvarId.IsReadOnly = false;
colvarId.DefaultSetting = @"(newid())";
colvarId.ForeignKeyTableName = "";
schema.Columns.Add(colvarId);
TableSchema.TableColumn colvarCode = new TableSchema.TableColumn(schema);
colvarCode.ColumnName = "Code";
colvarCode.DataType = DbType.AnsiString;
colvarCode.MaxLength = 75;
colvarCode.AutoIncrement = false;
colvarCode.IsNullable = false;
colvarCode.IsPrimaryKey = false;
colvarCode.IsForeignKey = false;
colvarCode.IsReadOnly = false;
colvarCode.DefaultSetting = @"";
colvarCode.ForeignKeyTableName = "";
schema.Columns.Add(colvarCode);
TableSchema.TableColumn colvarName = new TableSchema.TableColumn(schema);
colvarName.ColumnName = "Name";
colvarName.DataType = DbType.AnsiString;
colvarName.MaxLength = 150;
colvarName.AutoIncrement = false;
colvarName.IsNullable = false;
colvarName.IsPrimaryKey = false;
colvarName.IsForeignKey = false;
colvarName.IsReadOnly = false;
colvarName.DefaultSetting = @"";
colvarName.ForeignKeyTableName = "";
schema.Columns.Add(colvarName);
TableSchema.TableColumn colvarDescription = new TableSchema.TableColumn(schema);
colvarDescription.ColumnName = "Description";
colvarDescription.DataType = DbType.AnsiString;
colvarDescription.MaxLength = 5000;
colvarDescription.AutoIncrement = false;
colvarDescription.IsNullable = true;
colvarDescription.IsPrimaryKey = false;
colvarDescription.IsForeignKey = false;
colvarDescription.IsReadOnly = false;
colvarDescription.DefaultSetting = @"";
colvarDescription.ForeignKeyTableName = "";
schema.Columns.Add(colvarDescription);
TableSchema.TableColumn colvarUrl = new TableSchema.TableColumn(schema);
colvarUrl.ColumnName = "Url";
colvarUrl.DataType = DbType.AnsiString;
colvarUrl.MaxLength = 250;
colvarUrl.AutoIncrement = false;
colvarUrl.IsNullable = true;
colvarUrl.IsPrimaryKey = false;
colvarUrl.IsForeignKey = false;
colvarUrl.IsReadOnly = false;
colvarUrl.DefaultSetting = @"";
colvarUrl.ForeignKeyTableName = "";
schema.Columns.Add(colvarUrl);
TableSchema.TableColumn colvarPhenomenonID = new TableSchema.TableColumn(schema);
colvarPhenomenonID.ColumnName = "PhenomenonID";
colvarPhenomenonID.DataType = DbType.Guid;
colvarPhenomenonID.MaxLength = 0;
colvarPhenomenonID.AutoIncrement = false;
colvarPhenomenonID.IsNullable = false;
colvarPhenomenonID.IsPrimaryKey = false;
colvarPhenomenonID.IsForeignKey = true;
colvarPhenomenonID.IsReadOnly = false;
colvarPhenomenonID.DefaultSetting = @"";
colvarPhenomenonID.ForeignKeyTableName = "Phenomenon";
schema.Columns.Add(colvarPhenomenonID);
TableSchema.TableColumn colvarDataSourceID = new TableSchema.TableColumn(schema);
colvarDataSourceID.ColumnName = "DataSourceID";
colvarDataSourceID.DataType = DbType.Guid;
colvarDataSourceID.MaxLength = 0;
colvarDataSourceID.AutoIncrement = false;
colvarDataSourceID.IsNullable = false;
colvarDataSourceID.IsPrimaryKey = false;
colvarDataSourceID.IsForeignKey = true;
colvarDataSourceID.IsReadOnly = false;
colvarDataSourceID.DefaultSetting = @"";
colvarDataSourceID.ForeignKeyTableName = "DataSource";
schema.Columns.Add(colvarDataSourceID);
TableSchema.TableColumn colvarDataSchemaID = new TableSchema.TableColumn(schema);
colvarDataSchemaID.ColumnName = "DataSchemaID";
colvarDataSchemaID.DataType = DbType.Guid;
colvarDataSchemaID.MaxLength = 0;
colvarDataSchemaID.AutoIncrement = false;
colvarDataSchemaID.IsNullable = true;
colvarDataSchemaID.IsPrimaryKey = false;
colvarDataSchemaID.IsForeignKey = true;
colvarDataSchemaID.IsReadOnly = false;
colvarDataSchemaID.DefaultSetting = @"";
colvarDataSchemaID.ForeignKeyTableName = "DataSchema";
schema.Columns.Add(colvarDataSchemaID);
TableSchema.TableColumn colvarLatitude = new TableSchema.TableColumn(schema);
colvarLatitude.ColumnName = "Latitude";
colvarLatitude.DataType = DbType.Double;
colvarLatitude.MaxLength = 0;
colvarLatitude.AutoIncrement = false;
colvarLatitude.IsNullable = true;
colvarLatitude.IsPrimaryKey = false;
colvarLatitude.IsForeignKey = false;
colvarLatitude.IsReadOnly = false;
colvarLatitude.DefaultSetting = @"";
colvarLatitude.ForeignKeyTableName = "";
schema.Columns.Add(colvarLatitude);
TableSchema.TableColumn colvarLongitude = new TableSchema.TableColumn(schema);
colvarLongitude.ColumnName = "Longitude";
colvarLongitude.DataType = DbType.Double;
colvarLongitude.MaxLength = 0;
colvarLongitude.AutoIncrement = false;
colvarLongitude.IsNullable = true;
colvarLongitude.IsPrimaryKey = false;
colvarLongitude.IsForeignKey = false;
colvarLongitude.IsReadOnly = false;
colvarLongitude.DefaultSetting = @"";
colvarLongitude.ForeignKeyTableName = "";
schema.Columns.Add(colvarLongitude);
TableSchema.TableColumn colvarElevation = new TableSchema.TableColumn(schema);
colvarElevation.ColumnName = "Elevation";
colvarElevation.DataType = DbType.Double;
colvarElevation.MaxLength = 0;
colvarElevation.AutoIncrement = false;
colvarElevation.IsNullable = true;
colvarElevation.IsPrimaryKey = false;
colvarElevation.IsForeignKey = false;
colvarElevation.IsReadOnly = false;
colvarElevation.DefaultSetting = @"";
colvarElevation.ForeignKeyTableName = "";
schema.Columns.Add(colvarElevation);
TableSchema.TableColumn colvarUserId = new TableSchema.TableColumn(schema);
colvarUserId.ColumnName = "UserId";
colvarUserId.DataType = DbType.Guid;
colvarUserId.MaxLength = 0;
colvarUserId.AutoIncrement = false;
colvarUserId.IsNullable = false;
colvarUserId.IsPrimaryKey = false;
colvarUserId.IsForeignKey = true;
colvarUserId.IsReadOnly = false;
colvarUserId.DefaultSetting = @"";
colvarUserId.ForeignKeyTableName = "aspnet_Users";
schema.Columns.Add(colvarUserId);
TableSchema.TableColumn colvarAddedAt = new TableSchema.TableColumn(schema);
colvarAddedAt.ColumnName = "AddedAt";
colvarAddedAt.DataType = DbType.DateTime;
colvarAddedAt.MaxLength = 0;
colvarAddedAt.AutoIncrement = false;
colvarAddedAt.IsNullable = true;
colvarAddedAt.IsPrimaryKey = false;
colvarAddedAt.IsForeignKey = false;
colvarAddedAt.IsReadOnly = false;
colvarAddedAt.DefaultSetting = @"(getdate())";
colvarAddedAt.ForeignKeyTableName = "";
schema.Columns.Add(colvarAddedAt);
TableSchema.TableColumn colvarUpdatedAt = new TableSchema.TableColumn(schema);
colvarUpdatedAt.ColumnName = "UpdatedAt";
colvarUpdatedAt.DataType = DbType.DateTime;
colvarUpdatedAt.MaxLength = 0;
colvarUpdatedAt.AutoIncrement = false;
colvarUpdatedAt.IsNullable = true;
colvarUpdatedAt.IsPrimaryKey = false;
colvarUpdatedAt.IsForeignKey = false;
colvarUpdatedAt.IsReadOnly = false;
colvarUpdatedAt.DefaultSetting = @"(getdate())";
colvarUpdatedAt.ForeignKeyTableName = "";
schema.Columns.Add(colvarUpdatedAt);
TableSchema.TableColumn colvarRowVersion = new TableSchema.TableColumn(schema);
colvarRowVersion.ColumnName = "RowVersion";
colvarRowVersion.DataType = DbType.Binary;
colvarRowVersion.MaxLength = 0;
colvarRowVersion.AutoIncrement = false;
colvarRowVersion.IsNullable = false;
colvarRowVersion.IsPrimaryKey = false;
colvarRowVersion.IsForeignKey = false;
colvarRowVersion.IsReadOnly = true;
colvarRowVersion.DefaultSetting = @"";
colvarRowVersion.ForeignKeyTableName = "";
schema.Columns.Add(colvarRowVersion);
BaseSchema = schema;
//add this schema to the provider
//so we can query it later
DataService.Providers["ObservationsDB"].AddSchema("Sensor",schema);
}
}
#endregion
#region Props
[XmlAttribute("Id")]
[Bindable(true)]
public Guid Id
{
get { return GetColumnValue<Guid>(Columns.Id); }
set { SetColumnValue(Columns.Id, value); }
}
[XmlAttribute("Code")]
[Bindable(true)]
public string Code
{
get { return GetColumnValue<string>(Columns.Code); }
set { SetColumnValue(Columns.Code, value); }
}
[XmlAttribute("Name")]
[Bindable(true)]
public string Name
{
get { return GetColumnValue<string>(Columns.Name); }
set { SetColumnValue(Columns.Name, value); }
}
[XmlAttribute("Description")]
[Bindable(true)]
public string Description
{
get { return GetColumnValue<string>(Columns.Description); }
set { SetColumnValue(Columns.Description, value); }
}
[XmlAttribute("Url")]
[Bindable(true)]
public string Url
{
get { return GetColumnValue<string>(Columns.Url); }
set { SetColumnValue(Columns.Url, value); }
}
[XmlAttribute("PhenomenonID")]
[Bindable(true)]
public Guid PhenomenonID
{
get { return GetColumnValue<Guid>(Columns.PhenomenonID); }
set { SetColumnValue(Columns.PhenomenonID, value); }
}
[XmlAttribute("DataSourceID")]
[Bindable(true)]
public Guid DataSourceID
{
get { return GetColumnValue<Guid>(Columns.DataSourceID); }
set { SetColumnValue(Columns.DataSourceID, value); }
}
[XmlAttribute("DataSchemaID")]
[Bindable(true)]
public Guid? DataSchemaID
{
get { return GetColumnValue<Guid?>(Columns.DataSchemaID); }
set { SetColumnValue(Columns.DataSchemaID, value); }
}
[XmlAttribute("Latitude")]
[Bindable(true)]
public double? Latitude
{
get { return GetColumnValue<double?>(Columns.Latitude); }
set { SetColumnValue(Columns.Latitude, value); }
}
[XmlAttribute("Longitude")]
[Bindable(true)]
public double? Longitude
{
get { return GetColumnValue<double?>(Columns.Longitude); }
set { SetColumnValue(Columns.Longitude, value); }
}
[XmlAttribute("Elevation")]
[Bindable(true)]
public double? Elevation
{
get { return GetColumnValue<double?>(Columns.Elevation); }
set { SetColumnValue(Columns.Elevation, value); }
}
[XmlAttribute("UserId")]
[Bindable(true)]
public Guid UserId
{
get { return GetColumnValue<Guid>(Columns.UserId); }
set { SetColumnValue(Columns.UserId, value); }
}
[XmlAttribute("AddedAt")]
[Bindable(true)]
public DateTime? AddedAt
{
get { return GetColumnValue<DateTime?>(Columns.AddedAt); }
set { SetColumnValue(Columns.AddedAt, value); }
}
[XmlAttribute("UpdatedAt")]
[Bindable(true)]
public DateTime? UpdatedAt
{
get { return GetColumnValue<DateTime?>(Columns.UpdatedAt); }
set { SetColumnValue(Columns.UpdatedAt, value); }
}
[XmlAttribute("RowVersion")]
[Bindable(true)]
public byte[] RowVersion
{
get { return GetColumnValue<byte[]>(Columns.RowVersion); }
set { SetColumnValue(Columns.RowVersion, value); }
}
#endregion
#region PrimaryKey Methods
protected override void SetPrimaryKey(object oValue)
{
base.SetPrimaryKey(oValue);
SetPKValues();
}
public SAEON.Observations.Data.DataLogCollection DataLogRecords()
{
return new SAEON.Observations.Data.DataLogCollection().Where(DataLog.Columns.SensorID, Id).Load();
}
public SAEON.Observations.Data.DataSourceTransformationCollection DataSourceTransformationRecords()
{
return new SAEON.Observations.Data.DataSourceTransformationCollection().Where(DataSourceTransformation.Columns.SensorID, Id).Load();
}
public SAEON.Observations.Data.ImportBatchSummaryCollection ImportBatchSummaryRecords()
{
return new SAEON.Observations.Data.ImportBatchSummaryCollection().Where(ImportBatchSummary.Columns.SensorID, Id).Load();
}
public SAEON.Observations.Data.InstrumentSensorCollection InstrumentSensorRecords()
{
return new SAEON.Observations.Data.InstrumentSensorCollection().Where(InstrumentSensor.Columns.SensorID, Id).Load();
}
public SAEON.Observations.Data.ObservationCollection ObservationRecords()
{
return new SAEON.Observations.Data.ObservationCollection().Where(Observation.Columns.SensorID, Id).Load();
}
#endregion
#region ForeignKey Properties
private SAEON.Observations.Data.AspnetUser _AspnetUser = null;
/// <summary>
/// Returns an AspnetUser ActiveRecord object related to this Sensor
///
/// </summary>
public SAEON.Observations.Data.AspnetUser AspnetUser
{
// get { return SAEON.Observations.Data.AspnetUser.FetchByID(this.UserId); }
get { return _AspnetUser ?? (_AspnetUser = SAEON.Observations.Data.AspnetUser.FetchByID(this.UserId)); }
set { SetColumnValue("UserId", value.UserId); }
}
private SAEON.Observations.Data.DataSchema _DataSchema = null;
/// <summary>
/// Returns a DataSchema ActiveRecord object related to this Sensor
///
/// </summary>
public SAEON.Observations.Data.DataSchema DataSchema
{
// get { return SAEON.Observations.Data.DataSchema.FetchByID(this.DataSchemaID); }
get { return _DataSchema ?? (_DataSchema = SAEON.Observations.Data.DataSchema.FetchByID(this.DataSchemaID)); }
set { SetColumnValue("DataSchemaID", value.Id); }
}
private SAEON.Observations.Data.DataSource _DataSource = null;
/// <summary>
/// Returns a DataSource ActiveRecord object related to this Sensor
///
/// </summary>
public SAEON.Observations.Data.DataSource DataSource
{
// get { return SAEON.Observations.Data.DataSource.FetchByID(this.DataSourceID); }
get { return _DataSource ?? (_DataSource = SAEON.Observations.Data.DataSource.FetchByID(this.DataSourceID)); }
set { SetColumnValue("DataSourceID", value.Id); }
}
private SAEON.Observations.Data.Phenomenon _Phenomenon = null;
/// <summary>
/// Returns a Phenomenon ActiveRecord object related to this Sensor
///
/// </summary>
public SAEON.Observations.Data.Phenomenon Phenomenon
{
// get { return SAEON.Observations.Data.Phenomenon.FetchByID(this.PhenomenonID); }
get { return _Phenomenon ?? (_Phenomenon = SAEON.Observations.Data.Phenomenon.FetchByID(this.PhenomenonID)); }
set { SetColumnValue("PhenomenonID", value.Id); }
}
#endregion
//no ManyToMany tables defined (0)
#region ObjectDataSource support
/// <summary>
/// Inserts a record, can be used with the Object Data Source
/// </summary>
public static void Insert(Guid varId,string varCode,string varName,string varDescription,string varUrl,Guid varPhenomenonID,Guid varDataSourceID,Guid? varDataSchemaID,double? varLatitude,double? varLongitude,double? varElevation,Guid varUserId,DateTime? varAddedAt,DateTime? varUpdatedAt,byte[] varRowVersion)
{
Sensor item = new Sensor();
item.Id = varId;
item.Code = varCode;
item.Name = varName;
item.Description = varDescription;
item.Url = varUrl;
item.PhenomenonID = varPhenomenonID;
item.DataSourceID = varDataSourceID;
item.DataSchemaID = varDataSchemaID;
item.Latitude = varLatitude;
item.Longitude = varLongitude;
item.Elevation = varElevation;
item.UserId = varUserId;
item.AddedAt = varAddedAt;
item.UpdatedAt = varUpdatedAt;
item.RowVersion = varRowVersion;
if (System.Web.HttpContext.Current != null)
item.Save(System.Web.HttpContext.Current.User.Identity.Name);
else
item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
}
/// <summary>
/// Updates a record, can be used with the Object Data Source
/// </summary>
public static void Update(Guid varId,string varCode,string varName,string varDescription,string varUrl,Guid varPhenomenonID,Guid varDataSourceID,Guid? varDataSchemaID,double? varLatitude,double? varLongitude,double? varElevation,Guid varUserId,DateTime? varAddedAt,DateTime? varUpdatedAt,byte[] varRowVersion)
{
Sensor item = new Sensor();
item.Id = varId;
item.Code = varCode;
item.Name = varName;
item.Description = varDescription;
item.Url = varUrl;
item.PhenomenonID = varPhenomenonID;
item.DataSourceID = varDataSourceID;
item.DataSchemaID = varDataSchemaID;
item.Latitude = varLatitude;
item.Longitude = varLongitude;
item.Elevation = varElevation;
item.UserId = varUserId;
item.AddedAt = varAddedAt;
item.UpdatedAt = varUpdatedAt;
item.RowVersion = varRowVersion;
item.IsNew = false;
if (System.Web.HttpContext.Current != null)
item.Save(System.Web.HttpContext.Current.User.Identity.Name);
else
item.Save(System.Threading.Thread.CurrentPrincipal.Identity.Name);
}
#endregion
#region Typed Columns
public static TableSchema.TableColumn IdColumn
{
get { return Schema.Columns[0]; }
}
public static TableSchema.TableColumn CodeColumn
{
get { return Schema.Columns[1]; }
}
public static TableSchema.TableColumn NameColumn
{
get { return Schema.Columns[2]; }
}
public static TableSchema.TableColumn DescriptionColumn
{
get { return Schema.Columns[3]; }
}
public static TableSchema.TableColumn UrlColumn
{
get { return Schema.Columns[4]; }
}
public static TableSchema.TableColumn PhenomenonIDColumn
{
get { return Schema.Columns[5]; }
}
public static TableSchema.TableColumn DataSourceIDColumn
{
get { return Schema.Columns[6]; }
}
public static TableSchema.TableColumn DataSchemaIDColumn
{
get { return Schema.Columns[7]; }
}
public static TableSchema.TableColumn LatitudeColumn
{
get { return Schema.Columns[8]; }
}
public static TableSchema.TableColumn LongitudeColumn
{
get { return Schema.Columns[9]; }
}
public static TableSchema.TableColumn ElevationColumn
{
get { return Schema.Columns[10]; }
}
public static TableSchema.TableColumn UserIdColumn
{
get { return Schema.Columns[11]; }
}
public static TableSchema.TableColumn AddedAtColumn
{
get { return Schema.Columns[12]; }
}
public static TableSchema.TableColumn UpdatedAtColumn
{
get { return Schema.Columns[13]; }
}
public static TableSchema.TableColumn RowVersionColumn
{
get { return Schema.Columns[14]; }
}
#endregion
#region Columns Struct
public struct Columns
{
public static string Id = @"ID";
public static string Code = @"Code";
public static string Name = @"Name";
public static string Description = @"Description";
public static string Url = @"Url";
public static string PhenomenonID = @"PhenomenonID";
public static string DataSourceID = @"DataSourceID";
public static string DataSchemaID = @"DataSchemaID";
public static string Latitude = @"Latitude";
public static string Longitude = @"Longitude";
public static string Elevation = @"Elevation";
public static string UserId = @"UserId";
public static string AddedAt = @"AddedAt";
public static string UpdatedAt = @"UpdatedAt";
public static string RowVersion = @"RowVersion";
}
#endregion
#region Update PK Collections
public void SetPKValues()
{
}
#endregion
#region Deep Save
public void DeepSave()
{
Save();
}
#endregion
}
}
| |
//css_dbg /t:winexe, /platform:x86, /args:BookPages.xap;
//css_host /platform:x86;
using System;
using System.Drawing;
using System.IO;
using System.Net;
using Microsoft.Win32;
using System.Net.Sockets;
using System.Text;
using System.Windows.Forms;
using CSScriptLibrary;
using csscript;
using System.Resources;
using System.Reflection;
using System.Diagnostics;
//Inspired by:
//http://blogs.microsoft.co.il/blogs/tamir/archive/2008/05/02/stand-alone-multiplatform-silverlight-application.aspx
//Silverlight BookPages XAP is based on http://www.switchonthecode.com/tutorials/silverlight-3-tutorial-planeprojection-and-perspective-3d
//Because of problems with hosting the IE COM WebBrowser as an ActiveX control, the host application must target the x86 CPU architecture.
class Script
{
const string usage =
"Usage: cscs silverlight [/i]|[/u] [/exe] xapFile\n" +
"Executes silverlight XAP file without launching the browser or cretaing Web page.\n" +
" /i - install Windows Explorer shell extensions.\n" +
" /u - uninstall Windows Explorer shell extensions.\n" +
" /exe - generates self contained executable for executing silverlight XAP.\n";
[STAThread]
static void Main(string[] args)
{
if (!Utils.TryRenderXAPFromResources()) //try to run as a self-contained executable
{
//run as a script
if (args.Length == 0)
{
Console.WriteLine("You must specify the XAP file to execute.\n" + usage);
}
else if (args.Length == 1 && (args[0] == "?" || args[0] == "/?" || args[0] == "-?" || args[0].ToLower() == "help"))
{
Console.WriteLine(usage);
}
else
{
try
{
if (args.Length == 1)
{
if (args[0].ToLower() == "/u")
{
UnInstallShellExtension();
}
else if (args[0].ToLower() == "/i")
{
InstallShellExtension();
}
else
{
var xapFile = Path.GetFullPath(args[0]);
Utils.RenderXAP(xapFile);
}
}
else
{
if (args[0] == "/exe")
{
var xapFile = Path.GetFullPath(args[1]);
Utils.GenerateExecutable(xapFile);
}
else
{
Console.WriteLine(string.Format("Unexpected command line parameter {0}\n(1)", args[0], usage));
}
}
}
catch (Exception ex)
{
Console.WriteLine(ex.ToString());
MessageBox.Show(ex.ToString());
}
}
}
}
static void UnInstallShellExtension()
{
try
{
Registry.ClassesRoot.DeleteSubKeyTree(@".xap\shell");
Console.WriteLine("Silverlight Player (XAP viewer) shell extensions have been removed.");
}
catch (Exception ex)
{
Console.WriteLine(ex);
}
}
static void InstallShellExtension()
{
if (Environment.GetEnvironmentVariable("CSSCRIPT_DIR") == null)
{
Console.WriteLine("You must install CS-Script first.");
return;
}
using (var shell = Registry.ClassesRoot.CreateSubKey(@".xap\shell\Veiw\command"))
{
if (shell != null)
shell.SetValue("", Environment.ExpandEnvironmentVariables("\"%CSSCRIPT_DIR%\\csws.exe\" silverlight.cs \"%1\""));
}
using (var shell = Registry.ClassesRoot.CreateSubKey(@".xap\shell\Convert to EXE\command"))
{
if (shell != null)
shell.SetValue("", Environment.ExpandEnvironmentVariables("\"%CSSCRIPT_DIR%\\csws.exe\" silverlight.cs /exe \"%1\""));
}
Console.WriteLine("Silverlight Player (XAP viewer) shell extensions have been created.");
}
}
public class Utils
{
public static bool TryRenderXAPFromResources()
{
byte[] xapData = Utils.GetXAPFromResources();
if (xapData != null)
{
Application.Run(new SLViewer(xapData, Assembly.GetExecutingAssembly().GetName().Name));
return true;
}
else
return false;
}
public static void RenderXAP(string xapFile)
{
Application.Run(new SLViewer(xapFile));
}
public static void GenerateExecutable(string xapFile)
{
string resource = "";
string xapSource = "";
try
{
var scriptFile = CSSEnvironment.PrimaryScriptFile;
//scriptFile = @"E:\cs-script\Lib\silverlight.cs"; //if not executed as script (e.g. under IDE) then find the way to discover the location of this source code file
if (scriptFile == null || !File.Exists(scriptFile))
throw new Exception("In order to generate executable you must run 'silverlight.cs' as a script");
string csscriptEngine = Assembly.GetEntryAssembly().Location;
//csscriptEngine = @"E:\cs-script\cscs.exe"; //if not executed as script (e.g. under IDE) then find the way to discover CS-Script engine location
resource = Utils.ToResource(xapFile);
xapSource = Path.Combine(Path.GetTempPath(),
Path.GetFileNameWithoutExtension(xapFile) + ".cs");
using (StreamWriter sw = new StreamWriter(xapSource))
{
sw.WriteLine("//css_co /platform:x86;");
sw.WriteLine("//css_res " + resource + ";");
sw.WriteLine("//css_inc " + scriptFile + ";");
}
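// For a hypothetical MyApp.xap, the generated stub consists of just the three directives above:
// //css_co /platform:x86;
// //css_res <temp path>\SLViewer.resources;
// //css_inc <full path to silverlight.cs>;
// The stub is then handed to the CS-Script engine with the /ew switch (next statement), so the
// embedded XAP resource ends up inside the produced executable.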
Process
.Start(csscriptEngine, "/ew \"" + xapSource + "\"")
.WaitForExit();
var exe = Path.ChangeExtension(xapFile, ".exe");
var compiledAssembly = Path.ChangeExtension(xapSource, ".exe");
if (File.Exists(compiledAssembly))
{
if (File.Exists(exe))
File.Delete(exe);
File.Move(compiledAssembly, exe);
Console.WriteLine("The executable " + exe + " has been generated.");
}
else
{
Console.WriteLine("Could not generate executable...");
}
}
finally
{
try
{
if (File.Exists(resource))
File.Delete(resource);
}
catch { }
try
{
if (File.Exists(xapSource))
File.Delete(xapSource);
}
catch { }
}
}
public static byte[] GetXAPFromResources()
{
try
{
var res = new ResourceManager("SLViewer", Assembly.GetExecutingAssembly());
return (byte[])res.GetObject("content.xap"); ;
}
catch
{
return null;
}
}
public static string ToResource(string xapFile)
{
var resourceFile = Path.Combine(Path.GetTempPath(), "SLViewer.resources");
using (var resourceWriter = new ResourceWriter(resourceFile))
{
resourceWriter.AddResource("content.xap", File.ReadAllBytes(xapFile));
resourceWriter.Generate();
}
return resourceFile;
}
}
public class SLViewer : Form
{
WebBrowser browser = new WebBrowser();
TcpListener webServer;
byte[] xapData;
public SLViewer(string xapFile)
{
if (!File.Exists(xapFile))
throw new Exception("Cannot find " + xapFile);
xapData = File.ReadAllBytes(xapFile);
Text = "Silverlight Viewer - " + Path.GetFileName(xapFile);
Init();
}
public SLViewer(byte[] xapData, string xapName)
{
this.xapData = xapData;
Text = xapName;
Init();
}
void Init()
{
double offsetRatio = 0.8;
Size = new Size((int)((double)Screen.PrimaryScreen.Bounds.Size.Width * offsetRatio),
(int)((double)Screen.PrimaryScreen.Bounds.Height * offsetRatio));
StartPosition = FormStartPosition.CenterScreen;
browser.Dock = DockStyle.Fill;
Controls.Add(this.browser);
string url = "http://localhost:" + InitWebServer();
browser.Url = new Uri(url);
}
int InitWebServer()
{
webServer = new TcpListener(IPAddress.Any, 0);
webServer.Start();
BeginGetResponse(null);
return ((IPEndPoint)webServer.LocalEndpoint).Port;
}
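// Binding to port 0 asks the OS for a free ephemeral port; the port actually chosen is
// read back from LocalEndpoint and baked into the http://localhost:<port> URL that the
// hosted WebBrowser navigates to.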
void BeginGetResponse(IAsyncResult result)
{
if (result == null)
{
webServer.BeginAcceptSocket(BeginGetResponse, null);
return;
}
Socket socket = webServer.EndAcceptSocket(result);
if (socket.Connected)
{
var request = new byte[1024];
int count = socket.Receive(request);
string requestType = Encoding.ASCII.GetString(request, 0, count)
.Split(' ')[1];
if (requestType == "/")
{
byte[] response = Encoding.ASCII.GetBytes(
@"<HTML>
<HEAD>
<TITLE>DeskLight</TITLE>
</HEAD>
<BODY>
<OBJECT TYPE=""application/x-silverlight"" Width=""100%"" Height=""100%"">
<param name=""Source"" value=""silverlight.xap"" />
</OBJECT>
</BODY>
</HTML>");
byte[] headerData = Encoding.ASCII.GetBytes(
@"HTTP/1.1 200 OK\r\n" +
"Server: WeirdThing1.1\r\n" +
"Content-Type: text/html\r\n" +
"Accept-Ranges: bytes\r\n" +
"Content-Length: " + response.Length + "\r\n" +
"Connection: Close" + "\r\n\r\n");
socket.Send(headerData);
socket.Send(response);
}
else if (requestType.Contains("xap"))
{
var httpHeaderData = Encoding.ASCII.GetBytes(
@"HTTP/1.1 200 OK\r\n" +
"Server: WeirdThing1.1\r\n" +
"Content-Type: application/xap\r\n" +
"Accept-Ranges: bytes\r\n" +
"Content-Length: " + xapData.Length + "\r\n" +
"Connection: Close" + "\r\n\r\n");
socket.Send(httpHeaderData);
socket.Send(xapData);
}
else
{
Console.WriteLine("Unknown request received...");
}
}
webServer.BeginAcceptSocket(BeginGetResponse, null);
}
}
| |
//
// Encog(tm) Core v3.2 - .Net Version
// http://www.heatonresearch.com/encog/
//
// Copyright 2008-2014 Heaton Research, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// For more information on Heaton Research copyrights, licenses
// and trademarks visit:
// http://www.heatonresearch.com/copyright
//
using System;
using System.Collections.Generic;
using System.IO;
using System.Reflection;
using Encog.Engine.Network.Activation;
using Encog.MathUtil.RBF;
using Encog.Neural.Flat;
using Encog.Neural.Networks;
using Encog.Neural.RBF;
using Encog.Persist;
using Encog.Util;
using Encog.Util.CSV;
namespace Encog.Neural.Rbf
{
/// <summary>
/// Persist a RBF network.
/// </summary>
///
public class PersistRBFNetwork : IEncogPersistor
{
#region EncogPersistor Members
/// <inheritdoc/>
public virtual int FileVersion
{
get { return 1; }
}
/// <inheritdoc/>
public virtual String PersistClassString
{
get { return "RBFNetwork"; }
}
/// <inheritdoc/>
public Object Read(Stream mask0)
{
var result = new RBFNetwork();
var flat = (FlatNetworkRBF) result.Flat;
var ins0 = new EncogReadHelper(mask0);
EncogFileSection section;
while ((section = ins0.ReadNextSection()) != null)
{
if (section.SectionName.Equals("RBF-NETWORK")
&& section.SubSectionName.Equals("PARAMS"))
{
IDictionary<String, String> paras = section.ParseParams();
EngineArray.PutAll(paras, result.Properties);
}
if (section.SectionName.Equals("RBF-NETWORK")
&& section.SubSectionName.Equals("NETWORK"))
{
IDictionary<String, String> p = section.ParseParams();
flat.BeginTraining = EncogFileSection.ParseInt(p,
BasicNetwork.TagBeginTraining);
flat.ConnectionLimit = EncogFileSection.ParseDouble(p,
BasicNetwork.TagConnectionLimit);
flat.ContextTargetOffset = EncogFileSection.ParseIntArray(
p, BasicNetwork.TagContextTargetOffset);
flat.ContextTargetSize = EncogFileSection.ParseIntArray(
p, BasicNetwork.TagContextTargetSize);
flat.EndTraining = EncogFileSection.ParseInt(p,
BasicNetwork.TagEndTraining);
flat.HasContext = EncogFileSection.ParseBoolean(p,
BasicNetwork.TagHasContext);
flat.InputCount = EncogFileSection.ParseInt(p,
PersistConst.InputCount);
flat.LayerCounts = EncogFileSection.ParseIntArray(p,
BasicNetwork.TagLayerCounts);
flat.LayerFeedCounts = EncogFileSection.ParseIntArray(p,
BasicNetwork.TagLayerFeedCounts);
flat.LayerContextCount = EncogFileSection.ParseIntArray(p, BasicNetwork.TagLayerContextCount);
flat.LayerIndex = EncogFileSection.ParseIntArray(p,
BasicNetwork.TagLayerIndex);
flat.LayerOutput = section.ParseDoubleArray(p,
PersistConst.Output);
flat.LayerSums = new double[flat.LayerOutput.Length];
flat.OutputCount = EncogFileSection.ParseInt(p,PersistConst.OutputCount);
flat.WeightIndex = EncogFileSection.ParseIntArray(p,
BasicNetwork.TagWeightIndex);
flat.Weights = section.ParseDoubleArray(p,
PersistConst.Weights);
flat.BiasActivation = section.ParseDoubleArray(p, BasicNetwork.TagBiasActivation);
}
else if (section.SectionName.Equals("RBF-NETWORK")
&& section.SubSectionName.Equals("ACTIVATION"))
{
int index = 0;
flat.ActivationFunctions = new IActivationFunction[flat.LayerCounts.Length];
foreach (String line in section.Lines)
{
IActivationFunction af;
IList<String> cols = EncogFileSection
.SplitColumns(line);
String name = ReflectionUtil.AfPath
+ cols[0];
try
{
af = (IActivationFunction) ReflectionUtil.LoadObject(name);
}
catch (Exception e)
{
throw new PersistError(e);
}
for (int i = 0; i < af.ParamNames.Length; i++)
{
af.Params[i] = CSVFormat.EgFormat.Parse(cols[i + 1]);
}
flat.ActivationFunctions[index++] = af;
}
}
else if (section.SectionName.Equals("RBF-NETWORK")
&& section.SubSectionName.Equals("RBF"))
{
int index = 0;
int hiddenCount = flat.LayerCounts[1];
int inputCount = flat.LayerCounts[2];
flat.RBF = new IRadialBasisFunction[hiddenCount];
foreach (String line in section.Lines)
{
IRadialBasisFunction rbf;
IList<String> cols = EncogFileSection
.SplitColumns(line);
String name = ReflectionUtil.RBFPath + cols[0];
try
{
rbf = (IRadialBasisFunction) ReflectionUtil.LoadObject(name);
}
catch (TypeLoadException ex)
{
throw new PersistError(ex);
}
catch (TargetException ex)
{
throw new PersistError(ex);
}
catch (MemberAccessException ex)
{
throw new PersistError(ex);
}
rbf.Width = CSVFormat.EgFormat.Parse(cols[1]);
rbf.Peak = CSVFormat.EgFormat.Parse(cols[2]);
rbf.Centers = new double[inputCount];
for (int i = 0; i < inputCount; i++)
{
rbf.Centers[i] = CSVFormat.EgFormat.Parse(cols[i + 3]);
}
flat.RBF[index++] = rbf;
}
}
}
return result;
}
/// <inheritdoc/>
public void Save(Stream os, Object obj)
{
var xout = new EncogWriteHelper(os);
var net = (RBFNetwork) obj;
var flat = (FlatNetworkRBF) net.Flat;
xout.AddSection("RBF-NETWORK");
xout.AddSubSection("PARAMS");
xout.AddProperties(net.Properties);
xout.AddSubSection("NETWORK");
xout.WriteProperty(BasicNetwork.TagBeginTraining,
flat.BeginTraining);
xout.WriteProperty(BasicNetwork.TagConnectionLimit,
flat.ConnectionLimit);
xout.WriteProperty(BasicNetwork.TagContextTargetOffset,
flat.ContextTargetOffset);
xout.WriteProperty(BasicNetwork.TagContextTargetSize,
flat.ContextTargetSize);
xout.WriteProperty(BasicNetwork.TagEndTraining, flat.EndTraining);
xout.WriteProperty(BasicNetwork.TagHasContext, flat.HasContext);
xout.WriteProperty(PersistConst.InputCount, flat.InputCount);
xout.WriteProperty(BasicNetwork.TagLayerCounts, flat.LayerCounts);
xout.WriteProperty(BasicNetwork.TagLayerFeedCounts,
flat.LayerFeedCounts);
xout.WriteProperty(BasicNetwork.TagLayerContextCount,
flat.LayerContextCount);
xout.WriteProperty(BasicNetwork.TagLayerIndex, flat.LayerIndex);
xout.WriteProperty(PersistConst.Output, flat.LayerOutput);
xout.WriteProperty(PersistConst.OutputCount, flat.OutputCount);
xout.WriteProperty(BasicNetwork.TagWeightIndex, flat.WeightIndex);
xout.WriteProperty(PersistConst.Weights, flat.Weights);
xout.WriteProperty(BasicNetwork.TagBiasActivation,
flat.BiasActivation);
xout.AddSubSection("ACTIVATION");
foreach (IActivationFunction af in flat.ActivationFunctions)
{
xout.AddColumn(af.GetType().Name);
foreach (double t in af.Params)
{
xout.AddColumn(t);
}
xout.WriteLine();
}
xout.AddSubSection("RBF");
foreach (IRadialBasisFunction rbf in flat.RBF)
{
xout.AddColumn(rbf.GetType().Name);
xout.AddColumn(rbf.Width);
xout.AddColumn(rbf.Peak);
foreach (double t in rbf.Centers)
{
xout.AddColumn(t);
}
xout.WriteLine();
}
xout.Flush();
}
/// <inheritdoc/>
public Type NativeType
{
get { return typeof(RBFNetwork); }
}
#endregion
}
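// A minimal usage sketch, assuming the stock Encog persistence entry points (which dispatch
// RBFNetwork instances to this persistor):
// var network = new RBFNetwork(2, 4, 1, RBFEnum.Gaussian);
// EncogDirectoryPersistence.SaveObject(new FileInfo("rbf.eg"), network);
// var restored = (RBFNetwork)EncogDirectoryPersistence.LoadObject(new FileInfo("rbf.eg"));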
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.Network
{
using Microsoft.Azure;
using Microsoft.Azure.Management;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Microsoft.Rest.Serialization;
using Models;
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// Network Client
/// </summary>
public partial class NetworkManagementClient : ServiceClient<NetworkManagementClient>, INetworkManagementClient, IAzureClient
{
/// <summary>
/// The base URI of the service.
/// </summary>
public System.Uri BaseUri { get; set; }
/// <summary>
/// Gets or sets json serialization settings.
/// </summary>
public JsonSerializerSettings SerializationSettings { get; private set; }
/// <summary>
/// Gets or sets json deserialization settings.
/// </summary>
public JsonSerializerSettings DeserializationSettings { get; private set; }
/// <summary>
/// Credentials needed for the client to connect to Azure.
/// </summary>
public ServiceClientCredentials Credentials { get; private set; }
/// <summary>
/// The subscription credentials which uniquely identify the Microsoft Azure
/// subscription. The subscription ID forms part of the URI for every service
/// call.
/// </summary>
public string SubscriptionId { get; set; }
/// <summary>
/// Gets or sets the preferred language for the response.
/// </summary>
public string AcceptLanguage { get; set; }
/// <summary>
/// Gets or sets the retry timeout in seconds for Long Running Operations.
/// Default value is 30.
/// </summary>
public int? LongRunningOperationRetryTimeout { get; set; }
/// <summary>
/// When set to true a unique x-ms-client-request-id value is generated and
/// included in each request. Default is true.
/// </summary>
public bool? GenerateClientRequestId { get; set; }
/// <summary>
/// Gets the IApplicationGatewaysOperations.
/// </summary>
public virtual IApplicationGatewaysOperations ApplicationGateways { get; private set; }
/// <summary>
/// Gets the IAvailableEndpointServicesOperations.
/// </summary>
public virtual IAvailableEndpointServicesOperations AvailableEndpointServices { get; private set; }
/// <summary>
/// Gets the IExpressRouteCircuitAuthorizationsOperations.
/// </summary>
public virtual IExpressRouteCircuitAuthorizationsOperations ExpressRouteCircuitAuthorizations { get; private set; }
/// <summary>
/// Gets the IExpressRouteCircuitPeeringsOperations.
/// </summary>
public virtual IExpressRouteCircuitPeeringsOperations ExpressRouteCircuitPeerings { get; private set; }
/// <summary>
/// Gets the IExpressRouteCircuitsOperations.
/// </summary>
public virtual IExpressRouteCircuitsOperations ExpressRouteCircuits { get; private set; }
/// <summary>
/// Gets the IExpressRouteServiceProvidersOperations.
/// </summary>
public virtual IExpressRouteServiceProvidersOperations ExpressRouteServiceProviders { get; private set; }
/// <summary>
/// Gets the ILoadBalancersOperations.
/// </summary>
public virtual ILoadBalancersOperations LoadBalancers { get; private set; }
/// <summary>
/// Gets the ILoadBalancerBackendAddressPoolsOperations.
/// </summary>
public virtual ILoadBalancerBackendAddressPoolsOperations LoadBalancerBackendAddressPools { get; private set; }
/// <summary>
/// Gets the ILoadBalancerFrontendIPConfigurationsOperations.
/// </summary>
public virtual ILoadBalancerFrontendIPConfigurationsOperations LoadBalancerFrontendIPConfigurations { get; private set; }
/// <summary>
/// Gets the IInboundNatRulesOperations.
/// </summary>
public virtual IInboundNatRulesOperations InboundNatRules { get; private set; }
/// <summary>
/// Gets the ILoadBalancerLoadBalancingRulesOperations.
/// </summary>
public virtual ILoadBalancerLoadBalancingRulesOperations LoadBalancerLoadBalancingRules { get; private set; }
/// <summary>
/// Gets the ILoadBalancerNetworkInterfacesOperations.
/// </summary>
public virtual ILoadBalancerNetworkInterfacesOperations LoadBalancerNetworkInterfaces { get; private set; }
/// <summary>
/// Gets the ILoadBalancerProbesOperations.
/// </summary>
public virtual ILoadBalancerProbesOperations LoadBalancerProbes { get; private set; }
/// <summary>
/// Gets the INetworkInterfacesOperations.
/// </summary>
public virtual INetworkInterfacesOperations NetworkInterfaces { get; private set; }
/// <summary>
/// Gets the INetworkInterfaceIPConfigurationsOperations.
/// </summary>
public virtual INetworkInterfaceIPConfigurationsOperations NetworkInterfaceIPConfigurations { get; private set; }
/// <summary>
/// Gets the INetworkInterfaceLoadBalancersOperations.
/// </summary>
public virtual INetworkInterfaceLoadBalancersOperations NetworkInterfaceLoadBalancers { get; private set; }
/// <summary>
/// Gets the INetworkSecurityGroupsOperations.
/// </summary>
public virtual INetworkSecurityGroupsOperations NetworkSecurityGroups { get; private set; }
/// <summary>
/// Gets the ISecurityRulesOperations.
/// </summary>
public virtual ISecurityRulesOperations SecurityRules { get; private set; }
/// <summary>
/// Gets the IDefaultSecurityRulesOperations.
/// </summary>
public virtual IDefaultSecurityRulesOperations DefaultSecurityRules { get; private set; }
/// <summary>
/// Gets the INetworkWatchersOperations.
/// </summary>
public virtual INetworkWatchersOperations NetworkWatchers { get; private set; }
/// <summary>
/// Gets the IPacketCapturesOperations.
/// </summary>
public virtual IPacketCapturesOperations PacketCaptures { get; private set; }
/// <summary>
/// Gets the IPublicIPAddressesOperations.
/// </summary>
public virtual IPublicIPAddressesOperations PublicIPAddresses { get; private set; }
/// <summary>
/// Gets the IRouteFiltersOperations.
/// </summary>
public virtual IRouteFiltersOperations RouteFilters { get; private set; }
/// <summary>
/// Gets the IRouteFilterRulesOperations.
/// </summary>
public virtual IRouteFilterRulesOperations RouteFilterRules { get; private set; }
/// <summary>
/// Gets the IRouteTablesOperations.
/// </summary>
public virtual IRouteTablesOperations RouteTables { get; private set; }
/// <summary>
/// Gets the IRoutesOperations.
/// </summary>
public virtual IRoutesOperations Routes { get; private set; }
/// <summary>
/// Gets the IBgpServiceCommunitiesOperations.
/// </summary>
public virtual IBgpServiceCommunitiesOperations BgpServiceCommunities { get; private set; }
/// <summary>
/// Gets the IUsagesOperations.
/// </summary>
public virtual IUsagesOperations Usages { get; private set; }
/// <summary>
/// Gets the IVirtualNetworksOperations.
/// </summary>
public virtual IVirtualNetworksOperations VirtualNetworks { get; private set; }
/// <summary>
/// Gets the ISubnetsOperations.
/// </summary>
public virtual ISubnetsOperations Subnets { get; private set; }
/// <summary>
/// Gets the IVirtualNetworkPeeringsOperations.
/// </summary>
public virtual IVirtualNetworkPeeringsOperations VirtualNetworkPeerings { get; private set; }
/// <summary>
/// Gets the IVirtualNetworkGatewaysOperations.
/// </summary>
public virtual IVirtualNetworkGatewaysOperations VirtualNetworkGateways { get; private set; }
/// <summary>
/// Gets the IVirtualNetworkGatewayConnectionsOperations.
/// </summary>
public virtual IVirtualNetworkGatewayConnectionsOperations VirtualNetworkGatewayConnections { get; private set; }
/// <summary>
/// Gets the ILocalNetworkGatewaysOperations.
/// </summary>
public virtual ILocalNetworkGatewaysOperations LocalNetworkGateways { get; private set; }
/// <summary>
/// Initializes a new instance of the NetworkManagementClient class.
/// </summary>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
protected NetworkManagementClient(params DelegatingHandler[] handlers) : base(handlers)
{
Initialize();
}
/// <summary>
/// Initializes a new instance of the NetworkManagementClient class.
/// </summary>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
protected NetworkManagementClient(HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : base(rootHandler, handlers)
{
Initialize();
}
/// <summary>
/// Initializes a new instance of the NetworkManagementClient class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
protected NetworkManagementClient(System.Uri baseUri, params DelegatingHandler[] handlers) : this(handlers)
{
if (baseUri == null)
{
throw new System.ArgumentNullException("baseUri");
}
BaseUri = baseUri;
}
/// <summary>
/// Initializes a new instance of the NetworkManagementClient class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
protected NetworkManagementClient(System.Uri baseUri, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
if (baseUri == null)
{
throw new System.ArgumentNullException("baseUri");
}
BaseUri = baseUri;
}
/// <summary>
/// Initializes a new instance of the NetworkManagementClient class.
/// </summary>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
public NetworkManagementClient(ServiceClientCredentials credentials, params DelegatingHandler[] handlers) : this(handlers)
{
if (credentials == null)
{
throw new System.ArgumentNullException("credentials");
}
Credentials = credentials;
if (Credentials != null)
{
Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// Initializes a new instance of the NetworkManagementClient class.
/// </summary>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
public NetworkManagementClient(ServiceClientCredentials credentials, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
if (credentials == null)
{
throw new System.ArgumentNullException("credentials");
}
Credentials = credentials;
if (Credentials != null)
{
Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// Initializes a new instance of the NetworkManagementClient class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
public NetworkManagementClient(System.Uri baseUri, ServiceClientCredentials credentials, params DelegatingHandler[] handlers) : this(handlers)
{
if (baseUri == null)
{
throw new System.ArgumentNullException("baseUri");
}
if (credentials == null)
{
throw new System.ArgumentNullException("credentials");
}
BaseUri = baseUri;
Credentials = credentials;
if (Credentials != null)
{
Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// Initializes a new instance of the NetworkManagementClient class.
/// </summary>
/// <param name='baseUri'>
/// Optional. The base URI of the service.
/// </param>
/// <param name='credentials'>
/// Required. Credentials needed for the client to connect to Azure.
/// </param>
/// <param name='rootHandler'>
/// Optional. The http client handler used to handle http transport.
/// </param>
/// <param name='handlers'>
/// Optional. The delegating handlers to add to the http client pipeline.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
public NetworkManagementClient(System.Uri baseUri, ServiceClientCredentials credentials, HttpClientHandler rootHandler, params DelegatingHandler[] handlers) : this(rootHandler, handlers)
{
if (baseUri == null)
{
throw new System.ArgumentNullException("baseUri");
}
if (credentials == null)
{
throw new System.ArgumentNullException("credentials");
}
BaseUri = baseUri;
Credentials = credentials;
if (Credentials != null)
{
Credentials.InitializeServiceClient(this);
}
}
/// <summary>
/// An optional partial-method to perform custom initialization.
/// </summary>
partial void CustomInitialize();
/// <summary>
/// Initializes client properties.
/// </summary>
private void Initialize()
{
ApplicationGateways = new ApplicationGatewaysOperations(this);
AvailableEndpointServices = new AvailableEndpointServicesOperations(this);
ExpressRouteCircuitAuthorizations = new ExpressRouteCircuitAuthorizationsOperations(this);
ExpressRouteCircuitPeerings = new ExpressRouteCircuitPeeringsOperations(this);
ExpressRouteCircuits = new ExpressRouteCircuitsOperations(this);
ExpressRouteServiceProviders = new ExpressRouteServiceProvidersOperations(this);
LoadBalancers = new LoadBalancersOperations(this);
LoadBalancerBackendAddressPools = new LoadBalancerBackendAddressPoolsOperations(this);
LoadBalancerFrontendIPConfigurations = new LoadBalancerFrontendIPConfigurationsOperations(this);
InboundNatRules = new InboundNatRulesOperations(this);
LoadBalancerLoadBalancingRules = new LoadBalancerLoadBalancingRulesOperations(this);
LoadBalancerNetworkInterfaces = new LoadBalancerNetworkInterfacesOperations(this);
LoadBalancerProbes = new LoadBalancerProbesOperations(this);
NetworkInterfaces = new NetworkInterfacesOperations(this);
NetworkInterfaceIPConfigurations = new NetworkInterfaceIPConfigurationsOperations(this);
NetworkInterfaceLoadBalancers = new NetworkInterfaceLoadBalancersOperations(this);
NetworkSecurityGroups = new NetworkSecurityGroupsOperations(this);
SecurityRules = new SecurityRulesOperations(this);
DefaultSecurityRules = new DefaultSecurityRulesOperations(this);
NetworkWatchers = new NetworkWatchersOperations(this);
PacketCaptures = new PacketCapturesOperations(this);
PublicIPAddresses = new PublicIPAddressesOperations(this);
RouteFilters = new RouteFiltersOperations(this);
RouteFilterRules = new RouteFilterRulesOperations(this);
RouteTables = new RouteTablesOperations(this);
Routes = new RoutesOperations(this);
BgpServiceCommunities = new BgpServiceCommunitiesOperations(this);
Usages = new UsagesOperations(this);
VirtualNetworks = new VirtualNetworksOperations(this);
Subnets = new SubnetsOperations(this);
VirtualNetworkPeerings = new VirtualNetworkPeeringsOperations(this);
VirtualNetworkGateways = new VirtualNetworkGatewaysOperations(this);
VirtualNetworkGatewayConnections = new VirtualNetworkGatewayConnectionsOperations(this);
LocalNetworkGateways = new LocalNetworkGatewaysOperations(this);
BaseUri = new System.Uri("https://management.azure.com");
AcceptLanguage = "en-US";
LongRunningOperationRetryTimeout = 30;
GenerateClientRequestId = true;
SerializationSettings = new JsonSerializerSettings
{
Formatting = Newtonsoft.Json.Formatting.Indented,
DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat,
DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc,
NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore,
ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize,
ContractResolver = new ReadOnlyJsonContractResolver(),
Converters = new List<JsonConverter>
{
new Iso8601TimeSpanConverter()
}
};
SerializationSettings.Converters.Add(new TransformationJsonConverter());
DeserializationSettings = new JsonSerializerSettings
{
DateFormatHandling = Newtonsoft.Json.DateFormatHandling.IsoDateFormat,
DateTimeZoneHandling = Newtonsoft.Json.DateTimeZoneHandling.Utc,
NullValueHandling = Newtonsoft.Json.NullValueHandling.Ignore,
ReferenceLoopHandling = Newtonsoft.Json.ReferenceLoopHandling.Serialize,
ContractResolver = new ReadOnlyJsonContractResolver(),
Converters = new List<JsonConverter>
{
new Iso8601TimeSpanConverter()
}
};
CustomInitialize();
DeserializationSettings.Converters.Add(new TransformationJsonConverter());
DeserializationSettings.Converters.Add(new CloudErrorJsonConverter());
}
/// <summary>
/// Checks whether a domain name in the cloudapp.net zone is available for use.
/// </summary>
/// <param name='location'>
/// The location of the domain name.
/// </param>
/// <param name='domainNameLabel'>
/// The domain name to be verified. It must conform to the following regular
/// expression: ^[a-z][a-z0-9-]{1,61}[a-z0-9]$.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<DnsNameAvailabilityResult>> CheckDnsNameAvailabilityWithHttpMessagesAsync(string location, string domainNameLabel = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (location == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "location");
}
if (SubscriptionId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.SubscriptionId");
}
string apiVersion = "2017-08-01";
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("location", location);
tracingParameters.Add("domainNameLabel", domainNameLabel);
tracingParameters.Add("apiVersion", apiVersion);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "CheckDnsNameAvailability", tracingParameters);
}
// Construct URL
var _baseUrl = BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "subscriptions/{subscriptionId}/providers/Microsoft.Network/locations/{location}/CheckDnsNameAvailability").ToString();
_url = _url.Replace("{location}", System.Uri.EscapeDataString(location));
_url = _url.Replace("{subscriptionId}", System.Uri.EscapeDataString(SubscriptionId));
List<string> _queryParameters = new List<string>();
if (domainNameLabel != null)
{
_queryParameters.Add(string.Format("domainNameLabel={0}", System.Uri.EscapeDataString(domainNameLabel)));
}
if (apiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(apiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (GenerateClientRequestId != null && GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials
if (Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new CloudException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
CloudError _errorBody = SafeJsonConvert.DeserializeObject<CloudError>(_responseContent, DeserializationSettings);
if (_errorBody != null)
{
ex = new CloudException(_errorBody.Message);
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
ex.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse<DnsNameAvailabilityResult>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = SafeJsonConvert.DeserializeObject<DnsNameAvailabilityResult>(_responseContent, DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
}
}
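// A minimal usage sketch for the generated client above. TokenCredentials comes from
// Microsoft.Rest; the token, subscription id, location and domain label values are
// placeholders, and the Available property on DnsNameAvailabilityResult is assumed from
// the corresponding model class rather than shown in this file.
namespace NetworkManagementClientUsageSketch
{
    using System;
    using System.Threading.Tasks;
    using Microsoft.Azure.Management.Network;
    using Microsoft.Rest;

    internal static class Example
    {
        internal static async Task CheckDnsNameAsync()
        {
            using (var client = new NetworkManagementClient(new TokenCredentials("<access-token>")))
            {
                client.SubscriptionId = "<subscription-id>";
                // Issues GET .../locations/{location}/CheckDnsNameAvailability?domainNameLabel=...&api-version=2017-08-01
                var response = await client.CheckDnsNameAvailabilityWithHttpMessagesAsync("westus", "contoso-app");
                Console.WriteLine(response.Body?.Available);
            }
        }
    }
}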
| |
using UnityEngine;
using System.Collections.Generic;
#if UNITY_5_5_OR_NEWER
using UnityEngine.Profiling;
#endif
namespace Pathfinding.Util {
/** Helper for drawing Gizmos in a performant way.
* This is a replacement for the Unity Gizmos class as that is not very performant
* when drawing very large amounts of geometry (for example a large grid graph).
* These gizmos can be persistent, so if the data does not change, the gizmos
* do not need to be updated.
*
* How to use
* - Create a Hasher object and hash whatever data you will be using to draw the gizmos.
*   This could be, for example, the positions of the vertices. The only requirement is that
*   whenever the gizmos should change, the hash changes as well.
* - Check if a cached mesh exists for that hash.
* - If not, create a Builder object and call the drawing methods until you are done,
*   and then call Submit with a reference to the gizmos class and the hash you calculated before.
* - Call gizmos.Draw with the hash.
* - When you are done with drawing gizmos for this frame, call gizmos.FinalizeDraw.
*
* \code
* var a = Vector3.zero;
* var b = Vector3.one;
* var color = Color.red;
* var hasher = new RetainedGizmos.Hasher();
* hasher.AddHash(a.GetHashCode());
* hasher.AddHash(b.GetHashCode());
* hasher.AddHash(color.GetHashCode());
* if (!gizmos.Draw(hasher)) {
* var builder = ObjectPool<RetainedGizmos.Builder>.Claim();
* builder.DrawLine(a, b, color);
* builder.Submit(gizmos, hasher);
* }
* \endcode
*/
public class RetainedGizmos {
/** Combines hashes into a single hash value */
public struct Hasher {
ulong hash;
bool includePathSearchInfo;
PathHandler debugData;
public Hasher (AstarPath active) {
hash = 0;
this.debugData = active.debugPathData;
includePathSearchInfo = debugData != null && (active.debugMode == GraphDebugMode.F || active.debugMode == GraphDebugMode.G || active.debugMode == GraphDebugMode.H || active.showSearchTree);
AddHash((int)active.debugMode);
AddHash(active.debugFloor.GetHashCode());
AddHash(active.debugRoof.GetHashCode());
}
public void AddHash (int hash) {
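// Mix by multiplying the running hash by a large prime (1572869) and XOR-ing in the new value; the combination is order-sensitive by design.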
this.hash = (1572869UL * this.hash) ^ (ulong)hash;
}
public void HashNode (GraphNode node) {
AddHash(node.GetGizmoHashCode());
if (includePathSearchInfo) {
var pathNode = debugData.GetPathNode(node.NodeIndex);
AddHash((int)pathNode.pathID);
AddHash(pathNode.pathID == debugData.PathID ? 1 : 0);
AddHash((int) pathNode.F);
}
}
public ulong Hash {
get {
return hash;
}
}
}
/** Helper for drawing gizmos */
public class Builder : IAstarPooledObject {
List<Vector3> lines = new List<Vector3>();
List<Color32> lineColors = new List<Color32>();
List<Mesh> meshes = new List<Mesh>();
public void DrawMesh (RetainedGizmos gizmos, Vector3[] vertices, List<int> triangles, Color[] colors) {
var mesh = gizmos.GetMesh();
// Set all data on the mesh
mesh.vertices = vertices;
mesh.SetTriangles(triangles, 0);
mesh.colors = colors;
// Upload all data and mark the mesh as unreadable
mesh.UploadMeshData(true);
meshes.Add(mesh);
}
/** Draws a wire cube after being transformed the specified transformation */
public void DrawWireCube (GraphTransform tr, Bounds bounds, Color color) {
var min = bounds.min;
var max = bounds.max;
DrawLine(tr.Transform(new Vector3(min.x, min.y, min.z)), tr.Transform(new Vector3(max.x, min.y, min.z)), color);
DrawLine(tr.Transform(new Vector3(max.x, min.y, min.z)), tr.Transform(new Vector3(max.x, min.y, max.z)), color);
DrawLine(tr.Transform(new Vector3(max.x, min.y, max.z)), tr.Transform(new Vector3(min.x, min.y, max.z)), color);
DrawLine(tr.Transform(new Vector3(min.x, min.y, max.z)), tr.Transform(new Vector3(min.x, min.y, min.z)), color);
DrawLine(tr.Transform(new Vector3(min.x, max.y, min.z)), tr.Transform(new Vector3(max.x, max.y, min.z)), color);
DrawLine(tr.Transform(new Vector3(max.x, max.y, min.z)), tr.Transform(new Vector3(max.x, max.y, max.z)), color);
DrawLine(tr.Transform(new Vector3(max.x, max.y, max.z)), tr.Transform(new Vector3(min.x, max.y, max.z)), color);
DrawLine(tr.Transform(new Vector3(min.x, max.y, max.z)), tr.Transform(new Vector3(min.x, max.y, min.z)), color);
DrawLine(tr.Transform(new Vector3(min.x, min.y, min.z)), tr.Transform(new Vector3(min.x, max.y, min.z)), color);
DrawLine(tr.Transform(new Vector3(max.x, min.y, min.z)), tr.Transform(new Vector3(max.x, max.y, min.z)), color);
DrawLine(tr.Transform(new Vector3(max.x, min.y, max.z)), tr.Transform(new Vector3(max.x, max.y, max.z)), color);
DrawLine(tr.Transform(new Vector3(min.x, min.y, max.z)), tr.Transform(new Vector3(min.x, max.y, max.z)), color);
}
public void DrawLine (Vector3 start, Vector3 end, Color color) {
lines.Add(start);
lines.Add(end);
var col32 = (Color32)color;
lineColors.Add(col32);
lineColors.Add(col32);
}
public void Submit (RetainedGizmos gizmos, Hasher hasher) {
SubmitLines(gizmos, hasher.Hash);
SubmitMeshes(gizmos, hasher.Hash);
}
void SubmitMeshes (RetainedGizmos gizmos, ulong hash) {
for (int i = 0; i < meshes.Count; i++) {
gizmos.meshes.Add(new MeshWithHash { hash = hash, mesh = meshes[i], lines = false });
gizmos.existingHashes.Add(hash);
}
}
void SubmitLines (RetainedGizmos gizmos, ulong hash) {
// Unity only supports 65535 vertices per mesh. 65532 used because MaxLineEndPointsPerBatch needs to be even.
const int MaxLineEndPointsPerBatch = 65532/2;
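// Each endpoint is emitted as 2 vertices below, so a full batch uses at most 2*(65532/2) = 65532 vertices, just under the limit.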
int batches = (lines.Count + MaxLineEndPointsPerBatch - 1)/MaxLineEndPointsPerBatch;
for (int batch = 0; batch < batches; batch++) {
int startIndex = MaxLineEndPointsPerBatch * batch;
int endIndex = Mathf.Min(startIndex + MaxLineEndPointsPerBatch, lines.Count);
int lineEndPointCount = endIndex - startIndex;
UnityEngine.Assertions.Assert.IsTrue(lineEndPointCount % 2 == 0);
// Use pooled lists to avoid excessive allocations
var vertices = ListPool<Vector3>.Claim(lineEndPointCount*2);
var colors = ListPool<Color32>.Claim(lineEndPointCount*2);
var normals = ListPool<Vector3>.Claim(lineEndPointCount*2);
var uv = ListPool<Vector2>.Claim(lineEndPointCount*2);
var tris = ListPool<int>.Claim(lineEndPointCount*3);
// Loop through each endpoint of the lines
// and add 2 vertices for each
for (int j = startIndex; j < endIndex; j++) {
var vertex = (Vector3)lines[j];
vertices.Add(vertex);
vertices.Add(vertex);
var color = (Color32)lineColors[j];
colors.Add(color);
colors.Add(color);
uv.Add(new Vector2(0, 0));
uv.Add(new Vector2(1, 0));
}
// Loop through each line and add
// one normal for each vertex
for (int j = startIndex; j < endIndex; j += 2) {
var lineDir = (Vector3)(lines[j+1] - lines[j]);
// Store the line direction in the normals.
// A line consists of 4 vertices. The line direction will be used to
// offset the vertices to create a line with a fixed pixel thickness
normals.Add(lineDir);
normals.Add(lineDir);
normals.Add(lineDir);
normals.Add(lineDir);
}
// Setup triangle indices
// A triangle consists of 3 indices
// A line (4 vertices) consists of 2 triangles, so 6 triangle indices
for (int j = 0, v = 0; j < lineEndPointCount*3; j += 6, v += 4) {
// First triangle
tris.Add(v+0);
tris.Add(v+1);
tris.Add(v+2);
// Second triangle
tris.Add(v+1);
tris.Add(v+3);
tris.Add(v+2);
}
var mesh = gizmos.GetMesh();
// Set all data on the mesh
mesh.SetVertices(vertices);
mesh.SetTriangles(tris, 0);
mesh.SetColors(colors);
mesh.SetNormals(normals);
mesh.SetUVs(0, uv);
// Upload all data and mark the mesh as unreadable
mesh.UploadMeshData(true);
// Release the lists back to the pool
ListPool<Vector3>.Release(vertices);
ListPool<Color32>.Release(colors);
ListPool<Vector3>.Release(normals);
ListPool<Vector2>.Release(uv);
ListPool<int>.Release(tris);
gizmos.meshes.Add(new MeshWithHash { hash = hash, mesh = mesh, lines = true });
gizmos.existingHashes.Add(hash);
}
}
void IAstarPooledObject.OnEnterPool () {
lines.Clear();
lineColors.Clear();
meshes.Clear();
}
}
struct MeshWithHash {
public ulong hash;
public Mesh mesh;
public bool lines;
}
List<MeshWithHash> meshes = new List<MeshWithHash>();
HashSet<ulong> usedHashes = new HashSet<ulong>();
HashSet<ulong> existingHashes = new HashSet<ulong>();
Stack<Mesh> cachedMeshes = new Stack<Mesh>();
public GraphGizmoHelper GetSingleFrameGizmoHelper (AstarPath active) {
var uniqHash = new RetainedGizmos.Hasher();
uniqHash.AddHash(Time.realtimeSinceStartup.GetHashCode());
Draw(uniqHash);
return GetGizmoHelper(active, uniqHash);
}
public GraphGizmoHelper GetGizmoHelper (AstarPath active, Hasher hasher) {
var helper = ObjectPool<GraphGizmoHelper>.Claim();
helper.Init(active, hasher, this);
return helper;
}
void PoolMesh (Mesh mesh) {
mesh.Clear();
cachedMeshes.Push(mesh);
}
Mesh GetMesh () {
if (cachedMeshes.Count > 0) {
return cachedMeshes.Pop();
} else {
return new Mesh {
hideFlags = HideFlags.DontSave
};
}
}
/** Material to use for the navmesh in the editor */
public Material surfaceMaterial;
/** Material to use for the navmesh outline in the editor */
public Material lineMaterial;
/** True if there already is a mesh with the specified hash */
public bool HasCachedMesh (Hasher hasher) {
return existingHashes.Contains(hasher.Hash);
}
/** Schedules the meshes for the specified hash to be drawn.
* \returns False if there is no cached mesh for this hash, you may want to
* submit one in that case. The draw command will be issued regardless of the return value.
*/
public bool Draw (Hasher hasher) {
usedHashes.Add(hasher.Hash);
return HasCachedMesh(hasher);
}
/** Schedules all meshes that were drawn the last frame (last time FinalizeDraw was called) to be drawn again.
* Also draws any new meshes that have been added since FinalizeDraw was last called.
*/
public void DrawExisting () {
for (int i = 0; i < meshes.Count; i++) {
usedHashes.Add(meshes[i].hash);
}
}
/** Call after all #Draw commands for the frame have been done to draw everything */
public void FinalizeDraw () {
RemoveUnusedMeshes(meshes);
var cam = Camera.current;
var planes = GeometryUtility.CalculateFrustumPlanes(cam);
// Silently do nothing if the materials are not set
if (surfaceMaterial == null || lineMaterial == null) return;
Profiler.BeginSample("Draw Retained Gizmos");
// First surfaces, then lines
for (int matIndex = 0; matIndex <= 1; matIndex++) {
var mat = matIndex == 0 ? surfaceMaterial : lineMaterial;
for (int pass = 0; pass < mat.passCount; pass++) {
mat.SetPass(pass);
for (int i = 0; i < meshes.Count; i++) {
if (meshes[i].lines == (mat == lineMaterial) && GeometryUtility.TestPlanesAABB(planes, meshes[i].mesh.bounds)) {
Graphics.DrawMeshNow(meshes[i].mesh, Matrix4x4.identity);
}
}
}
}
usedHashes.Clear();
Profiler.EndSample();
}
/** Destroys all cached meshes.
* Used to make sure that no memory leaks happen in the Unity Editor.
*/
public void ClearCache () {
usedHashes.Clear();
RemoveUnusedMeshes(meshes);
while (cachedMeshes.Count > 0) {
Mesh.DestroyImmediate(cachedMeshes.Pop());
}
UnityEngine.Assertions.Assert.IsTrue(meshes.Count == 0);
}
void RemoveUnusedMeshes (List<MeshWithHash> meshList) {
// Walk the array with two pointers
// i pointing to the entry that should be filled with something
// and j pointing to the entry that is a potential candidate for
// filling the entry at i.
// When j reaches the end of the list, the leftover tail entries (from index i onwards) are removed
for (int i = 0, j = 0; i < meshList.Count; ) {
if (j == meshList.Count) {
j--;
meshList.RemoveAt(j);
} else if (usedHashes.Contains(meshList[j].hash)) {
meshList[i] = meshList[j];
i++;
j++;
} else {
PoolMesh(meshList[j].mesh);
existingHashes.Remove(meshList[j].hash);
j++;
}
}
}
}
}
| |
//***************************************************
//* This file was generated by tool
//* SharpKit
//***************************************************
using System;
using System.Collections.Generic;
using SharpKit.JavaScript;
namespace SharpKit.TinyMCE.dom
{
/// <summary>
/// Utility class for various DOM manipulation and retrieval functions.
/// </summary>
[JsType(JsMode.Prototype, Name="tinymce.dom.DOMUtils", Export=false)]
public partial class DOMUtils
{
/// <summary>
/// Adds the specified element to another element or elements.
/// </summary>
/// <param name="a">Optional object collection with arguments to add to the new element(s).</param>
/// <param name="h">Optional inner HTML contents to add for each element.</param>
/// <param name="c">Optional internal state to indicate if it should create or add.</param>
public SharpKit.TinyMCE.dom.Element add(object a, string h, bool c){return null;}
/// <summary>
/// Adds a class to the specified element or elements.
/// </summary>
/// <param name="c">Class name to add to each element.</param>
public string addClass(string c){return null;}
/// <summary>
/// Adds a style element at the top of the document with the specified cssText content.
/// </summary>
/// <param name="cssText">CSS Text style to add to top of head of document.</param>
public object addStyle(string cssText){return null;}
/// <summary>
/// Adds an event handler to the specified object.
/// </summary>
/// <param name="n">Name of event handler to add for example: click.</param>
/// <param name="f">Function to execute when the event occurs.</param>
/// <param name="s">Optional scope to execute the function in.</param>
public object bind(string n, object f, object s){return null;}
/// <summary>
/// Creates a new element.
/// </summary>
/// <param name="n">Name of new element.</param>
/// <param name="a">Optional object name/value collection with element attributes.</param>
/// <param name="h">Optional HTML string to set as inner HTML of the element.</param>
public SharpKit.TinyMCE.dom.Element create(string n, object a, string h){return null;}
/// <summary>
/// Creates an HTML string for an element. The element will be closed unless an empty inner HTML string is passed.
/// </summary>
/// <param name="n">Name of new element.</param>
/// <param name="a">Optional object name/value collection with element attributes.</param>
/// <param name="h">Optional HTML string to set as inner HTML of the element.</param>
public string createHTML(string n, object a, string h){return null;}
/// <summary>
/// Creates a new DOM Range object. This will use the native DOM Range API if it's
/// available; if it's not, it will fall back to the custom TinyMCE implementation.
/// </summary>
public object createRng(){return null;}
/// <summary>
/// Entity decodes a string, resolving any HTML entities such as &amp;aring;.
/// </summary>
/// <param name="s">String to decode entities on.</param>
public string decode(string s){return null;}
/// <summary>
/// Destroys all internal references to the DOM to solve IE leak issues.
/// </summary>
public object destroy(){return null;}
/// <summary>
/// Constructs a new DOMUtils instance. Consult the Wiki for more details on settings etc for this class.
/// </summary>
/// <param name="d">Document reference to bind the utility class to.</param>
/// <param name="s">Optional settings collection.</param>
public DOMUtils(object d, object s){}
/// <summary>
/// Entity encodes a string, converting the most common special characters (&lt;, &gt;, &quot; and &amp;) into entities.
/// </summary>
/// <param name="text">String to encode with entities.</param>
public string encode(string text){return null;}
/// <summary>
/// Find the common ancestor of two elements. This is a shorter method than using the DOM Range logic.
/// </summary>
/// <param name="a">Element to find common ancestor of.</param>
/// <param name="b">Element to find common ancestor of.</param>
public SharpKit.TinyMCE.dom.Element findCommonAncestor(SharpKit.TinyMCE.dom.Element a, SharpKit.TinyMCE.dom.Element b){return null;}
/// <summary>
/// Fires the specified event name with object on target.
/// </summary>
/// <param name="name">Name of the event to fire.</param>
/// <param name="evt">Event object to send.</param>
public object fire(string name, object evt){return null;}
/// <summary>
/// Returns the specified element by ID or the input element if it isn't a string.
/// </summary>
public SharpKit.TinyMCE.dom.Element get(){return null;}
/// <summary>
/// Returns the specified attribute by name.
/// </summary>
/// <param name="n">Name of attribute to get.</param>
/// <param name="dv">Optional default value to return if the attribute didn't exist.</param>
public string getAttrib(string n, string dv){return null;}
/// <summary>
/// Returns a NodeList with attributes for the element.
/// </summary>
public object getAttribs(){return null;}
/// <summary>
/// Returns an array of all single CSS classes in the document. A single CSS class is a simple
/// rule like ".class"; complex ones like "div td.class" will not be added to the output.
/// </summary>
public Array getClasses(){return null;}
/// <summary>
/// Returns the next node that matches selector or function
/// </summary>
/// <param name="node">Node to find siblings from.</param>
public SharpKit.TinyMCE.html.Node getNext(SharpKit.TinyMCE.html.Node node){return null;}
/// <summary>
/// Returns the outer HTML of an element.
/// </summary>
public string getOuterHTML(){return null;}
/// <summary>
/// Returns a node by the specified selector function. This function will
/// loop through all parent nodes and call the specified function for each node.
/// If the function returns true, indicating that it has found what it was looking for, the loop ends
/// and that node is returned.
/// </summary>
/// <param name="f">Selection function to execute on each node or CSS pattern.</param>
/// <param name="r">Optional root element, never go below this point.</param>
public SharpKit.TinyMCE.html.Node getParent(object f, SharpKit.TinyMCE.html.Node r){return null;}
/// <summary>
/// Returns a node list of all parents matching the specified selector function or pattern.
/// If the function returns true, indicating that it has found what it was looking for, that node will be collected.
/// </summary>
/// <param name="f">Selection function to execute on each node or CSS pattern.</param>
/// <param name="r">Optional root element, never go below this point.</param>
public Array getParents(object f, SharpKit.TinyMCE.html.Node r){return null;}
/// <summary>
/// Returns the absolute x, y position of a node. The position will be returned in an object with x, y fields.
/// </summary>
/// <param name="ro">Optional root element to stop calculations at.</param>
public object getPos(SharpKit.TinyMCE.dom.Element ro){return null;}
/// <summary>
/// Returns the previous node that matches selector or function
/// </summary>
/// <param name="node">Node to find siblings from.</param>
public SharpKit.TinyMCE.html.Node getPrev(SharpKit.TinyMCE.html.Node node){return null;}
/// <summary>
/// Returns the rectangle for a specific element.
/// </summary>
public object getRect(){return null;}
/// <summary>
/// Returns the root node of the document; this is normally the body but might be a DIV. Functions like getParent will not
/// go above the point of this root node.
/// </summary>
public SharpKit.TinyMCE.dom.Element getRoot(){return null;}
/// <summary>
/// Returns the size dimensions of the specified element.
/// </summary>
public object getSize(){return null;}
/// <summary>
/// Returns the current style or runtime/computed value of an element.
/// </summary>
/// <param name="na">Style name to return.</param>
/// <param name="c">Computed style.</param>
public string getStyle(string na, bool c){return null;}
/// <summary>
/// Returns the viewport of the window.
/// </summary>
/// <param name="w">Optional window to get viewport of.</param>
public object getViewPort(object w){return null;}
/// <summary>
/// Returns true if the specified element has the specified class.
/// </summary>
/// <param name="c">CSS class to check for.</param>
public bool hasClass(string c){return false;}
/// <summary>
/// Hides the specified element(s) by ID by setting the "display" style.
/// </summary>
public object hide(){return null;}
/// <summary>
/// Inserts an element after the reference element.
/// </summary>
/// <param name="node">Element to insert after the reference.</param>
public SharpKit.TinyMCE.dom.Element insertAfter(SharpKit.TinyMCE.dom.Element node){return null;}
/// <summary>
/// Returns true/false if the specified element matches the specified CSS pattern.
/// </summary>
/// <param name="selector">CSS pattern to match the element against.</param>
public object @is(string selector){return null;}
/// <summary>
/// Returns true/false if the specified element is a block element or not.
/// </summary>
public bool isBlock(){return false;}
/// <summary>
/// Returns true/false if the specified node is to be considered empty or not.
/// </summary>
/// <param name="elements">Optional name/value object with elements that are automatically treated as non empty elements.</param>
public bool isEmpty(object elements){return false;}
/// <summary>
/// Returns true/false if the element is hidden or not by checking the "display" style.
/// </summary>
public bool isHidden(){return false;}
/// <summary>
/// Imports/loads the specified CSS file into the document bound to the class.
/// </summary>
/// <param name="u">URL to CSS file to load.</param>
public object loadCSS(string u){return null;}
/// <summary>
/// Parses the specified style value into an object collection. This parser will also
/// merge and remove any redundant items that browsers might have added. It will also convert non-hex
/// colors to hex values. URLs inside the styles will also be converted to absolute/relative form based on settings.
/// </summary>
/// <param name="st">Style value to parse for example: border:1px solid red;.</param>
public object parseStyle(string st){return null;}
/// <summary>
/// Removes/deletes the specified element(s) from the DOM.
/// </summary>
/// <param name="keep_children">Optional state to keep children or not. If set to true all children will be placed at the location of the removed element.</param>
public SharpKit.TinyMCE.dom.Element remove(bool keep_children){return null;}
/// <summary>
/// Removes a class from the specified element or elements.
/// </summary>
/// <param name="c">Class name to remove to each element.</param>
public string removeClass(string c){return null;}
/// <summary>
/// Renames the specified element to a new name and keeps its attributes and children.
/// </summary>
/// <param name="elm">Element to rename.</param>
/// <param name="name">Name of the new element.</param>
public object rename(SharpKit.TinyMCE.dom.Element elm, string name){return null;}
/// <summary>
/// Replaces the specified element or elements with the specified element; the new element will
/// be cloned if multiple input elements are passed.
/// </summary>
/// <param name="n">New element to replace old ones with.</param>
/// <param name="k">Optional keep children state, if set to true child nodes from the old object will be added to new ones.</param>
public object replace(SharpKit.TinyMCE.dom.Element n, bool k){return null;}
/// <summary>
/// Executes the specified function on the element, identified by ID, DOM element node, or an array of elements/IDs.
/// </summary>
/// <param name="f">Function to execute for each item.</param>
/// <param name="s">Optional scope to execute the function in.</param>
public object run(object f, object s){return null;}
/// <summary>
/// Selects specific elements by a CSS level 3 pattern. For example "div#a1 p.test".
/// This function is optimized for the most common patterns needed in TinyMCE but it also performs well enough
/// on more complex patterns.
/// </summary>
/// <param name="p">CSS level 1 pattern to select/find elements by.</param>
/// <param name="s">Optional root element/scope element to search in.</param>
public Array select(string p, object s){return null;}
/// <summary>
/// Serializes the specified style object into a string.
/// </summary>
/// <param name="o">Object to serialize as string for example: {border : '1px solid red'}</param>
/// <param name="name">Optional element name.</param>
public string serializeStyle(object o, string name){return null;}
/// <summary>
/// Sets the specified attribute's value on an element or elements.
/// </summary>
/// <param name="n">Name of attribute to set.</param>
/// <param name="v">Value to set on the attribute of this value is falsy like null 0 or '' it will remove the attribute instead.</param>
public object setAttrib(string n, string v){return null;}
/// <summary>
/// Sets the specified attributes of a element or elements.
/// </summary>
/// <param name="o">Name/Value collection of attribute items to add to the element(s).</param>
public object setAttribs(object o){return null;}
/// <summary>
/// Sets the specified HTML content inside the element or elements. The HTML will first be processed; this means
/// URLs will get converted, hex color values fixed etc. Check processHTML for details.
/// </summary>
/// <param name="h">HTML content to set as inner HTML of the element.</param>
public object setHTML(string h){return null;}
/// <summary>
/// Sets the specified outer HTML on an element or elements.
/// </summary>
/// <param name="h">HTML code to set as outer value for the element.</param>
/// <param name="d">Optional document scope to use in this process defaults to the document of the DOM class.</param>
public object setOuterHTML(object h, object d){return null;}
/// <summary>
/// Sets the CSS style value on a HTML element. The name can be a camelcase string
/// or the CSS style name like background-color.
/// </summary>
/// <param name="na">Name of the style value to set.</param>
/// <param name="v">Value to set on the style.</param>
public object setStyle(string na, string v){return null;}
/// <summary>
/// Sets multiple styles on the specified element(s).
/// </summary>
/// <param name="o">Name/Value collection of style items to add to the element(s).</param>
public object setStyles(object o){return null;}
/// <summary>
/// Shows the specified element(s) by ID by setting the "display" style.
/// </summary>
public object show(){return null;}
/// <summary>
/// Splits an element into two new elements and places the specified split
/// element or elements between the new ones. For example, splitting the paragraph at the bold element in
/// this example &lt;p&gt;abc&lt;b&gt;abc&lt;/b&gt;123&lt;/p&gt; would produce &lt;p&gt;abc&lt;/p&gt;&lt;b&gt;abc&lt;/b&gt;&lt;p&gt;123&lt;/p&gt;.
/// </summary>
/// <param name="pe">Parent element to split.</param>
/// <param name="e">Element to split at.</param>
/// <param name="re">Optional replacement element to replace the split element by.</param>
public SharpKit.TinyMCE.dom.Element split(SharpKit.TinyMCE.dom.Element pe, SharpKit.TinyMCE.dom.Element e, SharpKit.TinyMCE.dom.Element re){return null;}
/// <summary>
/// Parses the specified RGB color value and returns a hex version of that color.
/// </summary>
/// <param name="s">RGB string value like rgb(1,2,3)</param>
public string toHex(string s){return null;}
/// <summary>
/// Removes the specified event handler by name and function from an element or collection of elements.
/// </summary>
/// <param name="n">Event handler name like for example: "click"</param>
/// <param name="f">Function to remove.</param>
public bool unbind(string n, object f){return false;}
/// <summary>
/// Returns a unique id. This can be useful when generating elements on the fly.
/// This method will not check if the element already exists.
/// </summary>
/// <param name="p">Optional prefix to add infront of all ids defaults to "mce_".</param>
public string uniqueId(string p){return null;}
}
}
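// A minimal usage sketch for the bindings above. The DOMUtils instance, the anonymous
// attribute object and the "mce_" prefix are illustrative assumptions; only the
// createHTML, encode and uniqueId signatures are taken from the bindings in this file.
namespace TinyMceDomUtilsUsageSketch
{
    using SharpKit.TinyMCE.dom;

    internal static class DomUtilsExample
    {
        internal static string BuildMarker(DOMUtils dom, string userText)
        {
            // Generate a unique id and build an HTML string with the user text entity-encoded.
            var id = dom.uniqueId("mce_");
            return dom.createHTML("span", new { id, @class = "note" }, dom.encode(userText));
        }
    }
}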
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text;
namespace Orleans.Runtime
{
internal class AssemblyLoader
{
#if !NETSTANDARD_TODO
private readonly Dictionary<string, SearchOption> dirEnumArgs;
private readonly HashSet<AssemblyLoaderPathNameCriterion> pathNameCriteria;
private readonly HashSet<AssemblyLoaderReflectionCriterion> reflectionCriteria;
private readonly Logger logger;
private readonly Lazy<ExeImageInfo> exeInfo = new Lazy<ExeImageInfo>(LoadImageInformation);
internal bool SimulateExcludeCriteriaFailure { get; set; }
internal bool SimulateLoadCriteriaFailure { get; set; }
internal bool SimulateReflectionOnlyLoadFailure { get; set; }
internal bool RethrowDiscoveryExceptions { get; set; }
private AssemblyLoader(
Dictionary<string, SearchOption> dirEnumArgs,
HashSet<AssemblyLoaderPathNameCriterion> pathNameCriteria,
HashSet<AssemblyLoaderReflectionCriterion> reflectionCriteria,
Logger logger)
{
this.dirEnumArgs = dirEnumArgs;
this.pathNameCriteria = pathNameCriteria;
this.reflectionCriteria = reflectionCriteria;
this.logger = logger;
SimulateExcludeCriteriaFailure = false;
SimulateLoadCriteriaFailure = false;
SimulateReflectionOnlyLoadFailure = false;
RethrowDiscoveryExceptions = false;
}
/// <summary>
/// Loads assemblies according to caller-defined criteria.
/// </summary>
/// <param name="dirEnumArgs">A list of arguments that are passed to Directory.EnumerateFiles().
/// The sum of the DLLs found from these searches is used as a base set of assemblies for
/// criteria to evaluate.</param>
/// <param name="pathNameCriteria">A list of criteria that are used to disqualify
/// assemblies from being loaded based on path name alone (e.g.
/// AssemblyLoaderCriteria.ExcludeFileNames) </param>
/// <param name="reflectionCriteria">A list of criteria that are used to identify
/// assemblies to be loaded based on examination of their ReflectionOnly type
/// information (e.g. AssemblyLoaderCriteria.LoadTypesAssignableFrom).</param>
/// <param name="logger">A logger to provide feedback to.</param>
/// <returns>List of discovered assembly locations</returns>
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Reliability", "CA2001:AvoidCallingProblematicMethods", MessageId = "System.Reflection.Assembly.LoadFrom")]
public static List<string> LoadAssemblies(
Dictionary<string, SearchOption> dirEnumArgs,
IEnumerable<AssemblyLoaderPathNameCriterion> pathNameCriteria,
IEnumerable<AssemblyLoaderReflectionCriterion> reflectionCriteria,
Logger logger)
{
var loader =
NewAssemblyLoader(
dirEnumArgs,
pathNameCriteria,
reflectionCriteria,
logger);
int count = 0;
List<string> discoveredAssemblyLocations = loader.DiscoverAssemblies();
foreach (var pathName in discoveredAssemblyLocations)
{
loader.logger.Info("Loading assembly {0}...", pathName);
// It is okay to use LoadFrom here because we are loading application assemblies deployed to the specific directory.
// Such application assemblies should not be deployed somewhere else, e.g. GAC, so this is safe.
Assembly.LoadFrom(pathName);
++count;
}
loader.logger.Info("{0} assemblies loaded.", count);
return discoveredAssemblyLocations;
}
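// Illustrative call shape for LoadAssemblies. The criterion factory methods are the ones
// named in the XML docs above, but their exact signatures and the IProvider type used
// here are assumptions for the sketch:
//
//   var dirs = new Dictionary<string, SearchOption> {
//       { AppDomain.CurrentDomain.BaseDirectory, SearchOption.TopDirectoryOnly }
//   };
//   var loaded = AssemblyLoader.LoadAssemblies(
//       dirs,
//       new[] { AssemblyLoaderCriteria.ExcludeFileNames("System.dll") },
//       new[] { AssemblyLoaderCriteria.LoadTypesAssignableFrom(typeof(IProvider)) },
//       logger);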
#endif
public static T TryLoadAndCreateInstance<T>(string assemblyName, Logger logger) where T : class
{
try
{
var assembly = Assembly.Load(new AssemblyName(assemblyName));
var foundType =
TypeUtils.GetTypes(
assembly,
type =>
typeof(T).IsAssignableFrom(type) && !type.GetTypeInfo().IsInterface
&& type.GetConstructor(Type.EmptyTypes) != null, logger).FirstOrDefault();
if (foundType == null)
{
return null;
}
return (T)Activator.CreateInstance(foundType, true);
}
catch (FileNotFoundException exception)
{
logger.Warn(ErrorCode.Loader_TryLoadAndCreateInstance_Failure, exception.Message, exception);
return null;
}
catch (Exception exc)
{
logger.Error(ErrorCode.Loader_TryLoadAndCreateInstance_Failure, exc.Message, exc);
throw;
}
}
public static T LoadAndCreateInstance<T>(string assemblyName, Logger logger) where T : class
{
try
{
var assembly = Assembly.Load(new AssemblyName(assemblyName));
var foundType = TypeUtils.GetTypes(assembly, type => typeof(T).IsAssignableFrom(type), logger).First();
return (T)Activator.CreateInstance(foundType, true);
}
catch (Exception exc)
{
logger.Error(ErrorCode.Loader_LoadAndCreateInstance_Failure, exc.Message, exc);
throw;
}
}
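// Illustrative use of TryLoadAndCreateInstance; the assembly and interface names below
// are hypothetical examples, not references to code in this file:
//
//   var publisher = AssemblyLoader.TryLoadAndCreateInstance<IStatisticsPublisher>(
//       "MyCompany.Orleans.Telemetry", logger);
//   if (publisher == null)
//   {
//       // Optional feature assembly is not deployed; continue without it.
//   }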
#if !NETSTANDARD_TODO
// This method is internal so that it can be accessed from unit tests, which only test the discovery
// process, not the actual loading of assemblies.
internal static AssemblyLoader NewAssemblyLoader(
Dictionary<string, SearchOption> dirEnumArgs,
IEnumerable<AssemblyLoaderPathNameCriterion> pathNameCriteria,
IEnumerable<AssemblyLoaderReflectionCriterion> reflectionCriteria,
Logger logger)
{
if (null == dirEnumArgs)
throw new ArgumentNullException("dirEnumArgs");
if (dirEnumArgs.Count == 0)
throw new ArgumentException("At least one directory is necessary in order to search for assemblies.");
HashSet<AssemblyLoaderPathNameCriterion> pathNameCriteriaSet = null == pathNameCriteria
? new HashSet<AssemblyLoaderPathNameCriterion>()
: new HashSet<AssemblyLoaderPathNameCriterion>(pathNameCriteria.Distinct());
if (null == reflectionCriteria || !reflectionCriteria.Any())
throw new ArgumentException("No assemblies will be loaded unless reflection criteria are specified.");
var reflectionCriteriaSet = new HashSet<AssemblyLoaderReflectionCriterion>(reflectionCriteria.Distinct());
if (null == logger)
throw new ArgumentNullException("logger");
return new AssemblyLoader(
dirEnumArgs,
pathNameCriteriaSet,
reflectionCriteriaSet,
logger);
}
// This method is internal so that it can be accessed from unit tests, which only test the discovery
// process, not the actual loading of assemblies.
internal List<string> DiscoverAssemblies()
{
try
{
if (dirEnumArgs.Count == 0)
throw new InvalidOperationException("Please specify a directory to search using the AddDirectory or AddRoot methods.");
AppDomain.CurrentDomain.ReflectionOnlyAssemblyResolve += CachedReflectionOnlyTypeResolver.OnReflectionOnlyAssemblyResolve;
// EnumerateApprovedAssemblies builds its result list before returning, so the resolver
// handler registered above stays attached until enumeration is complete and is only
// detached in the finally clause below.
return EnumerateApprovedAssemblies();
}
finally
{
AppDomain.CurrentDomain.ReflectionOnlyAssemblyResolve -= CachedReflectionOnlyTypeResolver.OnReflectionOnlyAssemblyResolve;
}
}
private List<string> EnumerateApprovedAssemblies()
{
var assemblies = new List<string>();
foreach (var i in dirEnumArgs)
{
var pathName = i.Key;
var searchOption = i.Value;
if (!Directory.Exists(pathName))
{
logger.Warn(ErrorCode.Loader_DirNotFound, "Unable to find directory {0}; skipping.", pathName);
continue;
}
logger.Info(
searchOption == SearchOption.TopDirectoryOnly ?
"Searching for assemblies in {0}..." :
"Recursively searching for assemblies in {0}...",
pathName);
var candidates =
Directory.EnumerateFiles(pathName, "*.dll", searchOption)
.Select(Path.GetFullPath)
.Distinct()
.ToArray();
// This is a workaround for the behavior of ReflectionOnlyLoad/ReflectionOnlyLoadFrom
// that appear not to automatically resolve dependencies.
// We are trying to pre-load all dlls we find in the folder, so that if one of these
// assemblies happens to be a dependency of an assembly we later on call
// Assembly.DefinedTypes on, the dependency will be already loaded and will get
// automatically resolved. Ugly, but seems to solve the problem.
foreach (var j in candidates)
{
try
{
if (logger.IsVerbose) logger.Verbose("Trying to pre-load {0} to reflection-only context.", j);
Assembly.ReflectionOnlyLoadFrom(j);
}
catch (Exception)
{
if (logger.IsVerbose) logger.Verbose("Failed to pre-load assembly {0} in reflection-only context.", j);
}
}
foreach (var j in candidates)
{
if (AssemblyPassesLoadCriteria(j))
assemblies.Add(j);
}
}
return assemblies;
}
private bool ShouldExcludeAssembly(string pathName)
{
foreach (var criterion in pathNameCriteria)
{
IEnumerable<string> complaints;
bool shouldExclude;
try
{
shouldExclude = !criterion.EvaluateCandidate(pathName, out complaints);
}
catch (Exception ex)
{
complaints = ReportUnexpectedException(ex);
if (RethrowDiscoveryExceptions)
throw;
shouldExclude = true;
}
if (shouldExclude)
{
LogComplaints(pathName, complaints);
return true;
}
}
return false;
}
private static Assembly MatchWithLoadedAssembly(AssemblyName searchFor, IEnumerable<Assembly> assemblies)
{
foreach (var assembly in assemblies)
{
var searchForFullName = searchFor.FullName;
var candidateFullName = assembly.FullName;
if (String.Equals(candidateFullName, searchForFullName, StringComparison.OrdinalIgnoreCase))
{
return assembly;
}
}
return null;
}
private static Assembly MatchWithLoadedAssembly(AssemblyName searchFor, AppDomain appDomain)
{
return
MatchWithLoadedAssembly(searchFor, appDomain.GetAssemblies()) ??
MatchWithLoadedAssembly(searchFor, appDomain.ReflectionOnlyGetAssemblies());
}
private static Assembly MatchWithLoadedAssembly(AssemblyName searchFor)
{
return MatchWithLoadedAssembly(searchFor, AppDomain.CurrentDomain);
}
private static bool InterpretFileLoadException(string asmPathName, out string[] complaints)
{
var matched = MatchWithLoadedAssembly(AssemblyName.GetAssemblyName(asmPathName));
if (null == matched)
{
// something unexpected has occurred. rethrow until we know what we're catching.
complaints = null;
return false;
}
if (matched.Location != asmPathName)
{
complaints = new string[] {String.Format("A conflicting assembly has already been loaded from {0}.", matched.Location)};
// exception was anticipated.
return true;
}
// we've been asked to not log this because it's not indicative of a problem.
complaints = null;
//complaints = new string[] {"Assembly has already been loaded into current application domain."};
// exception was anticipated.
return true;
}
private string[] ReportUnexpectedException(Exception exception)
{
const string msg = "An unexpected exception occurred while attempting to load an assembly.";
logger.Error(ErrorCode.Loader_UnexpectedException, msg, exception);
return new string[] {msg};
}
private bool ReflectionOnlyLoadAssembly(string pathName, out Assembly assembly, out string[] complaints)
{
try
{
if (SimulateReflectionOnlyLoadFailure)
throw NewTestUnexpectedException();
assembly = Assembly.ReflectionOnlyLoadFrom(pathName);
if (!IsCompatibleWithCurrentProcess(assembly, out complaints))
{
assembly = null;
return false;
}
}
catch (BadImageFormatException)
{
complaints = new[] { "The image was not a CLR image." };
assembly = null;
return false;
}
catch (FileLoadException e)
{
assembly = null;
if (!InterpretFileLoadException(pathName, out complaints))
complaints = ReportUnexpectedException(e);
if (RethrowDiscoveryExceptions)
throw;
return false;
}
catch (Exception e)
{
assembly = null;
complaints = ReportUnexpectedException(e);
if (RethrowDiscoveryExceptions)
throw;
return false;
}
complaints = null;
return true;
}
private bool IsCompatibleWithCurrentProcess(Assembly assembly, out string[] complaints)
{
complaints = null;
ImageFileMachine machine;
PortableExecutableKinds peKind;
assembly.Modules.First().GetPEKind(out peKind, out machine);
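// ILOnly + I386 in the PE header is how AnyCPU assemblies are marked; they can run in any process and are always compatible.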
if (peKind == PortableExecutableKinds.ILOnly && machine == ImageFileMachine.I386)
{
// anycpu
return true;
}
if (peKind.HasFlag(PortableExecutableKinds.NotAPortableExecutableImage) ||
peKind.HasFlag(PortableExecutableKinds.Unmanaged32Bit))
{
// this block of code should never run since the assembly was successfully loaded
throw new InvalidOperationException("Unexpected block of code reached");
}
if ((peKind.HasFlag(PortableExecutableKinds.Required32Bit) && Environment.Is64BitProcess) ||
(peKind.HasFlag(PortableExecutableKinds.PE32Plus) && !Environment.Is64BitProcess))
{
// targets wrong bitness
complaints = new[] { $"The assembly {assembly.FullName} is compiled for a different platform than the running process" };
return false;
}
if (machine != this.exeInfo.Value.MachineType)
{
complaints = new[] { $"The assembly {assembly.FullName} was compiled for {machine} but the current exe was compiled for {this.exeInfo.Value.MachineType}" };
return false;
}
return true;
}
private void LogComplaint(string pathName, string complaint)
{
LogComplaints(pathName, new string[] { complaint });
}
private void LogComplaints(string pathName, IEnumerable<string> complaints)
{
var distinctComplaints = complaints.Distinct();
// generate feedback so that the operator can determine why her DLL didn't load.
var msg = new StringBuilder();
string bullet = Environment.NewLine + "\t* ";
msg.Append(String.Format("User assembly ignored: {0}", pathName));
int count = 0;
foreach (var i in distinctComplaints)
{
msg.Append(bullet);
msg.Append(i);
++count;
}
if (0 == count)
throw new InvalidOperationException("No complaint provided for assembly.");
// we can't use an error code here because we want each log message to be displayed.
logger.Info(msg.ToString());
}
private static ExeImageInfo LoadImageInformation()
{
PortableExecutableKinds peKind;
ImageFileMachine machine;
Assembly.GetEntryAssembly().Modules.First().GetPEKind(out peKind, out machine);
return new ExeImageInfo {PEKind = peKind, MachineType = machine};
}
private static AggregateException NewTestUnexpectedException()
{
var inner = new Exception[] { new OrleansException("Inner Exception #1"), new OrleansException("Inner Exception #2") };
return new AggregateException("Unexpected AssemblyLoader Exception Used for Unit Tests", inner);
}
private bool ShouldLoadAssembly(string pathName)
{
Assembly assembly;
string[] loadComplaints;
if (!ReflectionOnlyLoadAssembly(pathName, out assembly, out loadComplaints))
{
if (loadComplaints == null || loadComplaints.Length == 0)
return false;
LogComplaints(pathName, loadComplaints);
return false;
}
if (assembly.IsDynamic)
{
LogComplaint(pathName, "Assembly is dynamic (not supported).");
return false;
}
var criteriaComplaints = new List<string>();
foreach (var i in reflectionCriteria)
{
IEnumerable<string> complaints;
try
{
if (SimulateLoadCriteriaFailure)
throw NewTestUnexpectedException();
if (i.EvaluateCandidate(assembly, out complaints))
return true;
}
catch (Exception ex)
{
complaints = ReportUnexpectedException(ex);
if (RethrowDiscoveryExceptions)
throw;
}
criteriaComplaints.AddRange(complaints);
}
LogComplaints(pathName, criteriaComplaints);
return false;
}
private bool AssemblyPassesLoadCriteria(string pathName)
{
return !ShouldExcludeAssembly(pathName) && ShouldLoadAssembly(pathName);
}
private class ExeImageInfo
{
public PortableExecutableKinds PEKind;
public ImageFileMachine MachineType;
}
#endif
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
private static void AndSingle()
{
var test = new SimpleBinaryOpTest__AndSingle();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
if (Avx.IsSupported)
{
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
// Validates basic functionality works, using LoadAligned
test.RunBasicScenario_LoadAligned();
}
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
if (Avx.IsSupported)
{
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
// Validates calling via reflection works, using LoadAligned
test.RunReflectionScenario_LoadAligned();
}
// Validates passing a static member works
test.RunClsVarScenario();
if (Avx.IsSupported)
{
// Validates passing a static member works, using pinning and Load
test.RunClsVarScenario_Load();
}
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
if (Avx.IsSupported)
{
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
// Validates passing a local works, using LoadAligned
test.RunLclVarScenario_LoadAligned();
}
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
if (Avx.IsSupported)
{
// Validates passing the field of a local class works, using pinning and Load
test.RunClassLclFldScenario_Load();
}
// Validates passing an instance member of a class works
test.RunClassFldScenario();
if (Avx.IsSupported)
{
// Validates passing an instance member of a class works, using pinning and Load
test.RunClassFldScenario_Load();
}
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
if (Avx.IsSupported)
{
// Validates passing the field of a local struct works, using pinning and Load
test.RunStructLclFldScenario_Load();
}
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
if (Avx.IsSupported)
{
// Validates passing an instance member of a struct works, using pinning and Load
test.RunStructFldScenario_Load();
}
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class SimpleBinaryOpTest__AndSingle
{
private struct DataTable
{
private byte[] inArray1;
private byte[] inArray2;
private byte[] outArray;
private GCHandle inHandle1;
private GCHandle inHandle2;
private GCHandle outHandle;
private ulong alignment;
public DataTable(Single[] inArray1, Single[] inArray2, Single[] outArray, int alignment)
{
int sizeOfinArray1 = inArray1.Length * Unsafe.SizeOf<Single>();
int sizeOfinArray2 = inArray2.Length * Unsafe.SizeOf<Single>();
int sizeOfoutArray = outArray.Length * Unsafe.SizeOf<Single>();
if ((alignment != 32 && alignment != 16) || (alignment * 2) < sizeOfinArray1 || (alignment * 2) < sizeOfinArray2 || (alignment * 2) < sizeOfoutArray)
{
throw new ArgumentException("Invalid value of alignment");
}
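// Over-allocate each buffer to twice the requested alignment so an aligned region large enough for the data always fits inside it.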
this.inArray1 = new byte[alignment * 2];
this.inArray2 = new byte[alignment * 2];
this.outArray = new byte[alignment * 2];
this.inHandle1 = GCHandle.Alloc(this.inArray1, GCHandleType.Pinned);
this.inHandle2 = GCHandle.Alloc(this.inArray2, GCHandleType.Pinned);
this.outHandle = GCHandle.Alloc(this.outArray, GCHandleType.Pinned);
this.alignment = (ulong)alignment;
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray1Ptr), ref Unsafe.As<Single, byte>(ref inArray1[0]), (uint)sizeOfinArray1);
Unsafe.CopyBlockUnaligned(ref Unsafe.AsRef<byte>(inArray2Ptr), ref Unsafe.As<Single, byte>(ref inArray2[0]), (uint)sizeOfinArray2);
}
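// The following properties expose views into the pinned buffers, rounded up to the requested alignment so the LoadAligned scenarios can read and write them safely.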
public void* inArray1Ptr => Align((byte*)(inHandle1.AddrOfPinnedObject().ToPointer()), alignment);
public void* inArray2Ptr => Align((byte*)(inHandle2.AddrOfPinnedObject().ToPointer()), alignment);
public void* outArrayPtr => Align((byte*)(outHandle.AddrOfPinnedObject().ToPointer()), alignment);
public void Dispose()
{
inHandle1.Free();
inHandle2.Free();
outHandle.Free();
}
private static unsafe void* Align(byte* buffer, ulong expectedAlignment)
{
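// Round the pointer up to the next multiple of expectedAlignment (which must be a power of two for this mask trick to work).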
return (void*)(((ulong)buffer + expectedAlignment - 1) & ~(expectedAlignment - 1));
}
}
private struct TestStruct
{
public Vector256<Single> _fld1;
public Vector256<Single> _fld2;
public static TestStruct Create()
{
var testStruct = new TestStruct();
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref testStruct._fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref testStruct._fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<Single>>());
return testStruct;
}
public void RunStructFldScenario(SimpleBinaryOpTest__AndSingle testClass)
{
var result = Avx.And(_fld1, _fld2);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
public void RunStructFldScenario_Load(SimpleBinaryOpTest__AndSingle testClass)
{
fixed (Vector256<Single>* pFld1 = &_fld1)
fixed (Vector256<Single>* pFld2 = &_fld2)
{
var result = Avx.And(
Avx.LoadVector256((Single*)(pFld1)),
Avx.LoadVector256((Single*)(pFld2))
);
Unsafe.Write(testClass._dataTable.outArrayPtr, result);
testClass.ValidateResult(_fld1, _fld2, testClass._dataTable.outArrayPtr);
}
}
}
private static readonly int LargestVectorSize = 32;
private static readonly int Op1ElementCount = Unsafe.SizeOf<Vector256<Single>>() / sizeof(Single);
private static readonly int Op2ElementCount = Unsafe.SizeOf<Vector256<Single>>() / sizeof(Single);
private static readonly int RetElementCount = Unsafe.SizeOf<Vector256<Single>>() / sizeof(Single);
private static Single[] _data1 = new Single[Op1ElementCount];
private static Single[] _data2 = new Single[Op2ElementCount];
private static Vector256<Single> _clsVar1;
private static Vector256<Single> _clsVar2;
private Vector256<Single> _fld1;
private Vector256<Single> _fld2;
private DataTable _dataTable;
static SimpleBinaryOpTest__AndSingle()
{
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref _clsVar1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref _clsVar2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<Single>>());
}
public SimpleBinaryOpTest__AndSingle()
{
Succeeded = true;
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref _fld1), ref Unsafe.As<Single, byte>(ref _data1[0]), (uint)Unsafe.SizeOf<Vector256<Single>>());
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Single>, byte>(ref _fld2), ref Unsafe.As<Single, byte>(ref _data2[0]), (uint)Unsafe.SizeOf<Vector256<Single>>());
for (var i = 0; i < Op1ElementCount; i++) { _data1[i] = TestLibrary.Generator.GetSingle(); }
for (var i = 0; i < Op2ElementCount; i++) { _data2[i] = TestLibrary.Generator.GetSingle(); }
_dataTable = new DataTable(_data1, _data2, new Single[RetElementCount], LargestVectorSize);
}
public bool IsSupported => Avx.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = Avx.And(
Unsafe.Read<Vector256<Single>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector256<Single>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_Load));
var result = Avx.And(
Avx.LoadVector256((Single*)(_dataTable.inArray1Ptr)),
Avx.LoadVector256((Single*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_LoadAligned));
var result = Avx.And(
Avx.LoadAlignedVector256((Single*)(_dataTable.inArray1Ptr)),
Avx.LoadAlignedVector256((Single*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(Avx).GetMethod(nameof(Avx.And), new Type[] { typeof(Vector256<Single>), typeof(Vector256<Single>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector256<Single>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector256<Single>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_Load));
var result = typeof(Avx).GetMethod(nameof(Avx.And), new Type[] { typeof(Vector256<Single>), typeof(Vector256<Single>) })
.Invoke(null, new object[] {
Avx.LoadVector256((Single*)(_dataTable.inArray1Ptr)),
Avx.LoadVector256((Single*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_LoadAligned));
var result = typeof(Avx).GetMethod(nameof(Avx.And), new Type[] { typeof(Vector256<Single>), typeof(Vector256<Single>) })
.Invoke(null, new object[] {
Avx.LoadAlignedVector256((Single*)(_dataTable.inArray1Ptr)),
Avx.LoadAlignedVector256((Single*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Single>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = Avx.And(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunClsVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario_Load));
fixed (Vector256<Single>* pClsVar1 = &_clsVar1)
fixed (Vector256<Single>* pClsVar2 = &_clsVar2)
{
var result = Avx.And(
Avx.LoadVector256((Single*)(pClsVar1)),
Avx.LoadVector256((Single*)(pClsVar2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var op1 = Unsafe.Read<Vector256<Single>>(_dataTable.inArray1Ptr);
var op2 = Unsafe.Read<Vector256<Single>>(_dataTable.inArray2Ptr);
var result = Avx.And(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_Load));
var op1 = Avx.LoadVector256((Single*)(_dataTable.inArray1Ptr));
var op2 = Avx.LoadVector256((Single*)(_dataTable.inArray2Ptr));
var result = Avx.And(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_LoadAligned()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_LoadAligned));
var op1 = Avx.LoadAlignedVector256((Single*)(_dataTable.inArray1Ptr));
var op2 = Avx.LoadAlignedVector256((Single*)(_dataTable.inArray2Ptr));
var result = Avx.And(op1, op2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(op1, op2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new SimpleBinaryOpTest__AndSingle();
var result = Avx.And(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunClassLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario_Load));
var test = new SimpleBinaryOpTest__AndSingle();
fixed (Vector256<Single>* pFld1 = &test._fld1)
fixed (Vector256<Single>* pFld2 = &test._fld2)
{
var result = Avx.And(
Avx.LoadVector256((Single*)(pFld1)),
Avx.LoadVector256((Single*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = Avx.And(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunClassFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario_Load));
fixed (Vector256<Single>* pFld1 = &_fld1)
fixed (Vector256<Single>* pFld2 = &_fld2)
{
var result = Avx.And(
Avx.LoadVector256((Single*)(pFld1)),
Avx.LoadVector256((Single*)(pFld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Avx.And(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructLclFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario_Load));
var test = TestStruct.Create();
var result = Avx.And(
Avx.LoadVector256((Single*)(&test._fld1)),
Avx.LoadVector256((Single*)(&test._fld2))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunStructFldScenario_Load()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario_Load));
var test = TestStruct.Create();
test.RunStructFldScenario_Load(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(Vector256<Single> op1, Vector256<Single> op2, void* result, [CallerMemberName] string method = "")
{
Single[] inArray1 = new Single[Op1ElementCount];
Single[] inArray2 = new Single[Op2ElementCount];
Single[] outArray = new Single[RetElementCount];
Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), op1);
Unsafe.WriteUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), op2);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<Single>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* op1, void* op2, void* result, [CallerMemberName] string method = "")
{
Single[] inArray1 = new Single[Op1ElementCount];
Single[] inArray2 = new Single[Op2ElementCount];
Single[] outArray = new Single[RetElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(op1), (uint)Unsafe.SizeOf<Vector256<Single>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(op2), (uint)Unsafe.SizeOf<Vector256<Single>>());
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Single, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), (uint)Unsafe.SizeOf<Vector256<Single>>());
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(Single[] left, Single[] right, Single[] result, [CallerMemberName] string method = "")
{
bool succeeded = true;
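// Avx.And on Single operands is a bitwise operation, so the expected result is computed on the raw IEEE-754 bit patterns via SingleToInt32Bits.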
if ((BitConverter.SingleToInt32Bits(left[0]) & BitConverter.SingleToInt32Bits(right[0])) != BitConverter.SingleToInt32Bits(result[0]))
{
succeeded = false;
}
else
{
for (var i = 1; i < RetElementCount; i++)
{
if ((BitConverter.SingleToInt32Bits(left[i]) & BitConverter.SingleToInt32Bits(right[i])) != BitConverter.SingleToInt32Bits(result[i]))
{
succeeded = false;
break;
}
}
}
if (!succeeded)
{
TestLibrary.TestFramework.LogInformation($"{nameof(Avx)}.{nameof(Avx.And)}<Single>(Vector256<Single>, Vector256<Single>): {method} failed:");
TestLibrary.TestFramework.LogInformation($" left: ({string.Join(", ", left)})");
TestLibrary.TestFramework.LogInformation($" right: ({string.Join(", ", right)})");
TestLibrary.TestFramework.LogInformation($" result: ({string.Join(", ", result)})");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
| |
// Copyright (c) 2007, Clarius Consulting, Manas Technology Solutions, InSTEDD.
// All rights reserved. Licensed under the BSD 3-Clause License; see License.txt.
using System;
using System.Collections.Generic;
using System.Linq.Expressions;
using System.Reflection;
using Xunit;
#if FEATURE_SERIALIZATION
using System.Runtime.Serialization;
#endif
namespace Moq.Tests
{
public class MockFixture
{
[Fact]
public void CreatesMockAndExposesInterface()
{
var mock = new Mock<IComparable>();
IComparable comparable = mock.Object;
Assert.NotNull(comparable);
}
[Fact]
public void CanBeNamedForEasierDebugging()
{
var mock = new Mock<IComparable>();
mock.Name = "my mock";
Assert.Equal("my mock", mock.ToString());
}
[Fact]
public void HasADefaultNameThatIsUnique()
{
var mock = new Mock<IComparable>();
var mock2 = new Mock<IComparable>();
Assert.NotEqual(mock.ToString(), mock2.ToString());
}
[Fact]
public void HasADefaultNameThatIncludesItsTypeAndThatItsAMock()
{
var mock = new Mock<IComparable>();
Assert.Contains("IComparable", mock.ToString());
Assert.Contains("mock", mock.ToString().ToLower());
}
[Fact]
public void HasADefaultNameThatIncludesItsGenericParameters()
{
var mock = new Mock<Dictionary<int, string>>();
Assert.Contains("Dictionary<int, string>", mock.ToString());
}
[Fact]
public void PassesItsNameOnToTheResultingMockObjectWhenMockingInterfaces()
{
var mock = new Mock<IComparable>();
Assert.Equal(mock.ToString() + ".Object", mock.Object.ToString());
}
[Fact]
public void PassesItsNameOnToTheResultingMockObjectWhenMockingClasses()
{
var mock = new Mock<Foo>();
Assert.Equal(mock.ToString() + ".Object", mock.Object.ToString());
}
public class ToStringOverrider
{
public override string ToString()
{
return "real value";
}
}
[Fact]
public void OverriddenToStringMethodsCallUnderlyingImplementationInPartialMocks()
{
var partialMock = new Mock<ToStringOverrider>() { CallBase = true };
Assert.Equal("real value", partialMock.Object.ToString());
}
[Fact]
public void OverriddenToStringMethodsAreStubbedWithDefaultValuesInFullMocks()
{
var fullMock = new Mock<ToStringOverrider>();
Assert.Null(fullMock.Object.ToString());
}
[Fact]
public void DefaultValue_can_be_set_to_Empty()
{
var mock = new Mock<object>();
mock.DefaultValue = DefaultValue.Empty;
Assert.Equal(DefaultValue.Empty, mock.DefaultValue);
}
[Fact]
public void DefaultValue_can_be_set_to_Mock()
{
var mock = new Mock<object>();
mock.DefaultValue = DefaultValue.Mock;
Assert.Equal(DefaultValue.Mock, mock.DefaultValue);
}
[Theory]
[InlineData(DefaultValue.Custom)]
[InlineData((DefaultValue)(-1))]
public void DefaultValue_cannot_be_set_to_anything_other_than_Empty_or_Mock(DefaultValue defaultValue)
{
var mock = new Mock<object>();
Assert.Throws<ArgumentOutOfRangeException>(() =>
{
mock.DefaultValue = defaultValue;
});
}
[Fact]
public void DefaultValue_when_Empty_then_DefaultValueProvider_has_Kind_Empty()
{
var mock = new Mock<object>();
mock.DefaultValue = DefaultValue.Empty;
Assert.Equal(DefaultValue.Empty, mock.DefaultValueProvider.Kind);
}
[Fact]
public void DefaultValue_when_Mock_then_DefaultValueProvider_has_Kind_Mock()
{
var mock = new Mock<object>();
mock.DefaultValue = DefaultValue.Mock;
Assert.Equal(DefaultValue.Mock, mock.DefaultValueProvider.Kind);
}
[Fact]
public void DefaultValueProvider_cannot_be_set_to_null()
{
var mock = new Mock<object>();
Assert.Throws<ArgumentNullException>(() =>
{
mock.DefaultValueProvider = null;
});
}
[Fact]
public void DefaultValueProvider_when_custom_provider_then_DefaultValue_is_Custom()
{
var mock = new Mock<object>();
var customDefaultValueProvider = new Mock<DefaultValueProvider>() { CallBase = true }.Object;
mock.DefaultValueProvider = customDefaultValueProvider;
Assert.Equal(DefaultValue.Custom, mock.DefaultValue);
}
[Fact]
public void ThrowsIfNullExpectAction()
{
var mock = new Mock<IComparable>();
Assert.Throws<ArgumentNullException>(() => mock.Setup((Expression<Action<IComparable>>)null));
}
[Fact]
public void ThrowsIfNullExpectFunction()
{
var mock = new Mock<IComparable>();
Assert.Throws<ArgumentNullException>(() => mock.Setup((Expression<Func<IComparable, string>>)null));
}
[Fact]
public void ThrowsIfExpectationSetForField()
{
var mock = new Mock<FooBase>();
Assert.Throws<ArgumentException>(() => mock.Setup(x => x.ValueField));
}
[Fact]
public void CallParameterCanBeVariable()
{
int value = 5;
var mock = new Mock<IFoo>();
mock.Setup(x => x.Echo(value)).Returns(() => value * 2);
Assert.Equal(value * 2, mock.Object.Echo(value));
}
[Fact]
public void CallParameterCanBeMethodCall()
{
int value = 5;
var mock = new Mock<IFoo>();
mock.Setup(x => x.Echo(GetValue(value))).Returns(() => value * 2);
Assert.Equal(value * 2, mock.Object.Echo(value * 2));
}
private int GetValue(int value)
{
return value * 2;
}
[Fact]
public void ExpectsVoidCall()
{
var mock = new Mock<IFoo>();
mock.Setup(x => x.Submit());
mock.Object.Submit();
}
[Fact]
public void ThrowsIfExpectationThrows()
{
var mock = new Mock<IFoo>();
mock.Setup(x => x.Submit()).Throws(new FormatException());
Assert.Throws<FormatException>(() => mock.Object.Submit());
}
[Fact]
public void ThrowsIfExpectationThrowsWithGenericsExceptionType()
{
var mock = new Mock<IFoo>();
mock.Setup(x => x.Submit()).Throws<FormatException>();
Assert.Throws<FormatException>(() => mock.Object.Submit());
}
[Fact]
public void ReturnsServiceFromServiceProvider()
{
var provider = new Mock<IServiceProvider>();
provider.Setup(x => x.GetService(typeof(IFooService))).Returns(new FooService());
Assert.True(provider.Object.GetService(typeof(IFooService)) is FooService);
}
[Fact]
public void InvokesLastExpectationThatMatches()
{
var mock = new Mock<IFoo>();
mock.Setup(x => x.Execute(It.IsAny<string>())).Throws(new ArgumentException());
mock.Setup(x => x.Execute("ping")).Returns("I'm alive!");
Assert.Equal("I'm alive!", mock.Object.Execute("ping"));
Assert.Throws<ArgumentException>(() => mock.Object.Execute("asdf"));
}
[Fact]
public void MockObjectIsAssignableToMockedInterface()
{
var mock = new Mock<IFoo>();
Assert.True(typeof(IFoo).IsAssignableFrom(mock.Object.GetType()));
}
[Fact]
public void MockObjectsEqualityIsReferenceEquals()
{
var mock1 = new Mock<IFoo>();
var mock2 = new Mock<IFoo>();
Assert.True(mock1.Object.Equals(mock1.Object));
Assert.False(mock1.Object.Equals(mock2.Object));
}
[Fact]
public void HashCodeIsDifferentForEachMock()
{
var mock1 = new Mock<IFoo>();
var mock2 = new Mock<IFoo>();
Assert.Equal(mock1.Object.GetHashCode(), mock1.Object.GetHashCode());
Assert.Equal(mock2.Object.GetHashCode(), mock2.Object.GetHashCode());
Assert.NotEqual(mock1.Object.GetHashCode(), mock2.Object.GetHashCode());
}
[Fact]
public void ToStringIsNotNullOrEmpty()
{
var mock = new Mock<IFoo>();
Assert.False(String.IsNullOrEmpty(mock.Object.ToString()));
}
[Fact]
public void OverridesObjectMethods()
{
var mock = new Mock<IFoo>();
mock.Setup(x => x.GetHashCode()).Returns(1);
mock.Setup(x => x.ToString()).Returns("foo");
mock.Setup(x => x.Equals(It.IsAny<object>())).Returns(true);
Assert.Equal("foo", mock.Object.ToString());
Assert.Equal(1, mock.Object.GetHashCode());
Assert.True(mock.Object.Equals(new object()));
}
[Fact]
public void OverridesBehaviorFromAbstractClass()
{
var mock = new Mock<FooBase>();
mock.CallBase = true;
mock.Setup(x => x.Check("foo")).Returns(false);
Assert.False(mock.Object.Check("foo"));
Assert.True(mock.Object.Check("bar"));
}
[Fact]
public void CanSetupToString()
{
var mock = new Mock<Foo>();
mock.Setup(x => x.ToString()).Returns("This is me");
Assert.Equal("This is me", mock.Object.ToString());
}
[Fact]
public void CanSetupToStringForInterface()
{
var mock = new Mock<IFoo>();
mock.Setup(x => x.ToString()).Returns("This is me");
Assert.Equal("This is me", mock.Object.ToString());
}
[Fact]
public void CanSetupGetHashCode()
{
var mock = new Mock<Foo>();
mock.Setup(x => x.GetHashCode()).Returns(527);
Assert.Equal(527, mock.Object.GetHashCode());
}
[Fact]
public void CanSetupGetHashCodeForInterface()
{
var mock = new Mock<IFoo>();
mock.Setup(x => x.GetHashCode()).Returns(527);
Assert.Equal(527, mock.Object.GetHashCode());
}
[Fact]
public void CanSetupObjectEquals()
{
var mock = new Mock<Foo>();
mock.Setup(x => x.Equals(It.IsAny<object>())).Returns<object>((obj) => false);
var foo = mock.Object;
Assert.True(!foo.Equals(foo));
}
[Fact]
public void CanSetupObjectEqualsForInterface()
{
var mock = new Mock<IFoo>();
mock.Setup(x => x.Equals(It.IsAny<object>())).Returns<object>((obj) => false);
var foo = mock.Object;
Assert.True(!foo.Equals(foo));
}
[Fact]
public void CallsUnderlyingClassEquals()
{
var mock = new Mock<FooOverrideEquals>();
var mock2 = new Mock<FooOverrideEquals>();
mock.CallBase = true;
mock.Object.Name = "Foo";
mock2.Object.Name = "Foo";
Assert.True(mock.Object.Equals(mock2.Object));
}
[Fact]
public void ThrowsIfSealedClass()
{
Assert.Throws<NotSupportedException>(() => new Mock<FooSealed>());
}
[Fact]
public void ThrowsIfExpectOnNonVirtual()
{
var mock = new Mock<FooBase>();
Assert.Throws<NotSupportedException>(() => mock.Setup(x => x.True()).Returns(false));
}
[Fact]
public void OverridesPreviousExpectation()
{
var mock = new Mock<IFoo>();
mock.Setup(x => x.Echo(1)).Returns(5);
Assert.Equal(5, mock.Object.Echo(1));
mock.Setup(x => x.Echo(1)).Returns(10);
Assert.Equal(10, mock.Object.Echo(1));
}
[Fact]
public void ConstructsObjectsWithCtorArguments()
{
var mock = new Mock<FooWithConstructors>(MockBehavior.Default, "Hello", 26);
Assert.Equal("Hello", mock.Object.StringValue);
Assert.Equal(26, mock.Object.IntValue);
// Should also construct without args.
mock = new Mock<FooWithConstructors>(MockBehavior.Default);
Assert.Null(mock.Object.StringValue);
Assert.Equal(0, mock.Object.IntValue);
}
[Fact]
public void ConstructsClassWithNoDefaultConstructor()
{
var mock = new Mock<ClassWithNoDefaultConstructor>(MockBehavior.Default, "Hello", 26);
Assert.Equal("Hello", mock.Object.StringValue);
Assert.Equal(26, mock.Object.IntValue);
}
[Fact]
public void ConstructsClassWithNoDefaultConstructorAndNullValue()
{
var mock = new Mock<ClassWithNoDefaultConstructor>(MockBehavior.Default, null, 26);
Assert.Null(mock.Object.StringValue);
Assert.Equal(26, mock.Object.IntValue);
}
[Fact]
public void ThrowsIfNoMatchingConstructorFound()
{
try
{
Console.WriteLine(new Mock<ClassWithNoDefaultConstructor>(25, true).Object);
Assert.True(false, "Should have thrown an exception since constructor does not exist.");
}
catch (Exception)
{
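// Swallow the exception: reaching this catch block means the expected constructor-mismatch failure occurred.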
}
}
[Fact]
public void ThrowsIfArgumentsPassedForInterface()
{
Assert.Throws<ArgumentException>(() => new Mock<IFoo>(25, true));
}
[Fact]
public void ThrowsOnStrictWithExpectButNoReturns()
{
var mock = new Mock<IFoo>(MockBehavior.Strict);
mock.Setup(x => x.Execute("ping"));
try
{
mock.Object.Execute("ping");
Assert.True(false, "SHould throw");
}
catch (MockException mex)
{
Assert.Equal(MockExceptionReasons.ReturnValueRequired, mex.Reasons);
}
}
[Fact]
public void AllowsSetupNewHiddenProperties()
{
var value = new Mock<INewBar>().Object;
var target = new Mock<INewFoo>();
target.As<IFoo>().SetupGet(x => x.Bar).Returns(value);
target.Setup(x => x.Bar).Returns(value);
Assert.Equal(target.Object.Bar, target.As<IFoo>().Object.Bar);
}
[Fact]
public void AllowsSetupNewHiddenBaseProperty()
{
var value = new Mock<INewBar>().Object;
var target = new Mock<INewFoo>();
target.As<IFoo>().SetupGet(x => x.Bar).Returns(value);
Assert.Equal(value, target.As<IFoo>().Object.Bar);
Assert.Null(target.Object.Bar);
}
[Fact]
public void AllowsSetupNewHiddenInheritedProperty()
{
var value = new Mock<INewBar>().Object;
var target = new Mock<INewFoo>();
target.As<IFoo>();
target.SetupGet(x => x.Bar).Returns(value);
Assert.Equal(value, target.Object.Bar);
Assert.Null(target.As<IFoo>().Object.Bar);
}
[Fact]
public void ExpectsPropertySetter()
{
var mock = new Mock<IFoo>();
int? value = 0;
mock.SetupSet(foo => foo.Value = It.IsAny<int?>())
.Callback<int?>(i => value = i);
mock.Object.Value = 5;
Assert.Equal(5, value);
}
[Fact]
public void ExpectsPropertySetterLambda()
{
var mock = new Mock<IFoo>();
mock.SetupSet(foo => foo.Count = 5);
var ex = Assert.Throws<MockException>(() => mock.VerifyAll());
Assert.True(ex.IsVerificationError);
mock.Object.Count = 6;
ex = Assert.Throws<MockException>(() => mock.VerifyAll());
Assert.True(ex.IsVerificationError);
mock.Object.Count = 5;
mock.VerifyAll();
}
[Fact]
public void CallbackReceivesValueWithPropertySetterLambda()
{
var mock = new Mock<IFoo>();
int value = 0;
int value2 = 0;
mock.SetupSet(foo => foo.Count = 6).Callback<int>(v => value = v);
mock.SetupSet<int>(foo => foo.Count = 3).Callback(v => value2 = v);
mock.Object.Count = 6;
Assert.Equal(6, value);
Assert.Equal(0, value2);
mock.Object.Count = 3;
Assert.Equal(3, value2);
}
[Fact]
public void SetterLambdaUsesItIsAnyMatcher()
{
var mock = new Mock<IFoo>();
mock.SetupSet(foo => foo.Count = It.IsAny<int>());
var ex = Assert.Throws<MockException>(() => mock.VerifyAll());
Assert.True(ex.IsVerificationError);
mock.Object.Count = 6;
mock.VerifyAll();
}
[Fact]
public void SetterLambdaUsesItIsInRangeMatcher()
{
var mock = new Mock<IFoo>();
mock.SetupSet(foo => foo.Count = It.IsInRange(1, 5, Range.Inclusive));
var ex = Assert.Throws<MockException>(() => mock.VerifyAll());
Assert.True(ex.IsVerificationError);
mock.Object.Count = 6;
ex = Assert.Throws<MockException>(() => mock.VerifyAll());
Assert.True(ex.IsVerificationError);
mock.Object.Count = 5;
mock.VerifyAll();
}
[Fact]
public void SetterLambdaUsesItIsPredicateMatcher()
{
var mock = new Mock<IFoo>();
mock.SetupSet(foo => foo.Count = It.Is<int>(v => v % 2 == 0));
var ex = Assert.Throws<MockException>(() => mock.VerifyAll());
Assert.True(ex.IsVerificationError);
mock.Object.Count = 7;
ex = Assert.Throws<MockException>(() => mock.VerifyAll());
Assert.True(ex.IsVerificationError);
mock.Object.Count = 4;
mock.VerifyAll();
}
[Fact]
public void SetterLambdaCannotHaveMultipleSetups()
{
var mock = new Mock<IFoo>();
mock.SetupSet(foo => foo.Count = It.IsAny<int>())
.Throws<ArgumentOutOfRangeException>();
Assert.Throws<ArgumentOutOfRangeException>(() => mock.Object.Count = 5);
mock.SetupSet(foo => foo.Count = It.IsInRange(1, 5, Range.Inclusive))
.Throws<ArgumentException>();
Assert.Throws<ArgumentException>(() => mock.Object.Count = 5);
}
[Fact]
public void SetterLambdaDoesNotCountAsInvocation()
{
var mock = new Mock<IFoo>();
mock.SetupSet(foo => foo.Count = 5);
var ex = Assert.Throws<MockException>(() => mock.VerifyAll());
Assert.True(ex.IsVerificationError);
mock.Object.Count = 5;
mock.VerifyAll();
}
[Fact]
public void SetterLambdaWithStrictMockWorks()
{
var mock = new Mock<IFoo>(MockBehavior.Strict);
mock.SetupSet(foo => foo.Count = 5);
}
[Fact]
public void ShouldAllowMultipleCustomMatcherWithArguments()
{
var mock = new Mock<IFoo>();
mock.Setup(m => m.Echo(IsMultipleOf(2))).Returns(2);
mock.Setup(m => m.Echo(IsMultipleOf(3))).Returns(3);
Assert.Equal(2, mock.Object.Echo(4));
Assert.Equal(2, mock.Object.Echo(8));
Assert.Equal(3, mock.Object.Echo(9));
Assert.Equal(3, mock.Object.Echo(3));
}
private int IsMultipleOf(int value)
{
return Match.Create<int>(i => i % value == 0);
}
[Fact]
public void ExpectsPropertySetterLambdaCoercesNullable()
{
var mock = new Mock<IFoo>();
mock.SetupSet(foo => foo.Value = 5);
var ex = Assert.Throws<MockException>(() => mock.VerifyAll());
Assert.True(ex.IsVerificationError);
mock.Object.Value = 6;
ex = Assert.Throws<MockException>(() => mock.VerifyAll());
Assert.True(ex.IsVerificationError);
mock.Object.Value = 5;
mock.VerifyAll();
}
[Fact]
public void ExpectsPropertySetterLambdaValueReference()
{
var mock = new Mock<IFoo>();
var obj = new object();
mock.SetupSet(foo => foo.Object = obj);
var ex = Assert.Throws<MockException>(() => mock.VerifyAll());
Assert.True(ex.IsVerificationError);
mock.Object.Object = new object();
ex = Assert.Throws<MockException>(() => mock.VerifyAll());
Assert.True(ex.IsVerificationError);
mock.Object.Object = obj;
mock.VerifyAll();
}
[Fact]
public void ExpectsPropertySetterLambdaRecursive()
{
var mock = new Mock<IFoo>();
mock.SetupSet<string>(foo => foo.Bar.Value = "bar");
var ex = Assert.Throws<MockException>(() => mock.VerifyAll());
Assert.True(ex.IsVerificationError);
mock.Object.Bar.Value = "bar";
mock.VerifyAll();
}
[Fact]
public void ExpectsPropertySetterWithNullValue()
{
var mock = new Mock<IFoo>(MockBehavior.Strict);
mock.SetupSet(m => m.Value = null);
Assert.Throws<MockException>(() => { mock.Object.Value = 5; });
var ex = Assert.Throws<MockException>(() => mock.VerifyAll());
Assert.True(ex.IsVerificationError);
mock.Object.Value = null;
mock.VerifyAll();
mock.VerifySet(m => m.Value = It.IsAny<int?>());
}
[Fact]
public void ThrowsIfPropertySetterWithWrongValue()
{
var mock = new Mock<IFoo>(MockBehavior.Strict);
mock.SetupSet(m => m.Value = 5);
Assert.Throws<MockException>(() => { mock.Object.Value = 6; });
}
[Fact]
public void ExpectsPropertyGetter()
{
var mock = new Mock<IFoo>();
bool called = false;
mock.SetupGet(x => x.Value)
.Callback(() => called = true)
.Returns(25);
Assert.Equal(25, mock.Object.Value);
Assert.True(called);
}
[Fact]
public void ThrowsIfExpectPropertySetterOnMethod()
{
var mock = new Mock<IFoo>();
Assert.Throws<ArgumentException>(() => mock.SetupSet(foo => foo.Echo(5)));
}
[Fact]
public void ThrowsIfExpectPropertyGetterOnMethod()
{
var mock = new Mock<IFoo>();
Assert.Throws<ArgumentException>(() => mock.SetupGet(foo => foo.Echo(5)));
}
[Fact]
public void DoesNotCallBaseClassVirtualImplementationByDefault()
{
var mock = new Mock<FooBase>();
Assert.False(mock.Object.BaseCalled);
mock.Object.BaseCall();
Assert.False(mock.Object.BaseCalled);
}
[Fact]
public void DoesNotCallBaseClassVirtualImplementationIfSpecified()
{
var mock = new Mock<FooBase>();
mock.CallBase = false;
Assert.False(mock.Object.BaseCalled);
mock.Object.BaseCall();
Assert.Equal(default(bool), mock.Object.BaseCall("foo"));
Assert.False(mock.Object.BaseCalled);
}
[Fact]
public void ExpectsGetIndexedProperty()
{
var mock = new Mock<IFoo>();
mock.SetupGet(foo => foo[0])
.Returns(1);
mock.SetupGet(foo => foo[1])
.Returns(2);
Assert.Equal(1, mock.Object[0]);
Assert.Equal(2, mock.Object[1]);
}
[Fact]
public void ExpectAndExpectGetAreSynonyms()
{
var mock = new Mock<IFoo>();
mock.SetupGet(foo => foo.Value)
.Returns(1);
mock.Setup(foo => foo.Value)
.Returns(2);
Assert.Equal(2, mock.Object.Value);
}
[Fact]
public void ThrowsIfExpectationSetForNotOverridableMember()
{
var target = new Mock<Doer>();
Assert.Throws<NotSupportedException>(() => target.Setup(t => t.Do()));
}
[Fact]
public void ExpectWithParamArrayEmptyMatchArguments()
{
string expected = "bar";
string argument = "foo";
var target = new Mock<IParams>();
target.Setup(x => x.ExecuteByName(argument)).Returns(expected);
string actual = target.Object.ExecuteByName(argument);
Assert.Equal(expected, actual);
}
[Fact]
public void ExpectWithParamArrayNotMatchDifferentLengthInArguments()
{
string notExpected = "bar";
string argument = "foo";
var target = new Mock<IParams>();
target.Setup(x => x.ExecuteParams(argument, It.IsAny<string>())).Returns(notExpected);
string actual = target.Object.ExecuteParams(argument);
Assert.NotEqual(notExpected, actual);
}
[Fact]
public void ExpectWithParamArrayMatchArguments()
{
string expected = "bar";
string argument = "foo";
var target = new Mock<IParams>();
target.Setup(x => x.ExecuteParams(argument, It.IsAny<string>())).Returns(expected);
string ret = target.Object.ExecuteParams(argument, "baz");
Assert.Equal(expected, ret);
}
[Fact]
public void ExpectWithArrayNotMatchTwoDifferentArrayInstances()
{
string expected = "bar";
string argument = "foo";
var target = new Mock<IParams>();
target.Setup(x => x.ExecuteArray(new string[] { argument, It.IsAny<string>() })).Returns(expected);
string ret = target.Object.ExecuteArray(new string[] { argument, "baz" });
Assert.Null(ret);
}
[Fact]
public void ExpectGetAndExpectSetMatchArguments()
{
var target = new Mock<IFoo>();
target.SetupGet(d => d.Value).Returns(1);
target.SetupSet(d => d.Value = 2);
target.Object.Value = target.Object.Value + 1;
target.VerifyAll();
}
[Fact]
public void ArgumentNullMatchProperCtor()
{
var target = new Mock<Foo>(null);
Assert.Null(target.Object.Bar);
}
[Fact]
public void DistinguishesSameMethodsWithDifferentGenericArguments()
{
var mock = new Mock<FooBase>();
mock.Setup(foo => foo.Generic<int>()).Returns(2);
mock.Setup(foo => foo.Generic<string>()).Returns(3);
Assert.Equal(2, mock.Object.Generic<int>());
Assert.Equal(3, mock.Object.Generic<string>());
}
/// <summary>
/// Mostly testing that these casts compile, but also that there are no runtime failures.
/// </summary>
[Fact]
public void CanBeCastToIMockWithCovariance()
{
var mock = new Mock<INewBar>();
mock.Setup(x => x.Value).Returns("bar");
var foo = MakeFoo(mock);
Assert.Equal("bar", foo.Bar.Value);
mock.Verify(x => x.Value, Times.Once());
}
private static Foo MakeFoo(IMock<IBar> barMock)
{
return new Foo(barMock.Object);
}
[Fact]
public void Mock_initially_uses_default_switches()
{
var mock = new Mock<IFoo>();
Assert.Equal(Switches.Default, actual: mock.Switches);
}
public class Foo
{
public Foo() : this(new Bar()) { }
public Foo(IBar bar)
{
this.Bar = bar;
}
public IBar Bar { get; private set; }
}
public class Bar : IBar
{
public string Value { get; set; }
}
public interface IBar
{
string Value { get; set; }
}
interface IDo { void Do(); }
public class Doer : IDo
{
public void Do()
{
}
}
public sealed class FooSealed { }
class FooService : IFooService { }
interface IFooService { }
public class FooWithPrivateSetter
{
public virtual string Foo { get; private set; }
}
public class ClassWithNoDefaultConstructor
{
public ClassWithNoDefaultConstructor(string stringValue, int intValue)
{
this.StringValue = stringValue;
this.IntValue = intValue;
}
public string StringValue { get; set; }
public int IntValue { get; set; }
}
public abstract class FooWithConstructors
{
public FooWithConstructors(string stringValue, int intValue)
{
this.StringValue = stringValue;
this.IntValue = intValue;
}
public FooWithConstructors()
{
}
public override string ToString()
{
return base.ToString();
}
public string StringValue { get; set; }
public int IntValue { get; set; }
}
public class FooOverrideEquals
{
public string Name { get; set; }
public override bool Equals(object obj)
{
return (obj is FooOverrideEquals) &&
((FooOverrideEquals)obj).Name == this.Name;
}
public override int GetHashCode()
{
return Name.GetHashCode();
}
}
public interface IFoo
{
object Object { get; set; }
IBar Bar { get; set; }
int Count { set; }
int? Value { get; set; }
int Echo(int value);
void Submit();
string Execute(string command);
int this[int index] { get; set; }
}
public interface IParams
{
string ExecuteByName(string name, params object[] args);
string ExecuteParams(params string[] args);
string ExecuteArray(string[] args);
}
public abstract class FooBase
{
public int ValueField;
public abstract void Do(int value);
public virtual bool Check(string value)
{
return true;
}
public bool GetIsProtected()
{
return IsProtected();
}
protected virtual bool IsProtected()
{
return true;
}
public bool True()
{
return true;
}
public bool BaseCalled = false;
public virtual void BaseCall()
{
BaseCalled = true;
}
public virtual int Generic<T>()
{
return 0;
}
public bool BaseReturnCalled = false;
public virtual bool BaseCall(string value)
{
BaseReturnCalled = true;
return default(bool);
}
}
public interface INewFoo : IFoo
{
new INewBar Bar { get; set; }
}
public interface INewBar : IBar
{
}
// Note that this test requires that there be no [assembly: InternalsVisibleTo("DynamicProxyGenAssembly2")]
// or similar defined in this test assembly. If some other test requires that internals be made
// visible to DynamicProxy, then this test must be disabled.
[Fact]
public void SetupOnInaccessibleMethodThrowsException()
{
var mock = new Mock<Accessibility.ClassWithAccessibleAndInaccessibleMethod>();
var error = Record.Exception(() =>
{
mock.Setup(m => m.Internal());
});
Assert.NotNull(error);
Assert.IsType<ArgumentException>(error);
Assert.Contains("accessible", error.Message);
Assert.Contains("proxy generator", error.Message);
}
[Fact]
public void SetupOnAccessibleMethodDoesNotThrowException()
{
var mock = new Mock<Accessibility.ClassWithAccessibleAndInaccessibleMethod>();
var error = Record.Exception(() =>
{
mock.Setup(m => m.Public());
});
Assert.Null(error);
}
public class Accessibility
{
public class ClassWithAccessibleAndInaccessibleMethod
{
public virtual void Public() => throw new InvalidOperationException("Public");
internal virtual void Internal() => throw new InvalidOperationException("Internal");
}
}
[Fact]
public void New_mock_has_no_preconfigured_default_return_values()
{
var mock = new Mock<object>();
Assert.Empty(mock.ConfiguredDefaultValues);
}
[Fact]
public void SetReturnsDefault_records_a_configured_default_return_value()
{
var mock = new Mock<object>();
mock.SetReturnsDefault<int>(123);
Assert.NotEmpty(mock.ConfiguredDefaultValues);
}
[Fact]
public void Reset_clears_configured_default_return_values()
{
var mock = new Mock<object>();
mock.SetReturnsDefault<int>(123);
mock.Reset();
Assert.Empty(mock.ConfiguredDefaultValues);
}
#if FEATURE_SERIALIZATION
[Serializable]
public class BadSerializable : ISerializable
{
public void GetObjectData(SerializationInfo info, StreamingContext context)
{
}
}
public interface IHaveBadSerializableProperty
{
int A { get; }
BadSerializable BadSerializable { get; }
string C { get; }
}
[Fact]
public void Accessing_property_of_bad_serializable_type_throws()
{
var mock = new Mock<IHaveBadSerializableProperty>() { DefaultValue = DefaultValue.Mock };
Assert.ThrowsAny<Exception>(() => mock.Object.BadSerializable);
}
[Fact]
public void Accessing_property_of_bad_serializable_type_after_SetupAllProperties_throws()
{
var mock = new Mock<IHaveBadSerializableProperty>() { DefaultValue = DefaultValue.Mock };
mock.SetupAllProperties();
Assert.ThrowsAny<Exception>(() => mock.Object.BadSerializable);
}
#endif
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure;
using Microsoft.Azure.Management.Sql;
using Microsoft.Azure.Management.Sql.Models;
namespace Microsoft.Azure.Management.Sql
{
/// <summary>
/// The Windows Azure SQL Database management API provides a RESTful set of
/// web services that interact with Windows Azure SQL Database services to
/// manage your databases. The API enables users to create, retrieve,
/// update, and delete databases and servers.
/// </summary>
public static partial class ServerUpgradeOperationsExtensions
{
/// <summary>
/// Cancel a pending upgrade for the Azure SQL Database server.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.IServerUpgradeOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the server
/// belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Database Server to cancel
/// upgrade.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static AzureOperationResponse Cancel(this IServerUpgradeOperations operations, string resourceGroupName, string serverName)
{
return Task.Factory.StartNew((object s) =>
{
return ((IServerUpgradeOperations)s).CancelAsync(resourceGroupName, serverName);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Cancel a pending upgrade for the Azure SQL Database server.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.IServerUpgradeOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the server
/// belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Database Server to cancel
/// upgrade.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static Task<AzureOperationResponse> CancelAsync(this IServerUpgradeOperations operations, string resourceGroupName, string serverName)
{
return operations.CancelAsync(resourceGroupName, serverName, CancellationToken.None);
}
/// <summary>
/// Returns information about Upgrade status of an Azure SQL Database
/// Server.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.IServerUpgradeOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the server
/// belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Database Server to upgrade.
/// </param>
/// <returns>
/// Represents the response to a Get request for Upgrade status of an
/// Azure SQL Database Server.
/// </returns>
public static ServerUpgradeGetResponse Get(this IServerUpgradeOperations operations, string resourceGroupName, string serverName)
{
return Task.Factory.StartNew((object s) =>
{
return ((IServerUpgradeOperations)s).GetAsync(resourceGroupName, serverName);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Returns information about Upgrade status of an Azure SQL Database
/// Server.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.IServerUpgradeOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the server
/// belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Database Server to upgrade.
/// </param>
/// <returns>
/// Represents the response to a Get request for Upgrade status of an
/// Azure SQL Database Server.
/// </returns>
public static Task<ServerUpgradeGetResponse> GetAsync(this IServerUpgradeOperations operations, string resourceGroupName, string serverName)
{
return operations.GetAsync(resourceGroupName, serverName, CancellationToken.None);
}
/// <summary>
/// Start an Azure SQL Database Server Upgrade.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.IServerUpgradeOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the server
/// belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Database Server to upgrade.
/// </param>
/// <param name='parameters'>
/// Required. The required parameters for the Azure SQL Database Server
/// Upgrade.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static AzureOperationResponse Start(this IServerUpgradeOperations operations, string resourceGroupName, string serverName, ServerUpgradeStartParameters parameters)
{
return Task.Factory.StartNew((object s) =>
{
return ((IServerUpgradeOperations)s).StartAsync(resourceGroupName, serverName, parameters);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Start an Azure SQL Database Server Upgrade.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.IServerUpgradeOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the server
/// belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Database Server to upgrade.
/// </param>
/// <param name='parameters'>
/// Required. The required parameters for the Azure SQL Database Server
/// Upgrade.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static Task<AzureOperationResponse> StartAsync(this IServerUpgradeOperations operations, string resourceGroupName, string serverName, ServerUpgradeStartParameters parameters)
{
return operations.StartAsync(resourceGroupName, serverName, parameters, CancellationToken.None);
}
}
}
| |
// Radial Menu|Prefabs|0040
namespace VRTK
{
using UnityEngine;
using System.Collections;
using UnityEngine.Events;
using System.Collections.Generic;
using UnityEngine.UI;
using UnityEngine.EventSystems;
public delegate void HapticPulseEventHandler(float strength);
/// <summary>
/// This adds a UI element into the world space that can be dropped into a Controller object and used to create and use Radial Menus from the touchpad.
/// </summary>
/// <remarks>
/// If the RadialMenu is placed inside a controller, it will automatically find a `VRTK_ControllerEvents` in its parent to use at the input. However, a `VRTK_ControllerEvents` can be defined explicitly by setting the `Events` parameter of the `Radial Menu Controller` script also attached to the prefab.
///
/// The RadialMenu can also be placed inside a `VRTK_InteractableObject` for the RadialMenu to be anchored to a world object instead of the controller. The `Events Manager` parameter will automatically be set if the RadialMenu is a child of an InteractableObject, but it can also be set manually in the inspector. Additionally, for the RadialMenu to be anchored in the world, the `RadialMenuController` script in the prefab must be replaced with `VRTK_IndependentRadialMenuController`. See the script information for further details on making the RadialMenu independent of the controllers.
/// </remarks>
/// <example>
/// `VRTK/Examples/030_Controls_RadialTouchpadMenu` displays a radial menu for each controller. The left controller uses the `Hide On Release` variable, so it will only be visible if the left touchpad is being touched. It also uses the `Execute On Unclick` variable to delay execution until the touchpad button is unclicked. The example scene also contains a demonstration of anchoring the RadialMenu to an interactable cube instead of a controller.
/// </example>
[ExecuteInEditMode]
public class RadialMenu : MonoBehaviour
{
[System.Serializable]
public class RadialMenuButton
{
public Sprite ButtonIcon;
public UnityEvent OnClick = new UnityEvent();
public UnityEvent OnHold = new UnityEvent();
public UnityEvent OnHoverEnter = new UnityEvent();
public UnityEvent OnHoverExit = new UnityEvent();
}
public enum ButtonEvent
{
hoverOn,
hoverOff,
click,
unclick
}
[Tooltip("An array of Buttons that define the interactive buttons required to be displayed as part of the radial menu.")]
public List<RadialMenuButton> buttons;
[Tooltip("The base for each button in the menu, by default set to a dynamic circle arc that will fill up a portion of the menu.")]
public GameObject buttonPrefab;
[Tooltip("If checked, then the buttons will be auto generated on awake.")]
public bool generateOnAwake = true;
[Tooltip("Percentage of the menu the buttons should fill, 1.0 is a pie slice, 0.1 is a thin ring.")]
[Range(0f, 1f)]
public float buttonThickness = 0.5f;
[Tooltip("The background colour of the buttons, default is white.")]
public Color buttonColor = Color.white;
[Tooltip("The distance the buttons should move away from the centre. This creates space between the individual buttons.")]
public float offsetDistance = 1;
[Tooltip("The additional rotation of the Radial Menu.")]
[Range(0, 359)]
public float offsetRotation;
[Tooltip("Whether button icons should rotate according to their arc or be vertical compared to the controller.")]
public bool rotateIcons;
[Tooltip("The margin in pixels that the icon should keep within the button.")]
public float iconMargin;
[Tooltip("Whether the buttons are shown")]
public bool isShown;
[Tooltip("Whether the buttons should be visible when not in use.")]
public bool hideOnRelease;
[Tooltip("Whether the button action should happen when the button is released, as opposed to happening immediately when the button is pressed.")]
public bool executeOnUnclick;
[Tooltip("The base strength of the haptic pulses when the selected button is changed, or a button is pressed. Set to zero to disable.")]
[Range(0, 1)]
public float baseHapticStrength;
public event HapticPulseEventHandler FireHapticPulse;
//Has to be public to keep state from editor -> play mode?
[Tooltip("The actual GameObjects that make up the radial menu.")]
public List<GameObject> menuButtons;
protected int currentHover = -1;
protected int currentPress = -1;
/// <summary>
/// The HoverButton method is used to set the button hover at a given angle.
/// </summary>
/// <param name="angle">The angle on the radial menu.</param>
public virtual void HoverButton(float angle)
{
InteractButton(angle, ButtonEvent.hoverOn);
}
/// <summary>
/// The ClickButton method is used to set the button click at a given angle.
/// </summary>
/// <param name="angle">The angle on the radial menu.</param>
public virtual void ClickButton(float angle)
{
InteractButton(angle, ButtonEvent.click);
}
/// <summary>
/// The UnClickButton method is used to set the button unclick at a given angle.
/// </summary>
/// <param name="angle">The angle on the radial menu.</param>
public virtual void UnClickButton(float angle)
{
InteractButton(angle, ButtonEvent.unclick);
}
/// <summary>
/// The ToggleMenu method is used to show or hide the radial menu.
/// </summary>
public virtual void ToggleMenu()
{
if (isShown)
{
HideMenu(true);
}
else
{
ShowMenu();
}
}
/// <summary>
/// The StopTouching method is used to stop touching the menu.
/// </summary>
public virtual void StopTouching()
{
if (currentHover != -1)
{
var pointer = new PointerEventData(EventSystem.current);
ExecuteEvents.Execute(menuButtons[currentHover], pointer, ExecuteEvents.pointerExitHandler);
buttons[currentHover].OnHoverExit.Invoke();
currentHover = -1;
}
}
/// <summary>
/// The ShowMenu method is used to show the menu.
/// </summary>
public virtual void ShowMenu()
{
if (!isShown)
{
isShown = true;
StopCoroutine("TweenMenuScale");
StartCoroutine("TweenMenuScale", isShown);
}
}
/// <summary>
/// The GetButton method is used to get a button from the menu.
/// </summary>
/// <param name="id">The id of the button to retrieve.</param>
/// <returns>The found radial menu button.</returns>
public virtual RadialMenuButton GetButton(int id)
{
if (id < buttons.Count)
{
return buttons[id];
}
return null;
}
/// <summary>
/// The HideMenu method is used to hide the menu.
/// </summary>
/// <param name="force">If true then the menu is always hidden.</param>
public virtual void HideMenu(bool force)
{
if (isShown && (hideOnRelease || force))
{
isShown = false;
StopCoroutine("TweenMenuScale");
StartCoroutine("TweenMenuScale", isShown);
}
}
/// <summary>
/// The RegenerateButtons method creates all the button arcs and populates them with desired icons.
/// </summary>
public void RegenerateButtons()
{
RemoveAllButtons();
for (int i = 0; i < buttons.Count; i++)
{
// Initial placement/instantiation
GameObject newButton = Instantiate(buttonPrefab);
newButton.transform.SetParent(transform);
newButton.transform.localScale = Vector3.one;
newButton.GetComponent<RectTransform>().offsetMax = Vector2.zero;
newButton.GetComponent<RectTransform>().offsetMin = Vector2.zero;
//Setup button arc
UICircle circle = newButton.GetComponent<UICircle>();
if (buttonThickness == 1)
{
circle.fill = true;
}
else
{
circle.thickness = (int)(buttonThickness * (GetComponent<RectTransform>().rect.width / 2f));
}
int fillPerc = (int)(100f / buttons.Count);
circle.fillPercent = fillPerc;
circle.color = buttonColor;
//Final placement/rotation
float angle = ((360 / buttons.Count) * i) + offsetRotation;
newButton.transform.localEulerAngles = new Vector3(0, 0, angle);
newButton.layer = 4; //UI Layer
newButton.transform.localPosition = Vector3.zero;
if (circle.fillPercent < 55)
{
float angleRad = (angle * Mathf.PI) / 180f;
Vector2 angleVector = new Vector2(-Mathf.Cos(angleRad), -Mathf.Sin(angleRad));
newButton.transform.localPosition += (Vector3)angleVector * offsetDistance;
}
//Place and populate Button Icon
GameObject buttonIcon = newButton.GetComponentInChildren<RadialButtonIcon>().gameObject;
if (buttons[i].ButtonIcon == null)
{
buttonIcon.SetActive(false);
}
else
{
buttonIcon.GetComponent<Image>().sprite = buttons[i].ButtonIcon;
buttonIcon.transform.localPosition = new Vector2(-1 * ((newButton.GetComponent<RectTransform>().rect.width / 2f) - (circle.thickness / 2f)), 0);
//Min icon size from thickness and arc
float scale1 = Mathf.Abs(circle.thickness);
float R = Mathf.Abs(buttonIcon.transform.localPosition.x);
float bAngle = (359f * circle.fillPercent * 0.01f * Mathf.PI) / 180f;
float scale2 = (R * 2 * Mathf.Sin(bAngle / 2f));
if (circle.fillPercent > 24) //Scale calc doesn't work for > 90 degrees
{
scale2 = float.MaxValue;
}
float iconScale = Mathf.Min(scale1, scale2) - iconMargin;
buttonIcon.GetComponent<RectTransform>().sizeDelta = new Vector2(iconScale, iconScale);
//Rotate icons all vertically if desired
if (!rotateIcons)
{
buttonIcon.transform.eulerAngles = GetComponentInParent<Canvas>().transform.eulerAngles;
}
}
menuButtons.Add(newButton);
}
}
/// <summary>
/// The AddButton method is used to add a new button to the menu.
/// </summary>
/// <param name="newButton">The button to add.</param>
public void AddButton(RadialMenuButton newButton)
{
buttons.Add(newButton);
RegenerateButtons();
}
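        // A minimal usage sketch for adding a button at runtime (the sprite and handler are illustrative):
        //   var entry = new RadialMenuButton { ButtonIcon = someSprite };
        //   entry.OnClick.AddListener(() => Debug.Log("Radial button clicked"));
        //   GetComponent<RadialMenu>().AddButton(entry); // AddButton regenerates all button arcs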
protected virtual void Awake()
{
if (Application.isPlaying)
{
if (!isShown)
{
transform.localScale = Vector3.zero;
}
if (generateOnAwake)
{
RegenerateButtons();
}
}
}
protected virtual void Update()
{
//Keep track of pressed button and constantly invoke Hold event
if (currentPress != -1)
{
buttons[currentPress].OnHold.Invoke();
}
}
        //Turns an angle and event type into a button action
protected virtual void InteractButton(float angle, ButtonEvent evt) //Can't pass ExecuteEvents as parameter? Unity gives error
{
//Get button ID from angle
float buttonAngle = 360f / buttons.Count; //Each button is an arc with this angle
angle = VRTK_SharedMethods.Mod((angle + -offsetRotation), 360); //Offset the touch coordinate with our offset
int buttonID = (int)VRTK_SharedMethods.Mod(((angle + (buttonAngle / 2f)) / buttonAngle), buttons.Count); //Convert angle into ButtonID (This is the magic)
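            //Worked example (illustrative): with 4 buttons and offsetRotation = 0, buttonAngle = 90.
            //A touch at 100 degrees stays 100 after the offset; (100 + 45) / 90 = 1.61 truncates to buttonID 1,
            //so each button owns the 90 degree arc centred on its own angle.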
var pointer = new PointerEventData(EventSystem.current); //Create a new EventSystem (UI) Event
//If we changed buttons while moving, un-hover and un-click the last button we were on
if (currentHover != buttonID && currentHover != -1)
{
ExecuteEvents.Execute(menuButtons[currentHover], pointer, ExecuteEvents.pointerUpHandler);
ExecuteEvents.Execute(menuButtons[currentHover], pointer, ExecuteEvents.pointerExitHandler);
buttons[currentHover].OnHoverExit.Invoke();
if (executeOnUnclick && currentPress != -1)
{
ExecuteEvents.Execute(menuButtons[buttonID], pointer, ExecuteEvents.pointerDownHandler);
AttempHapticPulse(baseHapticStrength * 1.666f);
}
}
if (evt == ButtonEvent.click) //Click button if click, and keep track of current press (executes button action)
{
ExecuteEvents.Execute(menuButtons[buttonID], pointer, ExecuteEvents.pointerDownHandler);
currentPress = buttonID;
if (!executeOnUnclick)
{
buttons[buttonID].OnClick.Invoke();
AttempHapticPulse(baseHapticStrength * 2.5f);
}
}
else if (evt == ButtonEvent.unclick) //Clear press id to stop invoking OnHold method (hide menu)
{
ExecuteEvents.Execute(menuButtons[buttonID], pointer, ExecuteEvents.pointerUpHandler);
currentPress = -1;
if (executeOnUnclick)
{
AttempHapticPulse(baseHapticStrength * 2.5f);
buttons[buttonID].OnClick.Invoke();
}
}
else if (evt == ButtonEvent.hoverOn && currentHover != buttonID) // Show hover UI event (darken button etc). Show menu
{
ExecuteEvents.Execute(menuButtons[buttonID], pointer, ExecuteEvents.pointerEnterHandler);
buttons[buttonID].OnHoverEnter.Invoke();
AttempHapticPulse(baseHapticStrength);
}
currentHover = buttonID; //Set current hover ID, need this to un-hover if selected button changes
}
//Simple tweening for menu, scales linearly from 0 to 1 and 1 to 0
protected virtual IEnumerator TweenMenuScale(bool show)
{
float targetScale = 0;
Vector3 Dir = -1 * Vector3.one;
if (show)
{
targetScale = 1;
Dir = Vector3.one;
}
int i = 0; //Sanity check for infinite loops
while (i < 250 && ((show && transform.localScale.x < targetScale) || (!show && transform.localScale.x > targetScale)))
{
transform.localScale += Dir * Time.deltaTime * 4f; //Tweening function - currently 0.25 second linear
yield return true;
i++;
}
transform.localScale = Dir * targetScale;
StopCoroutine("TweenMenuScale");
}
protected virtual void AttempHapticPulse(float strength)
{
if (strength > 0 && FireHapticPulse != null)
{
FireHapticPulse(strength);
}
}
protected virtual void RemoveAllButtons()
{
if (menuButtons == null)
{
menuButtons = new List<GameObject>();
}
for (int i = 0; i < menuButtons.Count; i++)
{
DestroyImmediate(menuButtons[i]);
}
menuButtons = new List<GameObject>();
}
}
}
| |
//******************************************************************************************************************************************************************************************//
// Public Domain //
// //
// Written by Peter O. in 2014. //
// //
// Any copyright is dedicated to the Public Domain. http://creativecommons.org/publicdomain/zero/1.0/ //
// //
// If you like this, you should donate to Peter O. at: http://peteroupc.github.io/ //
//******************************************************************************************************************************************************************************************//
using System;
using System.Collections.Generic;
using System.Text;
using Neos.IdentityServer.MultiFactor.WebAuthN.Library.Cbor.Numbers;
namespace Neos.IdentityServer.MultiFactor.WebAuthN.Library.Cbor
{
internal sealed class CBORJson {
// JSON parsing methods
private int SkipWhitespaceJSON() {
while (true) {
int c = this.ReadChar();
if (c == -1 || (c != 0x20 && c != 0x0a && c != 0x0d && c != 0x09)) {
return c;
}
}
}
// JSON parsing methods
private int SkipWhitespaceJSON(int lastChar) {
while (lastChar == 0x20 || lastChar == 0x0a || lastChar == 0x0d ||
lastChar == 0x09) {
lastChar = this.ReadChar();
}
return lastChar;
}
public void SkipToEnd() {
if (this.jsonSequenceMode) {
while (this.ReadChar() >= 0) {
// Loop
}
}
}
public int ReadChar() {
if (this.jsonSequenceMode) {
if (this.recordSeparatorSeen) {
return -1;
}
int rc = this.reader.ReadChar();
if (rc == 0x1e) {
this.recordSeparatorSeen = true;
return -1;
}
return rc;
} else {
return this.reader.ReadChar();
}
}
private void RaiseError(string str) {
this.reader.RaiseError(str);
}
private readonly JSONOptions options;
private CharacterInputWithCount reader;
private StringBuilder sb;
private bool jsonSequenceMode;
private bool recordSeparatorSeen;
private string NextJSONString() {
int c;
this.sb = this.sb ?? new StringBuilder();
this.sb.Remove(0, this.sb.Length);
while (true) {
c = this.ReadChar();
if (c == -1 || c < 0x20) {
this.RaiseError("Unterminated string");
}
switch (c) {
case '\\':
c = this.ReadChar();
switch (c) {
case '\\':
case '/':
case '\"':
// Slash is now allowed to be escaped under RFC 8259
this.sb.Append((char)c);
break;
case 'b':
this.sb.Append('\b');
break;
case 'f':
this.sb.Append('\f');
break;
case 'n':
this.sb.Append('\n');
break;
case 'r':
this.sb.Append('\r');
break;
case 't':
this.sb.Append('\t');
break;
case 'u': { // Unicode escape
c = 0;
// Consists of 4 hex digits
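              // For characters outside the Basic Multilingual Plane, JSON encodes a
              // surrogate pair of two \u escapes; for example "\uD834\uDD1E" decodes to
              // U+1D11E (musical G clef) and is appended below as two UTF-16 code units.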
for (var i = 0; i < 4; ++i) {
int ch = this.ReadChar();
if (ch >= '0' && ch <= '9') {
c <<= 4;
c |= ch - '0';
} else if (ch >= 'A' && ch <= 'F') {
c <<= 4;
c |= ch + 10 - 'A';
} else if (ch >= 'a' && ch <= 'f') {
c <<= 4;
c |= ch + 10 - 'a';
} else {
this.RaiseError(
"Invalid Unicode escaped character");
}
}
if ((c & 0xf800) != 0xd800) {
// Non-surrogate
this.sb.Append((char)c);
} else if ((c & 0xfc00) == 0xd800) {
int ch = this.ReadChar();
if (ch != '\\' || this.ReadChar() != 'u') {
this.RaiseError("Invalid escaped character");
}
var c2 = 0;
for (var i = 0; i < 4; ++i) {
ch = this.ReadChar();
if (ch >= '0' && ch <= '9') {
c2 <<= 4;
c2 |= ch - '0';
} else if (ch >= 'A' && ch <= 'F') {
c2 <<= 4;
c2 |= ch + 10 - 'A';
} else if (ch >= 'a' && ch <= 'f') {
c2 <<= 4;
c2 |= ch + 10 - 'a';
} else {
this.RaiseError(
"Invalid Unicode escaped character");
}
}
if ((c2 & 0xfc00) != 0xdc00) {
this.RaiseError("Unpaired surrogate code point");
} else {
this.sb.Append((char)c);
this.sb.Append((char)c2);
}
} else {
this.RaiseError("Unpaired surrogate code point");
}
break;
}
default: {
this.RaiseError("Invalid escaped character");
break;
}
}
break;
case 0x22: // double quote
return this.sb.ToString();
default: {
// NOTE: Assumes the character reader
// throws an error on finding illegal surrogate
// pairs in the string or invalid encoding
// in the stream
if ((c >> 16) == 0) {
this.sb.Append((char)c);
} else {
this.sb.Append((char)((((c - 0x10000) >> 10) & 0x3ff) |
0xd800));
this.sb.Append((char)(((c - 0x10000) & 0x3ff) | 0xdc00));
}
break;
}
}
}
}
private CBORObject NextJSONNegativeNumber(
int[] nextChar,
int depth) {
string str;
CBORObject obj;
int c = this.ReadChar();
if (c < '0' || c > '9') {
this.RaiseError("JSON number can't be parsed.");
}
int cval = -(c - '0');
int cstart = c;
c = this.ReadChar();
this.sb = this.sb ?? new StringBuilder();
this.sb.Remove(0, this.sb.Length);
this.sb.Append('-');
this.sb.Append((char)cstart);
var charbuf = new char[32];
var charbufptr = 0;
while (c == '-' || c == '+' || c == '.' || (c >= '0' && c <= '9') ||
c == 'e' || c == 'E') {
charbuf[charbufptr++] = (char)c;
if (charbufptr >= 32) {
this.sb.Append(charbuf, 0, 32);
charbufptr = 0;
}
c = this.ReadChar();
}
if (charbufptr > 0) {
this.sb.Append(charbuf, 0, charbufptr);
}
// DebugUtility.Log("--nega=" + sw.ElapsedMilliseconds + " ms");
// check if character can validly appear after a JSON number
if (c != ',' && c != ']' && c != '}' && c != -1 &&
c != 0x20 && c != 0x0a && c != 0x0d && c != 0x09) {
this.RaiseError("Invalid character after JSON number");
}
str = this.sb.ToString();
// DebugUtility.Log("negb=" + sw.ElapsedMilliseconds + " ms");
obj = CBORDataUtilities.ParseJSONNumber(str, this.options);
// DebugUtility.Log("negc=" + sw.ElapsedMilliseconds + " ms");
if (obj == null) {
string errstr = (str.Length <= 100) ? str : (str.Substring(0,
100) + "...");
this.RaiseError("JSON number can't be parsed. " + errstr);
}
if (c == 0x20 || c == 0x0a || c == 0x0d || c == 0x09) {
nextChar[0] = this.SkipWhitespaceJSON();
} else if (this.jsonSequenceMode && depth == 0) {
nextChar[0] = c;
this.RaiseError("JSON whitespace expected after top-level " +
"number in JSON sequence");
} else {
nextChar[0] = c;
}
return obj;
}
private CBORObject NextJSONValue(
int firstChar,
int[] nextChar,
int depth) {
string str;
int c = firstChar;
CBORObject obj = null;
if (c < 0) {
this.RaiseError("Unexpected end of data");
}
switch (c) {
case '"': {
// Parse a string
// The tokenizer already checked the string for invalid
// surrogate pairs, so just call the CBORObject
// constructor directly
obj = CBORObject.FromRaw(this.NextJSONString());
nextChar[0] = this.SkipWhitespaceJSON();
return obj;
}
case '{': {
// Parse an object
obj = this.ParseJSONObject(depth + 1);
nextChar[0] = this.SkipWhitespaceJSON();
return obj;
}
case '[': {
// Parse an array
obj = this.ParseJSONArray(depth + 1);
nextChar[0] = this.SkipWhitespaceJSON();
return obj;
}
case 't': {
// Parse true
if ((c = this.ReadChar()) != 'r' || (c = this.ReadChar()) != 'u' ||
(c = this.ReadChar()) != 'e') {
this.RaiseError("Value can't be parsed.");
}
c = this.ReadChar();
if (c == 0x20 || c == 0x0a || c == 0x0d || c == 0x09) {
nextChar[0] = this.SkipWhitespaceJSON();
} else if (this.jsonSequenceMode && depth == 0) {
nextChar[0] = c;
this.RaiseError("JSON whitespace expected after top-level " +
"number in JSON sequence");
} else {
nextChar[0] = c;
}
return CBORObject.True;
}
case 'f': {
// Parse false
if ((c = this.ReadChar()) != 'a' || (c = this.ReadChar()) != 'l' ||
(c = this.ReadChar()) != 's' || (c = this.ReadChar()) != 'e') {
this.RaiseError("Value can't be parsed.");
}
c = this.ReadChar();
if (c == 0x20 || c == 0x0a || c == 0x0d || c == 0x09) {
nextChar[0] = this.SkipWhitespaceJSON();
} else if (this.jsonSequenceMode && depth == 0) {
nextChar[0] = c;
this.RaiseError("JSON whitespace expected after top-level " +
"number in JSON sequence");
} else {
nextChar[0] = c;
}
return CBORObject.False;
}
case 'n': {
// Parse null
if ((c = this.ReadChar()) != 'u' || (c = this.ReadChar()) != 'l' ||
(c = this.ReadChar()) != 'l') {
this.RaiseError("Value can't be parsed.");
}
c = this.ReadChar();
if (c == 0x20 || c == 0x0a || c == 0x0d || c == 0x09) {
nextChar[0] = this.SkipWhitespaceJSON();
} else if (this.jsonSequenceMode && depth == 0) {
nextChar[0] = c;
this.RaiseError("JSON whitespace expected after top-level " +
"number in JSON sequence");
} else {
nextChar[0] = c;
}
return CBORObject.Null;
}
case '-': {
// Parse a negative number
return this.NextJSONNegativeNumber(nextChar, depth);
}
case '0':
case '1':
case '2':
case '3':
case '4':
case '5':
case '6':
case '7':
case '8':
case '9': {
// Parse a nonnegative number
int cval = c - '0';
int cstart = c;
var needObj = true;
c = this.ReadChar();
if (!(c == '-' || c == '+' || c == '.' || (c >= '0' && c <= '9') ||
c == 'e' || c == 'E')) {
// Optimize for common case where JSON number
// is a single digit without sign or exponent
obj = CBORDataUtilities.ParseSmallNumber(cval, this.options);
needObj = false;
} else if (c >= '0' && c <= '9') {
int csecond = c;
if (cstart == '0') {
// Leading zero followed by any digit is not allowed
this.RaiseError("JSON number can't be parsed.");
}
cval = (cval * 10) + (int)(c - '0');
c = this.ReadChar();
if (c >= '0' && c <= '9') {
var digits = 2;
var ctmp = new int[10];
ctmp[0] = cstart;
ctmp[1] = csecond;
while (digits < 9 && (c >= '0' && c <= '9')) {
cval = (cval * 10) + (int)(c - '0');
ctmp[digits++] = c;
c = this.ReadChar();
}
if (c == 'e' || c == 'E' || c == '.' || (c >= '0' && c <= '9')) {
// Not an all-digit number, or too long
this.sb = this.sb ?? new StringBuilder();
this.sb.Remove(0, this.sb.Length);
for (var vi = 0; vi < digits; ++vi) {
this.sb.Append((char)ctmp[vi]);
}
} else {
obj = CBORDataUtilities.ParseSmallNumber(cval, this.options);
needObj = false;
}
} else if (!(c == '-' || c == '+' || c == '.' || c == 'e' || c
== 'E')) {
// Optimize for common case where JSON number
// is two digits without sign, decimal point, or exponent
obj = CBORDataUtilities.ParseSmallNumber(cval, this.options);
needObj = false;
} else {
this.sb = this.sb ?? new StringBuilder();
this.sb.Remove(0, this.sb.Length);
this.sb.Append((char)cstart);
this.sb.Append((char)csecond);
}
} else {
this.sb = this.sb ?? new StringBuilder();
this.sb.Remove(0, this.sb.Length);
this.sb.Append((char)cstart);
}
if (needObj) {
var charbuf = new char[32];
var charbufptr = 0;
while (
c == '-' || c == '+' || c == '.' || (c >= '0' && c <= '9') ||
c == 'e' || c == 'E') {
charbuf[charbufptr++] = (char)c;
if (charbufptr >= 32) {
this.sb.Append(charbuf, 0, 32);
charbufptr = 0;
}
c = this.ReadChar();
}
if (charbufptr > 0) {
this.sb.Append(charbuf, 0, charbufptr);
}
// check if character can validly appear after a JSON number
if (c != ',' && c != ']' && c != '}' && c != -1 &&
c != 0x20 && c != 0x0a && c != 0x0d && c != 0x09) {
this.RaiseError("Invalid character after JSON number");
}
str = this.sb.ToString();
obj = CBORDataUtilities.ParseJSONNumber(str, this.options);
if (obj == null) {
string errstr = (str.Length <= 100) ? str : (str.Substring(0,
100) + "...");
this.RaiseError("JSON number can't be parsed. " + errstr);
}
}
if (c == 0x20 || c == 0x0a || c == 0x0d || c == 0x09) {
nextChar[0] = this.SkipWhitespaceJSON();
} else if (this.jsonSequenceMode && depth == 0) {
nextChar[0] = c;
this.RaiseError("JSON whitespace expected after top-level " +
"number in JSON sequence");
} else {
nextChar[0] = c;
}
return obj;
}
default: this.RaiseError("Value can't be parsed.");
break;
}
return null;
}
public CBORJson(CharacterInputWithCount reader, JSONOptions options) {
this.reader = reader;
this.sb = null;
this.options = options;
this.jsonSequenceMode = false;
this.recordSeparatorSeen = false;
}
public CBORObject ParseJSON(int[] nextChar) {
int c;
CBORObject ret;
c = this.jsonSequenceMode ? this.SkipWhitespaceJSON(nextChar[0]) :
this.SkipWhitespaceJSON();
if (c == '[') {
ret = this.ParseJSONArray(0);
nextChar[0] = this.SkipWhitespaceJSON();
return ret;
}
if (c == '{') {
ret = this.ParseJSONObject(0);
nextChar[0] = this.SkipWhitespaceJSON();
return ret;
}
return this.NextJSONValue(c, nextChar, 0);
}
private void SetJSONSequenceMode() {
this.jsonSequenceMode = true;
this.recordSeparatorSeen = false;
}
private void ResetJSONSequenceMode() {
this.jsonSequenceMode = true;
this.recordSeparatorSeen = false;
}
internal static CBORObject ParseJSONValue(
CharacterInputWithCount reader,
JSONOptions options,
int[] nextChar) {
var cj = new CBORJson(reader, options);
return cj.ParseJSON(nextChar);
}
internal bool SkipRecordSeparators(int[] nextChar, bool
recordSeparatorSeen) {
if (this.jsonSequenceMode) {
while (true) {
int rc = this.reader.ReadChar();
nextChar[0] = rc;
if (rc == 0x1e) {
recordSeparatorSeen = true;
} else {
return recordSeparatorSeen;
}
}
} else {
nextChar[0] = -1;
return false;
}
}
internal static CBORObject[] ParseJSONSequence(
CharacterInputWithCount reader,
JSONOptions options,
int[] nextChar) {
var cj = new CBORJson(reader, options);
cj.SetJSONSequenceMode();
bool seenSeparator = cj.SkipRecordSeparators(nextChar, false);
if (nextChar[0] >= 0 && !seenSeparator) {
// Stream is not empty and did not begin with
// record separator
cj.RaiseError("Not a JSON text sequence");
} else if (nextChar[0] < 0 && !seenSeparator) {
// Stream is empty
return new CBORObject[0];
} else if (nextChar[0] < 0) {
// Stream had only record separators, so we found
// a truncated JSON text
return new CBORObject[] { null };
}
var list = new List<CBORObject>();
while (true) {
CBORObject co;
try {
co = cj.ParseJSON(nextChar);
} catch (CBORException) {
cj.SkipToEnd();
co = null;
}
if (co != null && nextChar[0] >= 0) {
// End of JSON text not reached
cj.SkipToEnd();
co = null;
}
list.Add(co);
if (!cj.recordSeparatorSeen) {
// End of the stream was reached
nextChar[0] = -1;
break;
} else {
// A record separator was seen, so
// another JSON text follows
cj.ResetJSONSequenceMode();
cj.SkipRecordSeparators(nextChar, true);
if (nextChar[0] < 0) {
// Rest of stream had only record separators, so we found
// a truncated JSON text
list.Add(null);
break;
}
}
}
return (CBORObject[])list.ToArray();
}
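    // ParseJSONSequence reads an RFC 7464 JSON text sequence, where each JSON text is
    // preceded by a record separator byte (0x1e). A minimal sketch of the expected input
    // and output (byte layout shown informally):
    //   input:  0x1e {"a":1} 0x0a 0x1e [2,3] 0x0a
    //   output: a CBORObject map {"a": 1} followed by a CBORObject array [2, 3];
    //           a truncated or unparsable text becomes a null entry in the returned array.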
private CBORObject ParseJSONObject(int depth) {
// Assumes that the last character read was '{'
if (depth > 1000) {
this.RaiseError("Too deeply nested");
}
int c;
CBORObject key = null;
CBORObject obj;
var nextChar = new int[1];
var seenComma = false;
var myHashMap = new Dictionary<CBORObject, CBORObject>();
while (true) {
c = this.SkipWhitespaceJSON();
switch (c) {
case -1:
this.RaiseError("A JSON object must end with '}'");
break;
case '}':
if (seenComma) {
// Situation like '{"0"=>1,}'
this.RaiseError("Trailing comma");
return null;
}
return CBORObject.FromRaw(myHashMap);
default: {
// Read the next string
if (c < 0) {
this.RaiseError("Unexpected end of data");
return null;
}
if (c != '"') {
this.RaiseError("Expected a string as a key");
return null;
}
// Parse a string that represents the object's key.
// The tokenizer already checked the string for invalid
// surrogate pairs, so just call the CBORObject
// constructor directly
obj = CBORObject.FromRaw(this.NextJSONString());
key = obj;
if (!this.options.AllowDuplicateKeys &&
myHashMap.ContainsKey(obj)) {
this.RaiseError("Key already exists: " + key);
return null;
}
break;
}
}
if (this.SkipWhitespaceJSON() != ':') {
this.RaiseError("Expected a ':' after a key");
}
// NOTE: Will overwrite existing value
myHashMap[key] = this.NextJSONValue(
this.SkipWhitespaceJSON(),
nextChar,
depth);
switch (nextChar[0]) {
case ',':
seenComma = true;
break;
case '}':
return CBORObject.FromRaw(myHashMap);
default: this.RaiseError("Expected a ',' or '}'");
break;
}
}
}
internal CBORObject ParseJSONArray(int depth) {
// Assumes that the last character read was '['
if (depth > 1000) {
this.RaiseError("Too deeply nested");
}
var myArrayList = new List<CBORObject>();
var seenComma = false;
var nextChar = new int[1];
while (true) {
int c = this.SkipWhitespaceJSON();
if (c == ']') {
if (seenComma) {
// Situation like '[0,1,]'
this.RaiseError("Trailing comma");
}
return CBORObject.FromRaw(myArrayList);
}
if (c == ',') {
// Situation like '[,0,1,2]' or '[0,,1]'
this.RaiseError("Empty array element");
}
myArrayList.Add(
this.NextJSONValue(
c,
nextChar,
depth));
c = nextChar[0];
switch (c) {
case ',':
seenComma = true;
break;
case ']':
return CBORObject.FromRaw(myArrayList);
default: this.RaiseError("Expected a ',' or ']'");
break;
}
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.WindowsAzure.Storage;
using Microsoft.WindowsAzure.Storage.Table;
using Orleans.Runtime;
using LogLevel = Microsoft.Extensions.Logging.LogLevel;
//
// Number of #ifs can be reduced (or removed) once we separate test projects by feature/area; otherwise we end up with ambiguous types and build errors.
//
#if ORLEANS_CLUSTERING
namespace Orleans.Clustering.AzureStorage
#elif ORLEANS_PERSISTENCE
namespace Orleans.Persistence.AzureStorage
#elif ORLEANS_REMINDERS
namespace Orleans.Reminders.AzureStorage
#elif ORLEANS_STREAMING
namespace Orleans.Streaming.AzureStorage
#elif ORLEANS_EVENTHUBS
namespace Orleans.Streaming.EventHubs
#elif TESTER_AZUREUTILS
namespace Orleans.Tests.AzureUtils
#elif ORLEANS_TRANSACTIONS
namespace Orleans.Transactions.AzureStorage
#else
// No default namespace intentionally to cause compile errors if something is not defined
#endif
{
/// <summary>
/// Utility class to encapsulate row-based access to Azure table storage.
/// </summary>
/// <remarks>
    /// These functions are mostly intended for internal usage by the Orleans runtime, but due to certain assembly packaging constraints this class needs to have public visibility.
/// </remarks>
/// <typeparam name="T">Table data entry used by this table / manager.</typeparam>
public class AzureTableDataManager<T> where T : class, ITableEntity, new()
{
/// <summary> Name of the table this instance is managing. </summary>
public string TableName { get; private set; }
/// <summary> Logger for this table manager instance. </summary>
protected internal ILogger Logger { get; private set; }
/// <summary> Connection string for the Azure storage account used to host this table. </summary>
protected string ConnectionString { get; set; }
private CloudTable tableReference;
public CloudTable Table => tableReference;
#if !ORLEANS_TRANSACTIONS
private readonly CounterStatistic numServerBusy = CounterStatistic.FindOrCreate(StatisticNames.AZURE_SERVER_BUSY, true);
#endif
/// <summary>
/// Constructor
/// </summary>
/// <param name="tableName">Name of the table to be connected to.</param>
/// <param name="storageConnectionString">Connection string for the Azure storage account used to host this table.</param>
/// <param name="loggerFactory">Logger factory to use.</param>
public AzureTableDataManager(string tableName, string storageConnectionString, ILoggerFactory loggerFactory)
{
Logger = loggerFactory.CreateLogger<AzureTableDataManager<T>>();
TableName = tableName;
ConnectionString = storageConnectionString;
AzureStorageUtils.ValidateTableName(tableName);
}
/// <summary>
/// Connects to, or creates and initializes a new Azure table if it does not already exist.
/// </summary>
/// <returns>Completion promise for this operation.</returns>
public async Task InitTableAsync()
{
const string operation = "InitTable";
var startTime = DateTime.UtcNow;
try
{
CloudTableClient tableCreationClient = GetCloudTableCreationClient();
CloudTable tableRef = tableCreationClient.GetTableReference(TableName);
bool didCreate = await tableRef.CreateIfNotExistsAsync();
Logger.Info((int)Utilities.ErrorCode.AzureTable_01, "{0} Azure storage table {1}", (didCreate ? "Created" : "Attached to"), TableName);
CloudTableClient tableOperationsClient = GetCloudTableOperationsClient();
tableReference = tableOperationsClient.GetTableReference(TableName);
}
catch (Exception exc)
{
Logger.Error((int)Utilities.ErrorCode.AzureTable_02, $"Could not initialize connection to storage table {TableName}", exc);
throw;
}
finally
{
CheckAlertSlowAccess(startTime, operation);
}
}
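        // A minimal usage sketch (MyEntity is an illustrative ITableEntity implementation;
        // the connection string and logger factory come from the host configuration):
        //   var manager = new AzureTableDataManager<MyEntity>("MyTable", connectionString, loggerFactory);
        //   await manager.InitTableAsync();
        //   string eTag = await manager.UpsertTableEntryAsync(new MyEntity { PartitionKey = "pk", RowKey = "rk" });
        //   Tuple<MyEntity, string> entry = await manager.ReadSingleTableEntryAsync("pk", "rk");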
/// <summary>
/// Deletes the Azure table.
/// </summary>
/// <returns>Completion promise for this operation.</returns>
public async Task DeleteTableAsync()
{
const string operation = "DeleteTable";
var startTime = DateTime.UtcNow;
try
{
CloudTableClient tableCreationClient = GetCloudTableCreationClient();
CloudTable tableRef = tableCreationClient.GetTableReference(TableName);
bool didDelete = await tableRef.DeleteIfExistsAsync();
if (didDelete)
{
Logger.Info((int)Utilities.ErrorCode.AzureTable_03, "Deleted Azure storage table {0}", TableName);
}
}
catch (Exception exc)
{
                Logger.Error((int)Utilities.ErrorCode.AzureTable_04, $"Could not delete storage table {TableName}", exc);
throw;
}
finally
{
CheckAlertSlowAccess(startTime, operation);
}
}
/// <summary>
        /// Deletes all entities in the Azure table.
/// </summary>
/// <returns>Completion promise for this operation.</returns>
public async Task ClearTableAsync()
{
IEnumerable<Tuple<T,string>> items = await ReadAllTableEntriesAsync();
IEnumerable<Task> work = items.GroupBy(item => item.Item1.PartitionKey)
.SelectMany(partition => partition.ToBatch(AzureTableDefaultPolicies.MAX_BULK_UPDATE_ROWS))
.Select(batch => DeleteTableEntriesAsync(batch.ToList()));
await Task.WhenAll(work);
}
/// <summary>
/// Create a new data entry in the Azure table (insert new, not update existing).
/// Fails if the data already exists.
/// </summary>
/// <param name="data">Data to be inserted into the table.</param>
/// <returns>Value promise with new Etag for this data entry after completing this storage operation.</returns>
public async Task<string> CreateTableEntryAsync(T data)
{
const string operation = "CreateTableEntry";
var startTime = DateTime.UtcNow;
if (Logger.IsEnabled(LogLevel.Trace)) Logger.Trace("Creating {0} table entry: {1}", TableName, data);
try
{
// WAS:
// svc.AddObject(TableName, data);
// SaveChangesOptions.None
try
{
                // Presumably FromAsync(BeginExecute, EndExecute) has slightly better performance than CreateIfNotExistsAsync.
var opResult = await tableReference.ExecuteAsync(TableOperation.Insert(data));
return opResult.Etag;
}
catch (Exception exc)
{
CheckAlertWriteError(operation, data, null, exc);
throw;
}
}
finally
{
CheckAlertSlowAccess(startTime, operation);
}
}
/// <summary>
        /// Inserts a data entry in the Azure table: creates a new entry if it does not exist or overwrites (without eTag) an already existing version (the "update in place" semantics).
/// </summary>
/// <param name="data">Data to be inserted or replaced in the table.</param>
/// <returns>Value promise with new Etag for this data entry after completing this storage operation.</returns>
public async Task<string> UpsertTableEntryAsync(T data)
{
const string operation = "UpsertTableEntry";
var startTime = DateTime.UtcNow;
if (Logger.IsEnabled(LogLevel.Trace)) Logger.Trace("{0} entry {1} into table {2}", operation, data, TableName);
try
{
try
{
// WAS:
// svc.AttachTo(TableName, data, null);
// svc.UpdateObject(data);
// SaveChangesOptions.ReplaceOnUpdate,
var opResult = await tableReference.ExecuteAsync(TableOperation.InsertOrReplace(data));
return opResult.Etag;
}
catch (Exception exc)
{
Logger.Warn((int)Utilities.ErrorCode.AzureTable_06,
$"Intermediate error upserting entry {(data == null ? "null" : data.ToString())} to the table {TableName}", exc);
throw;
}
}
finally
{
CheckAlertSlowAccess(startTime, operation);
}
}
/// <summary>
/// Merges a data entry in the Azure table.
/// </summary>
/// <param name="data">Data to be merged in the table.</param>
/// <param name="eTag">ETag to apply.</param>
/// <returns>Value promise with new Etag for this data entry after completing this storage operation.</returns>
internal async Task<string> MergeTableEntryAsync(T data, string eTag)
{
const string operation = "MergeTableEntry";
var startTime = DateTime.UtcNow;
if (Logger.IsEnabled(LogLevel.Trace)) Logger.Trace("{0} entry {1} into table {2}", operation, data, TableName);
try
{
try
{
// WAS:
// svc.AttachTo(TableName, data, ANY_ETAG);
// svc.UpdateObject(data);
data.ETag = eTag;
// Merge requires an ETag (which may be the '*' wildcard).
var opResult = await tableReference.ExecuteAsync(TableOperation.Merge(data));
return opResult.Etag;
}
catch (Exception exc)
{
Logger.Warn((int)Utilities.ErrorCode.AzureTable_07,
$"Intermediate error merging entry {(data == null ? "null" : data.ToString())} to the table {TableName}", exc);
throw;
}
}
finally
{
CheckAlertSlowAccess(startTime, operation);
}
}
/// <summary>
        /// Updates a data entry in the Azure table: updates an already existing data entry in the table, by using eTag.
        /// Fails if the data does not already exist or if the eTag does not match.
/// </summary>
/// <param name="data">Data to be updated into the table.</param>
        /// <param name="dataEtag">ETag to use.</param>
/// <returns>Value promise with new Etag for this data entry after completing this storage operation.</returns>
public async Task<string> UpdateTableEntryAsync(T data, string dataEtag)
{
const string operation = "UpdateTableEntryAsync";
var startTime = DateTime.UtcNow;
if (Logger.IsEnabled(LogLevel.Trace)) Logger.Trace("{0} table {1} entry {2}", operation, TableName, data);
try
{
try
{
data.ETag = dataEtag;
var opResult = await tableReference.ExecuteAsync(TableOperation.Replace(data));
//The ETag of data is needed in further operations.
return opResult.Etag;
}
catch (Exception exc)
{
CheckAlertWriteError(operation, data, null, exc);
throw;
}
}
finally
{
CheckAlertSlowAccess(startTime, operation);
}
}
/// <summary>
        /// Deletes an already existing data entry in the table, by using eTag.
/// Fails if the data does not already exist or if eTag does not match.
/// </summary>
/// <param name="data">Data entry to be deleted from the table.</param>
/// <param name="eTag">ETag to use.</param>
/// <returns>Completion promise for this storage operation.</returns>
public async Task DeleteTableEntryAsync(T data, string eTag)
{
const string operation = "DeleteTableEntryAsync";
var startTime = DateTime.UtcNow;
if (Logger.IsEnabled(LogLevel.Trace)) Logger.Trace("{0} table {1} entry {2}", operation, TableName, data);
try
{
data.ETag = eTag;
try
{
await tableReference.ExecuteAsync(TableOperation.Delete(data));
}
catch (Exception exc)
{
Logger.Warn((int)Utilities.ErrorCode.AzureTable_08,
$"Intermediate error deleting entry {data} from the table {TableName}.", exc);
throw;
}
}
finally
{
CheckAlertSlowAccess(startTime, operation);
}
}
/// <summary>
/// Read a single table entry from the storage table.
/// </summary>
/// <param name="partitionKey">The partition key for the entry.</param>
/// <param name="rowKey">The row key for the entry.</param>
/// <returns>Value promise for tuple containing the data entry and its corresponding etag.</returns>
public async Task<Tuple<T, string>> ReadSingleTableEntryAsync(string partitionKey, string rowKey)
{
const string operation = "ReadSingleTableEntryAsync";
var startTime = DateTime.UtcNow;
if (Logger.IsEnabled(LogLevel.Trace)) Logger.Trace("{0} table {1} partitionKey {2} rowKey = {3}", operation, TableName, partitionKey, rowKey);
T retrievedResult = default(T);
try
{
try
{
string queryString = TableQueryFilterBuilder.MatchPartitionKeyAndRowKeyFilter(partitionKey, rowKey);
var query = new TableQuery<T>().Where(queryString);
TableQuerySegment<T> segment = await tableReference.ExecuteQuerySegmentedAsync(query, null);
retrievedResult = segment.Results.SingleOrDefault();
}
catch (StorageException exception)
{
if (!AzureStorageUtils.TableStorageDataNotFound(exception))
throw;
}
//The ETag of data is needed in further operations.
if (retrievedResult != null) return new Tuple<T, string>(retrievedResult, retrievedResult.ETag);
if (Logger.IsEnabled(LogLevel.Debug)) Logger.Debug("Could not find table entry for PartitionKey={0} RowKey={1}", partitionKey, rowKey);
return null; // No data
}
finally
{
CheckAlertSlowAccess(startTime, operation);
}
}
/// <summary>
/// Read all entries in one partition of the storage table.
/// NOTE: This could be an expensive and slow operation for large table partitions!
/// </summary>
/// <param name="partitionKey">The key for the partition to be searched.</param>
/// <returns>Enumeration of all entries in the specified table partition.</returns>
public Task<IEnumerable<Tuple<T, string>>> ReadAllTableEntriesForPartitionAsync(string partitionKey)
{
string query = TableQuery.GenerateFilterCondition(nameof(ITableEntity.PartitionKey), QueryComparisons.Equal, partitionKey);
return ReadTableEntriesAndEtagsAsync(query);
}
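        // For ad-hoc queries, a filter string can be composed with the TableQuery helpers and passed
        // to ReadTableEntriesAndEtagsAsync; for example (the "Status" property is illustrative):
        //   string filter = TableQuery.CombineFilters(
        //       TableQuery.GenerateFilterCondition("PartitionKey", QueryComparisons.Equal, "pk"),
        //       TableOperators.And,
        //       TableQuery.GenerateFilterCondition("Status", QueryComparisons.Equal, "Active"));
        //   IEnumerable<Tuple<T, string>> rows = await ReadTableEntriesAndEtagsAsync(filter);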
/// <summary>
/// Read all entries in the table.
/// NOTE: This could be a very expensive and slow operation for large tables!
/// </summary>
/// <returns>Enumeration of all entries in the table.</returns>
public Task<IEnumerable<Tuple<T, string>>> ReadAllTableEntriesAsync()
{
return ReadTableEntriesAndEtagsAsync(null);
}
/// <summary>
/// Deletes a set of already existing data entries in the table, by using eTag.
/// Fails if the data does not already exist or if eTag does not match.
/// </summary>
/// <param name="collection">Data entries and their corresponding etags to be deleted from the table.</param>
/// <returns>Completion promise for this storage operation.</returns>
public async Task DeleteTableEntriesAsync(IReadOnlyCollection<Tuple<T, string>> collection)
{
const string operation = "DeleteTableEntries";
var startTime = DateTime.UtcNow;
if (Logger.IsEnabled(LogLevel.Trace)) Logger.Trace("Deleting {0} table entries: {1}", TableName, Utils.EnumerableToString(collection));
if (collection == null) throw new ArgumentNullException("collection");
if (collection.Count > AzureTableDefaultPolicies.MAX_BULK_UPDATE_ROWS)
{
throw new ArgumentOutOfRangeException("collection", collection.Count,
"Too many rows for bulk delete - max " + AzureTableDefaultPolicies.MAX_BULK_UPDATE_ROWS);
}
if (collection.Count == 0)
{
return;
}
try
{
var entityBatch = new TableBatchOperation();
foreach (var tuple in collection)
{
// WAS:
// svc.AttachTo(TableName, tuple.Item1, tuple.Item2);
// svc.DeleteObject(tuple.Item1);
// SaveChangesOptions.ReplaceOnUpdate | SaveChangesOptions.Batch,
T item = tuple.Item1;
item.ETag = tuple.Item2;
entityBatch.Delete(item);
}
try
{
await tableReference.ExecuteBatchAsync(entityBatch);
}
catch (Exception exc)
{
Logger.Warn((int)Utilities.ErrorCode.AzureTable_08,
$"Intermediate error deleting entries {Utils.EnumerableToString(collection)} from the table {TableName}.", exc);
throw;
}
}
finally
{
CheckAlertSlowAccess(startTime, operation);
}
}
/// <summary>
/// Read data entries and their corresponding eTags from the Azure table.
/// </summary>
/// <param name="filter">Filter string to use for querying the table and filtering the results.</param>
/// <returns>Enumeration of entries in the table which match the query condition.</returns>
public async Task<IEnumerable<Tuple<T, string>>> ReadTableEntriesAndEtagsAsync(string filter)
{
const string operation = "ReadTableEntriesAndEtags";
var startTime = DateTime.UtcNow;
try
{
TableQuery<T> cloudTableQuery = filter == null
? new TableQuery<T>()
: new TableQuery<T>().Where(filter);
try
{
Func<Task<List<T>>> executeQueryHandleContinuations = async () =>
{
TableQuerySegment<T> querySegment = null;
var list = new List<T>();
//ExecuteSegmentedAsync not supported in "WindowsAzure.Storage": "7.2.1" yet
while (querySegment == null || querySegment.ContinuationToken != null)
{
querySegment = await tableReference.ExecuteQuerySegmentedAsync(cloudTableQuery, querySegment?.ContinuationToken);
list.AddRange(querySegment);
}
return list;
};
#if !ORLEANS_TRANSACTIONS
IBackoffProvider backoff = new FixedBackoff(AzureTableDefaultPolicies.PauseBetweenTableOperationRetries);
List<T> results = await AsyncExecutorWithRetries.ExecuteWithRetries(
counter => executeQueryHandleContinuations(),
AzureTableDefaultPolicies.MaxTableOperationRetries,
(exc, counter) => AzureStorageUtils.AnalyzeReadException(exc.GetBaseException(), counter, TableName, Logger),
AzureTableDefaultPolicies.TableOperationTimeout,
backoff);
#else
List<T> results = await executeQueryHandleContinuations();
#endif
// Data was read successfully if we got to here
return results.Select(i => Tuple.Create(i, i.ETag)).ToList();
}
catch (Exception exc)
{
// Out of retries...
var errorMsg = $"Failed to read Azure storage table {TableName}: {exc.Message}";
if (!AzureStorageUtils.TableStorageDataNotFound(exc))
{
Logger.Warn((int)Utilities.ErrorCode.AzureTable_09, errorMsg, exc);
}
throw new OrleansException(errorMsg, exc);
}
}
finally
{
CheckAlertSlowAccess(startTime, operation);
}
}
/// <summary>
/// Inserts a set of new data entries into the table.
        /// Fails if the data already exists.
/// </summary>
/// <param name="collection">Data entries to be inserted into the table.</param>
/// <returns>Completion promise for this storage operation.</returns>
public async Task BulkInsertTableEntries(IReadOnlyCollection<T> collection)
{
const string operation = "BulkInsertTableEntries";
if (collection == null) throw new ArgumentNullException("collection");
if (collection.Count > AzureTableDefaultPolicies.MAX_BULK_UPDATE_ROWS)
{
throw new ArgumentOutOfRangeException("collection", collection.Count,
"Too many rows for bulk update - max " + AzureTableDefaultPolicies.MAX_BULK_UPDATE_ROWS);
}
if (collection.Count == 0)
{
return;
}
var startTime = DateTime.UtcNow;
if (Logger.IsEnabled(LogLevel.Trace)) Logger.Trace("Bulk inserting {0} entries to {1} table", collection.Count, TableName);
try
{
// WAS:
// svc.AttachTo(TableName, entry);
// svc.UpdateObject(entry);
// SaveChangesOptions.None | SaveChangesOptions.Batch,
// SaveChangesOptions.None == Insert-or-merge operation, SaveChangesOptions.Batch == Batch transaction
// http://msdn.microsoft.com/en-us/library/hh452241.aspx
var entityBatch = new TableBatchOperation();
foreach (T entry in collection)
{
entityBatch.Insert(entry);
}
try
{
// http://msdn.microsoft.com/en-us/library/hh452241.aspx
await tableReference.ExecuteBatchAsync(entityBatch);
}
catch (Exception exc)
{
Logger.Warn((int)Utilities.ErrorCode.AzureTable_37,
$"Intermediate error bulk inserting {collection.Count} entries in the table {TableName}", exc);
}
}
finally
{
CheckAlertSlowAccess(startTime, operation);
}
}
internal async Task<Tuple<string, string>> InsertTwoTableEntriesConditionallyAsync(T data1, T data2, string data2Etag)
{
const string operation = "InsertTableEntryConditionally";
string data2Str = (data2 == null ? "null" : data2.ToString());
var startTime = DateTime.UtcNow;
if (Logger.IsEnabled(LogLevel.Trace)) Logger.Trace("{0} into table {1} data1 {2} data2 {3}", operation, TableName, data1, data2Str);
try
{
try
{
// WAS:
// Only AddObject, do NOT AttachTo. If we did both UpdateObject and AttachTo, it would have been equivalent to InsertOrReplace.
// svc.AddObject(TableName, data);
// ---
// svc.AttachTo(TableName, tableVersion, tableVersionEtag);
// svc.UpdateObject(tableVersion);
// SaveChangesOptions.ReplaceOnUpdate | SaveChangesOptions.Batch,
// EntityDescriptor dataResult = svc.GetEntityDescriptor(data);
// return dataResult.ETag;
var entityBatch = new TableBatchOperation();
entityBatch.Add(TableOperation.Insert(data1));
data2.ETag = data2Etag;
entityBatch.Add(TableOperation.Replace(data2));
var opResults = await tableReference.ExecuteBatchAsync(entityBatch);
//The batch results are returned in order of execution,
//see reference at https://msdn.microsoft.com/en-us/library/microsoft.windowsazure.storage.table.cloudtable.executebatch.aspx.
//The ETag of data is needed in further operations.
return new Tuple<string, string>(opResults[0].Etag, opResults[1].Etag);
}
catch (Exception exc)
{
CheckAlertWriteError(operation, data1, data2Str, exc);
throw;
}
}
finally
{
CheckAlertSlowAccess(startTime, operation);
}
}
internal async Task<Tuple<string, string>> UpdateTwoTableEntriesConditionallyAsync(T data1, string data1Etag, T data2, string data2Etag)
{
const string operation = "UpdateTableEntryConditionally";
string data2Str = (data2 == null ? "null" : data2.ToString());
var startTime = DateTime.UtcNow;
if (Logger.IsEnabled(LogLevel.Trace)) Logger.Trace("{0} table {1} data1 {2} data2 {3}", operation, TableName, data1, data2Str);
try
{
try
{
// WAS:
// Only AddObject, do NOT AttachTo. If we did both UpdateObject and AttachTo, it would have been equivalent to InsertOrReplace.
// svc.AttachTo(TableName, data, dataEtag);
// svc.UpdateObject(data);
// ----
// svc.AttachTo(TableName, tableVersion, tableVersionEtag);
// svc.UpdateObject(tableVersion);
// SaveChangesOptions.ReplaceOnUpdate | SaveChangesOptions.Batch,
// EntityDescriptor dataResult = svc.GetEntityDescriptor(data);
// return dataResult.ETag;
var entityBatch = new TableBatchOperation();
data1.ETag = data1Etag;
entityBatch.Add(TableOperation.Replace(data1));
if (data2 != null && data2Etag != null)
{
data2.ETag = data2Etag;
entityBatch.Add(TableOperation.Replace(data2));
}
var opResults = await tableReference.ExecuteBatchAsync(entityBatch);
//The batch results are returned in order of execution,
//see reference at https://msdn.microsoft.com/en-us/library/microsoft.windowsazure.storage.table.cloudtable.executebatch.aspx.
//The ETag of data is needed in further operations.
return new Tuple<string, string>(opResults[0].Etag, opResults[1].Etag);
}
catch (Exception exc)
{
CheckAlertWriteError(operation, data1, data2Str, exc);
throw;
}
}
finally
{
CheckAlertSlowAccess(startTime, operation);
}
}
// Utility methods
private CloudTableClient GetCloudTableOperationsClient()
{
try
{
CloudStorageAccount storageAccount = AzureStorageUtils.GetCloudStorageAccount(ConnectionString);
CloudTableClient operationsClient = storageAccount.CreateCloudTableClient();
operationsClient.DefaultRequestOptions.RetryPolicy = AzureTableDefaultPolicies.TableOperationRetryPolicy;
operationsClient.DefaultRequestOptions.ServerTimeout = AzureTableDefaultPolicies.TableOperationTimeout;
// Values supported can be AtomPub, Json, JsonFullMetadata or JsonNoMetadata with Json being the default value
operationsClient.DefaultRequestOptions.PayloadFormat = TablePayloadFormat.JsonNoMetadata;
return operationsClient;
}
catch (Exception exc)
{
Logger.Error((int)Utilities.ErrorCode.AzureTable_17, "Error creating CloudTableOperationsClient.", exc);
throw;
}
}
private CloudTableClient GetCloudTableCreationClient()
{
try
{
CloudStorageAccount storageAccount = AzureStorageUtils.GetCloudStorageAccount(ConnectionString);
CloudTableClient creationClient = storageAccount.CreateCloudTableClient();
creationClient.DefaultRequestOptions.RetryPolicy = AzureTableDefaultPolicies.TableCreationRetryPolicy;
creationClient.DefaultRequestOptions.ServerTimeout = AzureTableDefaultPolicies.TableCreationTimeout;
// Values supported can be AtomPub, Json, JsonFullMetadata or JsonNoMetadata with Json being the default value
creationClient.DefaultRequestOptions.PayloadFormat = TablePayloadFormat.JsonNoMetadata;
return creationClient;
}
catch (Exception exc)
{
Logger.Error((int)Utilities.ErrorCode.AzureTable_18, "Error creating CloudTableCreationClient.", exc);
throw;
}
}
private void CheckAlertWriteError(string operation, object data1, string data2, Exception exc)
{
HttpStatusCode httpStatusCode;
string restStatus;
if(AzureStorageUtils.EvaluateException(exc, out httpStatusCode, out restStatus) && AzureStorageUtils.IsContentionError(httpStatusCode))
{
                // log at Verbose, since failure on a conditional write is not an error. Will analyze and warn later, if required.
if(Logger.IsEnabled(LogLevel.Debug)) Logger.Debug((int)Utilities.ErrorCode.AzureTable_13,
$"Intermediate Azure table write error {operation} to table {TableName} data1 {(data1 ?? "null")} data2 {(data2 ?? "null")}", exc);
}
else
{
Logger.Error((int)Utilities.ErrorCode.AzureTable_14,
$"Azure table access write error {operation} to table {TableName} entry {data1}", exc);
}
}
private void CheckAlertSlowAccess(DateTime startOperation, string operation)
{
var timeSpan = DateTime.UtcNow - startOperation;
if (timeSpan > AzureTableDefaultPolicies.TableOperationTimeout)
{
Logger.Warn((int)Utilities.ErrorCode.AzureTable_15, "Slow access to Azure Table {0} for {1}, which took {2}.", TableName, operation, timeSpan);
}
}
/// <summary>
/// Helper functions for building table queries.
/// </summary>
private class TableQueryFilterBuilder
{
/// <summary>
            /// Builds a query string to match the partition key
/// </summary>
/// <param name="partitionKey"></param>
/// <returns></returns>
public static string MatchPartitionKeyFilter(string partitionKey)
{
return TableQuery.GenerateFilterCondition("PartitionKey", QueryComparisons.Equal, partitionKey);
}
/// <summary>
            /// Builds a query string to match the row key
/// </summary>
/// <param name="rowKey"></param>
/// <returns></returns>
public static string MatchRowKeyFilter(string rowKey)
{
return TableQuery.GenerateFilterCondition("RowKey", QueryComparisons.Equal, rowKey);
}
/// <summary>
            /// Builds a query string that matches a specific partition key and row key.
/// </summary>
/// <param name="partitionKey"></param>
/// <param name="rowKey"></param>
/// <returns></returns>
public static string MatchPartitionKeyAndRowKeyFilter(string partitionKey, string rowKey)
{
return TableQuery.CombineFilters(MatchPartitionKeyFilter(partitionKey), TableOperators.And,
MatchRowKeyFilter(rowKey));
}
}
}
internal static class TableDataManagerInternalExtensions
{
internal static IEnumerable<IEnumerable<TItem>> ToBatch<TItem>(this IEnumerable<TItem> source, int size)
{
using (IEnumerator<TItem> enumerator = source.GetEnumerator())
while (enumerator.MoveNext())
yield return Take(enumerator, size);
}
private static IEnumerable<TItem> Take<TItem>(IEnumerator<TItem> source, int size)
{
int i = 0;
do
yield return source.Current;
while (++i < size && source.MoveNext());
}
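        // ToBatch lazily splits a sequence into consecutive chunks that share one enumerator,
        // e.g. new[] { 1, 2, 3, 4, 5 }.ToBatch(2) yields { 1, 2 }, { 3, 4 }, { 5 }.
        // Each inner chunk must be fully consumed before the next one is requested, which is the
        // case in ClearTableAsync above because every batch is materialized with ToList().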
}
}
| |
using System.Collections.Generic;
using System.Linq;
using System.Globalization;
using TribalWars.Tools;
using TribalWars.Villages;
using TribalWars.Worlds;
namespace TribalWars.Controls.AccordeonLocation
{
#region Enums
public enum SearchForEnum
{
Players = 0,
Tribes = 1,
Villages = 2
}
#endregion
/// <summary>
/// Wrapper for world searching options
/// </summary>
public class FinderOptions
{
#region Properties
/// <summary>
        /// Gets or sets a value indicating what to search for
/// </summary>
public SearchForEnum SearchFor { get; set; }
/// <summary>
/// Gets or sets the tribe the results need to be filtered on
/// </summary>
public Tribe Tribe { get; set; }
/// <summary>
/// Gets or sets the search string
/// </summary>
public string Text { get; set; }
/// <summary>
/// Gets or sets the area that needs to be evaluated
/// </summary>
public FinderLocationEnum EvaluatedArea { get; set; }
/// <summary>
        /// Gets or sets the lower bound of the points search range
/// </summary>
public int PointsBetweenStart { private get; set; }
/// <summary>
        /// Gets or sets the upper bound of the points search range
/// </summary>
public int PointsBetweenEnd { private get; set; }
/// <summary>
/// Gets or sets the limit of search results
/// </summary>
public int ResultLimit { private get; set; }
/// <summary>
/// Gets or sets the specific searching method
/// </summary>
public FinderOptionsEnum SearchStrategy { get; set; }
#endregion
#region Constructors
public FinderOptions(SearchForEnum search)
{
SearchFor = search;
}
#endregion
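        // A minimal usage sketch (the concrete values are illustrative):
        //   var options = new FinderOptions(SearchForEnum.Players)
        //   {
        //       Text = "SomePlayer",
        //       EvaluatedArea = FinderLocationEnum.EntireMap,
        //       SearchStrategy = FinderOptionsEnum.Strongest,
        //       ResultLimit = 25
        //   };
        //   IEnumerable<Player> hits = options.PlayerMatches();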
#region Match Lists
/// <summary>
/// Finds all players matching the options
/// </summary>
public IEnumerable<Player> PlayerMatches()
{
switch (EvaluatedArea)
{
case FinderLocationEnum.VisibleMap:
return PlayerMatches(new List<Player>(World.Default.Players.Where(World.Default.Map.Display.IsVisible)));
case FinderLocationEnum.EntireMap:
return PlayerMatches(new List<Player>(World.Default.Players));
case FinderLocationEnum.Polygon:
return PlayerMatches(World.Default.Map.Manipulators.PolygonManipulator.GetAllPolygonVillages().GetPlayers().ToList());
case FinderLocationEnum.ActiveRectangle:
var villagesInActiveRectangle = World.Default.Villages.Values.Where(x => World.Default.Monitor.ActiveRectangle.Contains(x.Location));
return PlayerMatches(villagesInActiveRectangle.GetPlayers().ToList());
}
return null;
}
/// <summary>
/// Finds all players matching the options
/// </summary>
public IEnumerable<Player> PlayerMatches(List<Player> list)
{
var outList = new List<Player>();
int results = 0;
if (SearchStrategy == FinderOptionsEnum.Strongest)
list.Sort();
foreach (Player ply in list)
{
if (Match(ply))
{
outList.Add(ply);
results++;
}
if (ResultLimit != 0 && results == ResultLimit)
return outList;
}
return outList;
}
/// <summary>
/// Finds all tribes matching the options
/// </summary>
public IEnumerable<Tribe> TribeMatches()
{
switch (EvaluatedArea)
{
case FinderLocationEnum.VisibleMap:
return TribeMatches(new List<Tribe>(World.Default.Tribes.Where(World.Default.Map.Display.IsVisible)));
case FinderLocationEnum.EntireMap:
return TribeMatches(new List<Tribe>(World.Default.Tribes));
case FinderLocationEnum.Polygon:
return TribeMatches(World.Default.Map.Manipulators.PolygonManipulator.GetAllPolygonVillages().GetTribes().ToList());
case FinderLocationEnum.ActiveRectangle:
var villagesInActiveRectangle = World.Default.Villages.Values.Where(x => World.Default.Monitor.ActiveRectangle.Contains(x.Location));
return TribeMatches(villagesInActiveRectangle.GetTribes().ToList());
}
return null;
}
/// <summary>
/// Finds all tribes matching the options
/// </summary>
public IEnumerable<Tribe> TribeMatches(List<Tribe> list)
{
var outList = new List<Tribe>();
int results = 0;
if (SearchStrategy == FinderOptionsEnum.Strongest)
list.Sort();
foreach (Tribe tribe in list)
{
if (Match(tribe))
{
outList.Add(tribe);
results++;
}
if (ResultLimit != 0 && results == ResultLimit)
return outList;
}
return outList;
}
/// <summary>
/// Finds all villages matching the options
/// </summary>
public IEnumerable<Village> VillageMatches()
{
switch (EvaluatedArea)
{
case FinderLocationEnum.EntireMap:
return VillageMatches(new List<Village>(World.Default.Villages.Values));
case FinderLocationEnum.VisibleMap:
return VillageMatches(new List<Village>(World.Default.Villages.Values.Where(World.Default.Map.Display.IsVisible)));
case FinderLocationEnum.Polygon:
return VillageMatches(World.Default.Map.Manipulators.PolygonManipulator.GetAllPolygonVillages().ToList());
case FinderLocationEnum.ActiveRectangle:
var list = World.Default.Villages.Values.Where(village => World.Default.Monitor.ActiveRectangle.Contains(village.Location)).ToList();
return VillageMatches(list);
}
return null;
}
/// <summary>
/// Finds all villages matching the options
/// </summary>
public IEnumerable<Village> VillageMatches(List<Village> list)
{
var outList = new List<Village>();
int results = 0;
if (SearchStrategy == FinderOptionsEnum.Strongest)
list.Sort();
foreach (Village village in list)
{
if (Match(village))
{
outList.Add(village);
results++;
}
if (ResultLimit != 0 && results == ResultLimit)
return outList;
}
return outList;
}
#endregion
#region Single Match
/// <summary>
/// Checks if the player matches the current search criteria
/// </summary>
public bool Match(Player ply)
{
if (Tribe != null && !Tribe.Equals(ply.Tribe))
return false;
if (!string.IsNullOrEmpty(Text) && !ply.Name.ToUpper(CultureInfo.InvariantCulture).Contains(Text))
{
return false;
}
switch (SearchStrategy)
{
case FinderOptionsEnum.Inactives:
if (ply.PreviousPlayerDetails == null)
return false;
if (ply.Any(village => village.PreviousVillageDetails == null || village.Points > village.PreviousVillageDetails.Points))
return false;
break;
case FinderOptionsEnum.LostPoints:
if (ply.PreviousPlayerDetails == null || ply.Points >= ply.PreviousPlayerDetails.Points)
return false;
break;
case FinderOptionsEnum.TribeChange:
if (!ply.TribeChange)
return false;
break;
}
if (ply.Points < PointsBetweenStart || (PointsBetweenEnd > 0 && ply.Points > PointsBetweenEnd))
return false;
return true;
}
/// <summary>
/// Checks if the tribe matches the current search criteria
/// </summary>
public bool Match(Tribe tribe)
{
if (!string.IsNullOrEmpty(Text) && !tribe.Name.ToUpper(CultureInfo.InvariantCulture).Contains(Text) && !tribe.Tag.ToUpper(CultureInfo.InvariantCulture).Contains(Text))
{
return false;
}
if (tribe.AllPoints < PointsBetweenStart || (PointsBetweenEnd > 0 && tribe.AllPoints > PointsBetweenEnd))
return false;
return true;
}
/// <summary>
/// Checks if the village matches the current search criteria
/// </summary>
public bool Match(Village village)
{
if (SearchStrategy == FinderOptionsEnum.Nobled)
{
if (!village.HasPlayer || village.PreviousVillageDetails == null || village.Player.Equals(village.PreviousVillageDetails.Player))
return false;
if (Tribe != null &&
(
(!village.HasTribe || !Tribe.Equals(village.Player.Tribe))
&&
(!village.PreviousVillageDetails.HasTribe || !Tribe.Equals(village.PreviousVillageDetails.Player.Tribe))
)
)
return false;
}
else
{
if (Tribe != null && (!village.HasTribe || !Tribe.Equals(village.Player.Tribe)))
return false;
switch (SearchStrategy)
{
case FinderOptionsEnum.NewInactives:
if (village.HasPlayer || village.PreviousVillageDetails == null || !village.PreviousVillageDetails.HasPlayer)
return false;
break;
case FinderOptionsEnum.LostPoints:
if (village.PreviousVillageDetails == null || village.Points >= village.PreviousVillageDetails.Points)
return false;
break;
}
}
if (!string.IsNullOrEmpty(Text) && !village.ToString().ToUpper(CultureInfo.InvariantCulture).Contains(Text))
{
return false;
}
if (village.Points < PointsBetweenStart || (PointsBetweenEnd > 0 && village.Points > PointsBetweenEnd))
return false;
return true;
}
#endregion
#region Enums
/// <summary>
/// List of specific search methods
/// </summary>
public enum FinderOptionsEnum
{
/// <summary>
/// Search all villages
/// </summary>
All = 0,
/// <summary>
/// Search villages that have gone abandoned since last data download
/// </summary>
NewInactives = 2,
/// <summary>
/// Search the strongest players and tribes
/// </summary>
Strongest = 1,
/// <summary>
/// Search villages that were nobled since the last data download
/// </summary>
Nobled = 5,
/// <summary>
/// Search villages that lost points since the last data download
/// </summary>
LostPoints = 4,
/// <summary>
/// Search players that have not grown points since the last data download
/// </summary>
Inactives = 3,
/// <summary>
/// Search players that have changed tribes since the last data download
/// </summary>
TribeChange = 6
}
/// <summary>
/// List of the different locations where to search
/// </summary>
public enum FinderLocationEnum
{
/// <summary>
/// Loop through the entire map
/// </summary>
EntireMap = 0,
/// <summary>
/// Loop through the visible part of the map
/// </summary>
VisibleMap = 1,
/// <summary>
/// Loop through the active rectangle
/// </summary>
ActiveRectangle = 2,
/// <summary>
/// Loop through the selected polygon(s)
/// </summary>
Polygon = 3
}
#endregion
}
}
| |
#region License
//
// Copyright (c) 2007-2018, Sean Chambers <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion
using System;
using System.Collections.Generic;
using System.Reflection;
using FluentMigrator.Infrastructure;
using FluentMigrator.Runner.Initialization.AssemblyLoader;
using FluentMigrator.Runner.Logging;
using FluentMigrator.Runner.Processors;
using JetBrains.Annotations;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
namespace FluentMigrator.Runner.Initialization
{
public class TaskExecutor
{
[NotNull]
private readonly ILogger _logger;
[NotNull]
private readonly IAssemblySource _assemblySource;
private readonly RunnerOptions _runnerOptions;
[NotNull, ItemNotNull]
private readonly Lazy<IServiceProvider> _lazyServiceProvider;
private IReadOnlyCollection<Assembly> _assemblies;
public TaskExecutor(
[NotNull] ILogger<TaskExecutor> logger,
[NotNull] IAssemblySource assemblySource,
[NotNull] IOptions<RunnerOptions> runnerOptions,
[NotNull] IServiceProvider serviceProvider)
{
_logger = logger;
_assemblySource = assemblySource;
_runnerOptions = runnerOptions.Value;
#pragma warning disable 612
ConnectionStringProvider = serviceProvider.GetService<IConnectionStringProvider>();
#pragma warning restore 612
_lazyServiceProvider = new Lazy<IServiceProvider>(() => serviceProvider);
}
[Obsolete]
public TaskExecutor([NotNull] IRunnerContext runnerContext)
{
var runnerCtxt = runnerContext ?? throw new ArgumentNullException(nameof(runnerContext));
_logger = new AnnouncerFluentMigratorLogger(runnerCtxt.Announcer);
_runnerOptions = new RunnerOptions(runnerCtxt);
var asmLoaderFactory = new AssemblyLoaderFactory();
_assemblySource = new AssemblySource(() => new AssemblyCollection(asmLoaderFactory.GetTargetAssemblies(runnerCtxt.Targets)));
ConnectionStringProvider = new DefaultConnectionStringProvider();
_lazyServiceProvider = new Lazy<IServiceProvider>(
() => runnerContext
.CreateServices(
ConnectionStringProvider,
asmLoaderFactory)
.BuildServiceProvider(validateScopes: true));
}
[Obsolete("Ony the statically provided factories are accessed")]
public TaskExecutor(
[NotNull] IRunnerContext runnerContext,
[CanBeNull] IConnectionStringProvider connectionStringProvider,
[NotNull] AssemblyLoaderFactory assemblyLoaderFactory,
// ReSharper disable once UnusedParameter.Local
MigrationProcessorFactoryProvider factoryProvider)
: this(
runnerContext,
assemblyLoaderFactory,
connectionStringProvider)
{
}
[Obsolete]
public TaskExecutor(
[NotNull] IRunnerContext runnerContext,
[NotNull] AssemblyLoaderFactory assemblyLoaderFactory,
[CanBeNull] IConnectionStringProvider connectionStringProvider = null)
{
var runnerCtxt = runnerContext ?? throw new ArgumentNullException(nameof(runnerContext));
_logger = new AnnouncerFluentMigratorLogger(runnerCtxt.Announcer);
_runnerOptions = new RunnerOptions(runnerCtxt);
ConnectionStringProvider = connectionStringProvider;
var asmLoaderFactory = assemblyLoaderFactory ?? throw new ArgumentNullException(nameof(assemblyLoaderFactory));
_assemblySource = new AssemblySource(() => new AssemblyCollection(asmLoaderFactory.GetTargetAssemblies(runnerCtxt.Targets)));
_lazyServiceProvider = new Lazy<IServiceProvider>(
() => runnerContext
.CreateServices(
connectionStringProvider,
asmLoaderFactory)
.BuildServiceProvider(validateScopes: true));
}
/// <summary>
/// Gets the current migration runner
/// </summary>
/// <remarks>
/// This will only be set during a migration operation
/// </remarks>
[CanBeNull]
protected IMigrationRunner Runner { get; set; }
/// <summary>
/// Gets the connection string provider
/// </summary>
[CanBeNull]
[Obsolete]
protected IConnectionStringProvider ConnectionStringProvider { get; }
/// <summary>
/// Gets the service provider used to instantiate all migration services
/// </summary>
[NotNull]
protected IServiceProvider ServiceProvider => _lazyServiceProvider.Value;
[Obsolete]
protected virtual IEnumerable<Assembly> GetTargetAssemblies()
{
return _assemblies ?? (_assemblies = _assemblySource.Assemblies);
}
/// <summary>
/// Will be called during the runner scope initialization
/// </summary>
/// <remarks>
/// The <see cref="Runner"/> isn't initialized yet.
/// </remarks>
protected virtual void Initialize()
{
}
public void Execute()
{
using (var scope = new RunnerScope(this))
{
switch (_runnerOptions.Task)
{
case null:
case "":
case "migrate":
case "migrate:up":
if (_runnerOptions.Version != 0)
scope.Runner.MigrateUp(_runnerOptions.Version);
else
scope.Runner.MigrateUp();
break;
case "rollback":
if (_runnerOptions.Steps == 0)
_runnerOptions.Steps = 1;
scope.Runner.Rollback(_runnerOptions.Steps);
break;
case "rollback:toversion":
scope.Runner.RollbackToVersion(_runnerOptions.Version);
break;
case "rollback:all":
scope.Runner.RollbackToVersion(0);
break;
case "migrate:down":
scope.Runner.MigrateDown(_runnerOptions.Version);
break;
case "validateversionorder":
scope.Runner.ValidateVersionOrder();
break;
case "listmigrations":
scope.Runner.ListMigrations();
break;
}
}
_logger.LogSay("Task completed.");
}
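// Usage sketch (illustrative; assumes the executor and its dependencies have been
// registered with the FluentMigrator dependency injection container):
//   var executor = serviceProvider.GetRequiredService<TaskExecutor>();
//   if (executor.HasMigrationsToApply())
//   {
//       // e.g. back up the database first
//   }
//   executor.Execute(); // runs the task selected via RunnerOptions.Task ("migrate", "rollback", ...)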
/// <summary>
/// Checks whether the current task will actually run any migrations.
/// Can be used to decide whether it's necessary to perform a backup before the migrations are executed.
/// </summary>
public bool HasMigrationsToApply()
{
using (var scope = new RunnerScope(this))
{
switch (_runnerOptions.Task)
{
case null:
case "":
case "migrate":
case "migrate:up":
if (_runnerOptions.Version != 0)
return scope.Runner.HasMigrationsToApplyUp(_runnerOptions.Version);
return scope.Runner.HasMigrationsToApplyUp();
case "rollback":
case "rollback:all":
// Number of steps doesn't matter as long as there's at least
// one migration applied (at least that one will be rolled back)
return scope.Runner.HasMigrationsToApplyRollback();
case "rollback:toversion":
case "migrate:down":
return scope.Runner.HasMigrationsToApplyDown(_runnerOptions.Version);
default:
return false;
}
}
}
private class RunnerScope : IDisposable
{
[NotNull]
private readonly TaskExecutor _executor;
[CanBeNull]
private readonly IServiceScope _serviceScope;
private readonly bool _hasCustomRunner;
public RunnerScope([NotNull] TaskExecutor executor)
{
_executor = executor;
executor.Initialize();
if (executor.Runner != null)
{
Runner = executor.Runner;
_hasCustomRunner = true;
}
else
{
var serviceScope = executor.ServiceProvider.CreateScope();
_serviceScope = serviceScope;
_executor.Runner = Runner = serviceScope.ServiceProvider.GetRequiredService<IMigrationRunner>();
}
}
public IMigrationRunner Runner { get; }
public void Dispose()
{
if (_hasCustomRunner)
{
Runner.Processor.Dispose();
}
else
{
_executor.Runner = null;
_serviceScope?.Dispose();
}
}
}
}
}
| |
// Copyright (c) The Avalonia Project. All rights reserved.
// Licensed under the MIT license. See licence.md file in the project root for full license information.
using System;
using System.Reactive;
using System.Reactive.Disposables;
using System.Reactive.Linq;
using System.Reactive.Subjects;
using Avalonia.Data;
using Avalonia.Reactive;
namespace Avalonia
{
/// <summary>
/// Provides extension methods for <see cref="AvaloniaObject"/> and related classes.
/// </summary>
public static class AvaloniaObjectExtensions
{
/// <summary>
/// Gets an observable for a <see cref="AvaloniaProperty"/>.
/// </summary>
/// <param name="o">The object.</param>
/// <param name="property">The property.</param>
/// <returns>
/// An observable which fires immediately with the current value of the property on the
/// object and subsequently each time the property value changes.
/// </returns>
public static IObservable<object> GetObservable(this IAvaloniaObject o, AvaloniaProperty property)
{
Contract.Requires<ArgumentNullException>(o != null);
Contract.Requires<ArgumentNullException>(property != null);
return new AvaloniaObservable<object>(
observer =>
{
EventHandler<AvaloniaPropertyChangedEventArgs> handler = (s, e) =>
{
if (e.Property == property)
{
observer.OnNext(e.NewValue);
}
};
observer.OnNext(o.GetValue(property));
o.PropertyChanged += handler;
return Disposable.Create(() =>
{
o.PropertyChanged -= handler;
});
},
GetDescription(o, property));
}
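// Usage sketch (illustrative; Border.BackgroundProperty is only an example property,
// not something defined in this file):
//   var border = new Border();
//   border.GetObservable(Border.BackgroundProperty)
//         .Subscribe(brush => Console.WriteLine($"Background changed to {brush}"));
// The subscriber receives the current value immediately and then every subsequent change.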
/// <summary>
/// Gets an observable for a <see cref="AvaloniaProperty"/>.
/// </summary>
/// <param name="o">The object.</param>
/// <typeparam name="T">The property type.</typeparam>
/// <param name="property">The property.</param>
/// <returns>
/// An observable which fires immediately with the current value of the property on the
/// object and subsequently each time the property value changes.
/// </returns>
public static IObservable<T> GetObservable<T>(this IAvaloniaObject o, AvaloniaProperty<T> property)
{
Contract.Requires<ArgumentNullException>(o != null);
Contract.Requires<ArgumentNullException>(property != null);
return o.GetObservable((AvaloniaProperty)property).Cast<T>();
}
/// <summary>
/// Gets an observable for a <see cref="AvaloniaProperty"/>.
/// </summary>
/// <param name="o">The object.</param>
/// <typeparam name="T">The type of the property.</typeparam>
/// <param name="property">The property.</param>
/// <returns>
/// An observable which when subscribed pushes the old and new values of the property each
/// time it is changed. Note that the observable returned from this method does not fire
/// with the current value of the property immediately.
/// </returns>
public static IObservable<Tuple<T, T>> GetObservableWithHistory<T>(
this IAvaloniaObject o,
AvaloniaProperty<T> property)
{
Contract.Requires<ArgumentNullException>(o != null);
Contract.Requires<ArgumentNullException>(property != null);
return new AvaloniaObservable<Tuple<T, T>>(
observer =>
{
EventHandler<AvaloniaPropertyChangedEventArgs> handler = (s, e) =>
{
if (e.Property == property)
{
observer.OnNext(Tuple.Create((T)e.OldValue, (T)e.NewValue));
}
};
o.PropertyChanged += handler;
return Disposable.Create(() =>
{
o.PropertyChanged -= handler;
});
},
GetDescription(o, property));
}
/// <summary>
/// Gets a subject for a <see cref="AvaloniaProperty"/>.
/// </summary>
/// <param name="o">The object.</param>
/// <param name="property">The property.</param>
/// <param name="priority">
/// The priority with which binding values are written to the object.
/// </param>
/// <returns>
/// An <see cref="ISubject{Object}"/> which can be used for two-way binding to/from the
/// property.
/// </returns>
public static ISubject<object> GetSubject(
this IAvaloniaObject o,
AvaloniaProperty property,
BindingPriority priority = BindingPriority.LocalValue)
{
// TODO: Subject.Create<T> is not yet in stable Rx : once it is, remove the
// AnonymousSubject classes and use Subject.Create<T>.
var output = new Subject<object>();
var result = new AnonymousSubject<object>(
Observer.Create<object>(
x => output.OnNext(x),
e => output.OnError(e),
() => output.OnCompleted()),
o.GetObservable(property));
o.Bind(property, output, priority);
return result;
}
/// <summary>
/// Gets a subject for a <see cref="AvaloniaProperty"/>.
/// </summary>
/// <typeparam name="T">The property type.</typeparam>
/// <param name="o">The object.</param>
/// <param name="property">The property.</param>
/// <param name="priority">
/// The priority with which binding values are written to the object.
/// </param>
/// <returns>
/// An <see cref="ISubject{T}"/> which can be used for two-way binding to/from the
/// property.
/// </returns>
public static ISubject<T> GetSubject<T>(
this IAvaloniaObject o,
AvaloniaProperty<T> property,
BindingPriority priority = BindingPriority.LocalValue)
{
// TODO: Subject.Create<T> is not yet in stable Rx : once it is, remove the
// AnonymousSubject classes from this file and use Subject.Create<T>.
var output = new Subject<T>();
var result = new AnonymousSubject<T>(
Observer.Create<T>(
x => output.OnNext(x),
e => output.OnError(e),
() => output.OnCompleted()),
o.GetObservable(property));
o.Bind(property, output, priority);
return result;
}
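// Usage sketch (illustrative; TextBlock.TextProperty is only an example property):
//   var subject = textBlock.GetSubject(TextBlock.TextProperty);
//   subject.OnNext("new value");                       // writes to the property at the given priority
//   subject.Subscribe(v => { /* observe changes */ }); // receives the current value and later changes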
/// <summary>
/// Gets a weak observable for a <see cref="AvaloniaProperty"/>.
/// </summary>
/// <param name="o">The object.</param>
/// <param name="property">The property.</param>
/// <returns>An observable.</returns>
public static IObservable<object> GetWeakObservable(this IAvaloniaObject o, AvaloniaProperty property)
{
Contract.Requires<ArgumentNullException>(o != null);
Contract.Requires<ArgumentNullException>(property != null);
return new WeakPropertyChangedObservable(
new WeakReference<IAvaloniaObject>(o),
property,
GetDescription(o, property));
}
/// <summary>
/// Binds a property on an <see cref="IAvaloniaObject"/> to an <see cref="IBinding"/>.
/// </summary>
/// <param name="target">The object.</param>
/// <param name="property">The property to bind.</param>
/// <param name="binding">The binding.</param>
/// <param name="anchor">
/// An optional anchor from which to locate required context. When binding to objects that
/// are not in the logical tree, certain types of binding need an anchor into the tree in
/// order to locate named controls or resources. The <paramref name="anchor"/> parameter
/// can be used to provide this context.
/// </param>
/// <returns>An <see cref="IDisposable"/> which can be used to cancel the binding.</returns>
public static IDisposable Bind(
this IAvaloniaObject target,
AvaloniaProperty property,
IBinding binding,
object anchor = null)
{
Contract.Requires<ArgumentNullException>(target != null);
Contract.Requires<ArgumentNullException>(property != null);
Contract.Requires<ArgumentNullException>(binding != null);
var result = binding.Initiate(target, property, anchor);
if (result != null)
{
return BindingOperations.Apply(target, property, result, anchor);
}
else
{
return Disposable.Empty;
}
}
/// <summary>
/// Subscribes to a property changed notifications for changes that originate from a
/// <typeparamref name="TTarget"/>.
/// </summary>
/// <typeparam name="TTarget">The type of the property change sender.</typeparam>
/// <param name="observable">The property changed observable.</param>
/// <param name="action">
/// The method to call. The parameters are the sender and the event args.
/// </param>
/// <returns>A disposable that can be used to terminate the subscription.</returns>
public static IDisposable AddClassHandler<TTarget>(
this IObservable<AvaloniaPropertyChangedEventArgs> observable,
Action<TTarget, AvaloniaPropertyChangedEventArgs> action)
where TTarget : AvaloniaObject
{
return observable.Subscribe(e =>
{
if (e.Sender is TTarget)
{
action((TTarget)e.Sender, e);
}
});
}
/// <summary>
/// Subscribes to a property changed notifications for changes that originate from a
/// <typeparamref name="TTarget"/>.
/// </summary>
/// <typeparam name="TTarget">The type of the property change sender.</typeparam>
/// <param name="observable">The property changed observable.</param>
/// <param name="handler">Given a TTarget, returns the handler.</param>
/// <returns>A disposable that can be used to terminate the subscription.</returns>
public static IDisposable AddClassHandler<TTarget>(
this IObservable<AvaloniaPropertyChangedEventArgs> observable,
Func<TTarget, Action<AvaloniaPropertyChangedEventArgs>> handler)
where TTarget : class
{
return observable.Subscribe(e => SubscribeAdapter(e, handler));
}
/// <summary>
/// Gets a description of a property that can be used in observables.
/// </summary>
/// <param name="o">The object.</param>
/// <param name="property">The property</param>
/// <returns>The description.</returns>
private static string GetDescription(IAvaloniaObject o, AvaloniaProperty property)
{
return $"{o.GetType().Name}.{property.Name}";
}
/// <summary>
/// Observer method for <see cref="AddClassHandler{TTarget}(IObservable{AvaloniaPropertyChangedEventArgs},
/// Func{TTarget, Action{AvaloniaPropertyChangedEventArgs}})"/>.
/// </summary>
/// <typeparam name="TTarget">The sender type to accept.</typeparam>
/// <param name="e">The event args.</param>
/// <param name="handler">Given a TTarget, returns the handler.</param>
private static void SubscribeAdapter<TTarget>(
AvaloniaPropertyChangedEventArgs e,
Func<TTarget, Action<AvaloniaPropertyChangedEventArgs>> handler)
where TTarget : class
{
var target = e.Sender as TTarget;
if (target != null)
{
handler(target)(e);
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System.Buffers;
using System.Linq;
using System.Collections.Generic;
using System.Text;
using System.Net.Http.HPack;
using Xunit;
#if KESTREL
using Microsoft.AspNetCore.Server.Kestrel.Core.Internal.Http;
#endif
namespace System.Net.Http.Unit.Tests.HPack
{
public class HPackDecoderTests
{
private const int DynamicTableInitialMaxSize = 4096;
private const int MaxHeaderFieldSize = 8192;
// Indexed Header Field Representation - Static Table - Index 2 (:method: GET)
private static readonly byte[] _indexedHeaderStatic = new byte[] { 0x82 };
// Indexed Header Field Representation - Dynamic Table - Index 62 (first index in dynamic table)
private static readonly byte[] _indexedHeaderDynamic = new byte[] { 0xbe };
// Literal Header Field with Incremental Indexing Representation - New Name
private static readonly byte[] _literalHeaderFieldWithIndexingNewName = new byte[] { 0x40 };
// Literal Header Field with Incremental Indexing Representation - Indexed Name - Index 58 (user-agent)
private static readonly byte[] _literalHeaderFieldWithIndexingIndexedName = new byte[] { 0x7a };
// Literal Header Field without Indexing Representation - New Name
private static readonly byte[] _literalHeaderFieldWithoutIndexingNewName = new byte[] { 0x00 };
// Literal Header Field without Indexing Representation - Indexed Name - Index 58 (user-agent)
private static readonly byte[] _literalHeaderFieldWithoutIndexingIndexedName = new byte[] { 0x0f, 0x2b };
// Literal Header Field Never Indexed Representation - New Name
private static readonly byte[] _literalHeaderFieldNeverIndexedNewName = new byte[] { 0x10 };
// Literal Header Field Never Indexed Representation - Indexed Name - Index 58 (user-agent)
private static readonly byte[] _literalHeaderFieldNeverIndexedIndexedName = new byte[] { 0x1f, 0x2b };
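// Worked example (explanatory comment, not part of the original test data): HPACK
// integers use an N-bit prefix (RFC 7541, section 5.1). Index 58 does not fit into the
// 4-bit prefix used by the "without indexing" and "never indexed" forms (maximum 15),
// so it is encoded as the prefix value 15 followed by a continuation byte of
// 58 - 15 = 43 (0x2b) -- hence the two-byte sequences { 0x0f, 0x2b } and { 0x1f, 0x2b } above.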
private const string _userAgentString = "user-agent";
private const string _headerNameString = "new-header";
private static readonly byte[] _headerNameBytes = Encoding.ASCII.GetBytes(_headerNameString);
// n e w - h e a d e r *
// 10101000 10111110 00010110 10011100 10100011 10010000 10110110 01111111
private static readonly byte[] _headerNameHuffmanBytes = new byte[] { 0xa8, 0xbe, 0x16, 0x9c, 0xa3, 0x90, 0xb6, 0x7f };
private const string _headerValueString = "value";
private static readonly byte[] _headerValueBytes = Encoding.ASCII.GetBytes(_headerValueString);
// v a l u e *
// 11101110 00111010 00101101 00101111
private static readonly byte[] _headerValueHuffmanBytes = new byte[] { 0xee, 0x3a, 0x2d, 0x2f };
private static readonly byte[] _headerName = new byte[] { (byte)_headerNameBytes.Length }
.Concat(_headerNameBytes)
.ToArray();
private static readonly byte[] _headerNameHuffman = new byte[] { (byte)(0x80 | _headerNameHuffmanBytes.Length) }
.Concat(_headerNameHuffmanBytes)
.ToArray();
private static readonly byte[] _headerValue = new byte[] { (byte)_headerValueBytes.Length }
.Concat(_headerValueBytes)
.ToArray();
private static readonly byte[] _headerValueHuffman = new byte[] { (byte)(0x80 | _headerValueHuffmanBytes.Length) }
.Concat(_headerValueHuffmanBytes)
.ToArray();
// & *
// 11111000 11111111
private static readonly byte[] _huffmanLongPadding = new byte[] { 0x82, 0xf8, 0xff };
// EOS *
// 11111111 11111111 11111111 11111111
private static readonly byte[] _huffmanEos = new byte[] { 0x84, 0xff, 0xff, 0xff, 0xff };
private readonly DynamicTable _dynamicTable;
private readonly HPackDecoder _decoder;
private readonly TestHttpHeadersHandler _handler = new TestHttpHeadersHandler();
public HPackDecoderTests()
{
(_dynamicTable, _decoder) = CreateDecoderAndTable();
}
private static (DynamicTable, HPackDecoder) CreateDecoderAndTable()
{
var dynamicTable = new DynamicTable(DynamicTableInitialMaxSize);
var decoder = new HPackDecoder(DynamicTableInitialMaxSize, MaxHeaderFieldSize, dynamicTable);
return (dynamicTable, decoder);
}
[Fact]
public void DecodesIndexedHeaderField_StaticTableWithValue()
{
_decoder.Decode(_indexedHeaderStatic, endHeaders: true, handler: _handler);
Assert.Equal("GET", _handler.DecodedHeaders[":method"]);
Assert.Equal(":method", _handler.DecodedStaticHeaders[H2StaticTable.MethodGet].Key);
Assert.Equal("GET", _handler.DecodedStaticHeaders[H2StaticTable.MethodGet].Value);
}
[Fact]
public void DecodesIndexedHeaderField_StaticTableWithoutValue()
{
byte[] encoded = _literalHeaderFieldWithIndexingIndexedName
.Concat(_headerValue)
.ToArray();
_decoder.Decode(encoded, endHeaders: true, handler: _handler);
Assert.Equal(_headerValueString, _handler.DecodedHeaders[_userAgentString]);
Assert.Equal(_userAgentString, _handler.DecodedStaticHeaders[H2StaticTable.UserAgent].Key);
Assert.Equal(_headerValueString, _handler.DecodedStaticHeaders[H2StaticTable.UserAgent].Value);
}
[Fact]
public void DecodesIndexedHeaderField_DynamicTable()
{
// Add the header to the dynamic table
_dynamicTable.Insert(_headerNameBytes, _headerValueBytes);
// Index it
_decoder.Decode(_indexedHeaderDynamic, endHeaders: true, handler: _handler);
Assert.Equal(_headerValueString, _handler.DecodedHeaders[_headerNameString]);
}
[Fact]
public void DecodesIndexedHeaderField_OutOfRange_Error()
{
HPackDecodingException exception = Assert.Throws<HPackDecodingException>(() =>
_decoder.Decode(_indexedHeaderDynamic, endHeaders: true, handler: _handler));
Assert.Equal(SR.Format(SR.net_http_hpack_invalid_index, 62), exception.Message);
Assert.Empty(_handler.DecodedHeaders);
}
[Fact]
public void DecodesLiteralHeaderFieldWithIncrementalIndexing_NewName()
{
byte[] encoded = _literalHeaderFieldWithIndexingNewName
.Concat(_headerName)
.Concat(_headerValue)
.ToArray();
TestDecodeWithIndexing(encoded, _headerNameString, _headerValueString);
}
[Fact]
public void DecodesLiteralHeaderFieldWithIncrementalIndexing_NewName_HuffmanEncodedName()
{
byte[] encoded = _literalHeaderFieldWithIndexingNewName
.Concat(_headerNameHuffman)
.Concat(_headerValue)
.ToArray();
TestDecodeWithIndexing(encoded, _headerNameString, _headerValueString);
}
[Fact]
public void DecodesLiteralHeaderFieldWithIncrementalIndexing_NewName_HuffmanEncodedValue()
{
byte[] encoded = _literalHeaderFieldWithIndexingNewName
.Concat(_headerName)
.Concat(_headerValueHuffman)
.ToArray();
TestDecodeWithIndexing(encoded, _headerNameString, _headerValueString);
}
[Fact]
public void DecodesLiteralHeaderFieldWithIncrementalIndexing_NewName_HuffmanEncodedNameAndValue()
{
byte[] encoded = _literalHeaderFieldWithIndexingNewName
.Concat(_headerNameHuffman)
.Concat(_headerValueHuffman)
.ToArray();
TestDecodeWithIndexing(encoded, _headerNameString, _headerValueString);
}
[Fact]
public void DecodesLiteralHeaderFieldWithIncrementalIndexing_IndexedName()
{
byte[] encoded = _literalHeaderFieldWithIndexingIndexedName
.Concat(_headerValue)
.ToArray();
TestDecodeWithIndexing(encoded, _userAgentString, _headerValueString);
}
[Fact]
public void DecodesLiteralHeaderFieldWithIncrementalIndexing_IndexedName_HuffmanEncodedValue()
{
byte[] encoded = _literalHeaderFieldWithIndexingIndexedName
.Concat(_headerValueHuffman)
.ToArray();
TestDecodeWithIndexing(encoded, _userAgentString, _headerValueString);
}
[Fact]
public void DecodesLiteralHeaderFieldWithIncrementalIndexing_IndexedName_OutOfRange_Error()
{
// 01 (Literal Header Field with Incremental Indexing Representation)
// 11 1110 (Indexed Name - Index 62 encoded with 6-bit prefix - see http://httpwg.org/specs/rfc7541.html#integer.representation)
// Index 62 is the first entry in the dynamic table. If there's nothing there, the decoder should throw.
HPackDecodingException exception = Assert.Throws<HPackDecodingException>(() => _decoder.Decode(new byte[] { 0x7e }, endHeaders: true, handler: _handler));
Assert.Equal(SR.Format(SR.net_http_hpack_invalid_index, 62), exception.Message);
Assert.Empty(_handler.DecodedHeaders);
}
[Fact]
public void DecodesLiteralHeaderFieldWithoutIndexing_NewName()
{
byte[] encoded = _literalHeaderFieldWithoutIndexingNewName
.Concat(_headerName)
.Concat(_headerValue)
.ToArray();
TestDecodeWithoutIndexing(encoded, _headerNameString, _headerValueString);
}
[Fact]
public void DecodesLiteralHeaderFieldWithoutIndexing_NewName_HuffmanEncodedName()
{
byte[] encoded = _literalHeaderFieldWithoutIndexingNewName
.Concat(_headerNameHuffman)
.Concat(_headerValue)
.ToArray();
TestDecodeWithoutIndexing(encoded, _headerNameString, _headerValueString);
}
[Fact]
public void DecodesLiteralHeaderFieldWithoutIndexing_NewName_HuffmanEncodedValue()
{
byte[] encoded = _literalHeaderFieldWithoutIndexingNewName
.Concat(_headerName)
.Concat(_headerValueHuffman)
.ToArray();
TestDecodeWithoutIndexing(encoded, _headerNameString, _headerValueString);
}
[Fact]
public void DecodesLiteralHeaderFieldWithoutIndexing_NewName_HuffmanEncodedNameAndValue()
{
byte[] encoded = _literalHeaderFieldWithoutIndexingNewName
.Concat(_headerNameHuffman)
.Concat(_headerValueHuffman)
.ToArray();
TestDecodeWithoutIndexing(encoded, _headerNameString, _headerValueString);
}
[Fact]
public void DecodesLiteralHeaderFieldWithoutIndexing_IndexedName()
{
byte[] encoded = _literalHeaderFieldWithoutIndexingIndexedName
.Concat(_headerValue)
.ToArray();
TestDecodeWithoutIndexing(encoded, _userAgentString, _headerValueString);
}
[Fact]
public void DecodesLiteralHeaderFieldWithoutIndexing_IndexedName_HuffmanEncodedValue()
{
byte[] encoded = _literalHeaderFieldWithoutIndexingIndexedName
.Concat(_headerValueHuffman)
.ToArray();
TestDecodeWithoutIndexing(encoded, _userAgentString, _headerValueString);
}
[Fact]
public void DecodesLiteralHeaderFieldWithoutIndexing_IndexedName_OutOfRange_Error()
{
// 0000 (Literal Header Field without Indexing Representation)
// 1111 0010 1111 (Indexed Name - Index 62 encoded with 4-bit prefix - see http://httpwg.org/specs/rfc7541.html#integer.representation)
// Index 62 is the first entry in the dynamic table. If there's nothing there, the decoder should throw.
HPackDecodingException exception = Assert.Throws<HPackDecodingException>(() => _decoder.Decode(new byte[] { 0x0f, 0x2f }, endHeaders: true, handler: _handler));
Assert.Equal(SR.Format(SR.net_http_hpack_invalid_index, 62), exception.Message);
Assert.Empty(_handler.DecodedHeaders);
}
[Fact]
public void DecodesLiteralHeaderFieldNeverIndexed_NewName()
{
byte[] encoded = _literalHeaderFieldNeverIndexedNewName
.Concat(_headerName)
.Concat(_headerValue)
.ToArray();
TestDecodeWithoutIndexing(encoded, _headerNameString, _headerValueString);
}
[Fact]
public void DecodesLiteralHeaderFieldNeverIndexed_NewName_Duplicated()
{
byte[] encoded = _literalHeaderFieldNeverIndexedNewName
.Concat(_headerName)
.Concat(_headerValue)
.ToArray();
encoded = encoded.Concat(encoded).ToArray();
TestDecodeWithoutIndexing(encoded, _headerNameString, _headerValueString);
}
[Fact]
public void DecodesLiteralHeaderFieldNeverIndexed_NewName_HuffmanEncodedName()
{
byte[] encoded = _literalHeaderFieldNeverIndexedNewName
.Concat(_headerNameHuffman)
.Concat(_headerValue)
.ToArray();
TestDecodeWithoutIndexing(encoded, _headerNameString, _headerValueString);
}
[Fact]
public void DecodesLiteralHeaderFieldNeverIndexed_NewName_HuffmanEncodedValue()
{
byte[] encoded = _literalHeaderFieldNeverIndexedNewName
.Concat(_headerName)
.Concat(_headerValueHuffman)
.ToArray();
TestDecodeWithoutIndexing(encoded, _headerNameString, _headerValueString);
}
[Fact]
public void DecodesLiteralHeaderFieldNeverIndexed_NewName_HuffmanEncodedNameAndValue()
{
byte[] encoded = _literalHeaderFieldNeverIndexedNewName
.Concat(_headerNameHuffman)
.Concat(_headerValueHuffman)
.ToArray();
TestDecodeWithoutIndexing(encoded, _headerNameString, _headerValueString);
}
[Fact]
public void DecodesLiteralHeaderFieldNeverIndexed_IndexedName()
{
// 0001 (Literal Header Field Never Indexed Representation)
// 1111 0010 1011 (Indexed Name - Index 58 encoded with 4-bit prefix - see http://httpwg.org/specs/rfc7541.html#integer.representation)
// Concatenated with value bytes
byte[] encoded = _literalHeaderFieldNeverIndexedIndexedName
.Concat(_headerValue)
.ToArray();
TestDecodeWithoutIndexing(encoded, _userAgentString, _headerValueString);
}
[Fact]
public void DecodesLiteralHeaderFieldNeverIndexed_IndexedName_HuffmanEncodedValue()
{
// 0001 (Literal Header Field Never Indexed Representation)
// 1111 0010 1011 (Indexed Name - Index 58 encoded with 4-bit prefix - see http://httpwg.org/specs/rfc7541.html#integer.representation)
// Concatenated with Huffman encoded value bytes
byte[] encoded = _literalHeaderFieldNeverIndexedIndexedName
.Concat(_headerValueHuffman)
.ToArray();
TestDecodeWithoutIndexing(encoded, _userAgentString, _headerValueString);
}
[Fact]
public void DecodesLiteralHeaderFieldNeverIndexed_IndexedName_OutOfRange_Error()
{
// 0001 (Literal Header Field Never Indexed Representation)
// 1111 0010 1111 (Indexed Name - Index 62 encoded with 4-bit prefix - see http://httpwg.org/specs/rfc7541.html#integer.representation)
// Index 62 is the first entry in the dynamic table. If there's nothing there, the decoder should throw.
HPackDecodingException exception = Assert.Throws<HPackDecodingException>(() => _decoder.Decode(new byte[] { 0x1f, 0x2f }, endHeaders: true, handler: _handler));
Assert.Equal(SR.Format(SR.net_http_hpack_invalid_index, 62), exception.Message);
Assert.Empty(_handler.DecodedHeaders);
}
[Fact]
public void DecodesDynamicTableSizeUpdate()
{
// 001 (Dynamic Table Size Update)
// 11110 (30 encoded with 5-bit prefix - see http://httpwg.org/specs/rfc7541.html#integer.representation)
Assert.Equal(DynamicTableInitialMaxSize, _dynamicTable.MaxSize);
_decoder.Decode(new byte[] { 0x3e }, endHeaders: true, handler: _handler);
Assert.Equal(30, _dynamicTable.MaxSize);
Assert.Empty(_handler.DecodedHeaders);
}
[Fact]
public void DecodesDynamicTableSizeUpdate_AfterIndexedHeaderStatic_Error()
{
// 001 (Dynamic Table Size Update)
// 11110 (30 encoded with 5-bit prefix - see http://httpwg.org/specs/rfc7541.html#integer.representation)
Assert.Equal(DynamicTableInitialMaxSize, _dynamicTable.MaxSize);
byte[] data = _indexedHeaderStatic.Concat(new byte[] { 0x3e }).ToArray();
HPackDecodingException exception = Assert.Throws<HPackDecodingException>(() => _decoder.Decode(data, endHeaders: true, handler: _handler));
Assert.Equal(SR.net_http_hpack_late_dynamic_table_size_update, exception.Message);
}
[Fact]
public void DecodesDynamicTableSizeUpdate_AfterIndexedHeaderStatic_SubsequentDecodeCall_Error()
{
Assert.Equal(DynamicTableInitialMaxSize, _dynamicTable.MaxSize);
_decoder.Decode(_indexedHeaderStatic, endHeaders: false, handler: _handler);
Assert.Equal("GET", _handler.DecodedHeaders[":method"]);
// 001 (Dynamic Table Size Update)
// 11110 (30 encoded with 5-bit prefix - see http://httpwg.org/specs/rfc7541.html#integer.representation)
byte[] data = new byte[] { 0x3e };
HPackDecodingException exception = Assert.Throws<HPackDecodingException>(() => _decoder.Decode(data, endHeaders: true, handler: _handler));
Assert.Equal(SR.net_http_hpack_late_dynamic_table_size_update, exception.Message);
}
[Fact]
public void DecodesDynamicTableSizeUpdate_AfterIndexedHeaderStatic_ResetAfterEndHeaders_Succeeds()
{
Assert.Equal(DynamicTableInitialMaxSize, _dynamicTable.MaxSize);
_decoder.Decode(_indexedHeaderStatic, endHeaders: true, handler: _handler);
Assert.Equal("GET", _handler.DecodedHeaders[":method"]);
// 001 (Dynamic Table Size Update)
// 11110 (30 encoded with 5-bit prefix - see http://httpwg.org/specs/rfc7541.html#integer.representation)
_decoder.Decode(new byte[] { 0x3e }, endHeaders: true, handler: _handler);
Assert.Equal(30, _dynamicTable.MaxSize);
}
[Fact]
public void DecodesDynamicTableSizeUpdate_GreaterThanLimit_Error()
{
// 001 (Dynamic Table Size Update)
// 11111 11100010 00011111 (4097 encoded with 5-bit prefix - see http://httpwg.org/specs/rfc7541.html#integer.representation)
Assert.Equal(DynamicTableInitialMaxSize, _dynamicTable.MaxSize);
HPackDecodingException exception = Assert.Throws<HPackDecodingException>(() =>
_decoder.Decode(new byte[] { 0x3f, 0xe2, 0x1f }, endHeaders: true, handler: _handler));
Assert.Equal(SR.Format(SR.net_http_hpack_large_table_size_update, 4097, DynamicTableInitialMaxSize), exception.Message);
Assert.Empty(_handler.DecodedHeaders);
}
[Fact]
public void DecodesStringLength_GreaterThanLimit_Error()
{
byte[] encoded = _literalHeaderFieldWithoutIndexingNewName
.Concat(new byte[] { 0xff, 0x82, 0x3f }) // 8193 encoded with 7-bit prefix
.ToArray();
HPackDecodingException exception = Assert.Throws<HPackDecodingException>(() => _decoder.Decode(encoded, endHeaders: true, handler: _handler));
Assert.Equal(SR.Format(SR.net_http_headers_exceeded_length, MaxHeaderFieldSize), exception.Message);
Assert.Empty(_handler.DecodedHeaders);
}
[Fact]
public void DecodesStringLength_LimitConfigurable()
{
HPackDecoder decoder = new HPackDecoder(DynamicTableInitialMaxSize, MaxHeaderFieldSize + 1);
string string8193 = new string('a', MaxHeaderFieldSize + 1);
byte[] encoded = _literalHeaderFieldWithoutIndexingNewName
.Concat(new byte[] { 0x7f, 0x82, 0x3f }) // 8193 encoded with 7-bit prefix, no Huffman encoding
.Concat(Encoding.ASCII.GetBytes(string8193))
.Concat(new byte[] { 0x7f, 0x82, 0x3f }) // 8193 encoded with 7-bit prefix, no Huffman encoding
.Concat(Encoding.ASCII.GetBytes(string8193))
.ToArray();
decoder.Decode(encoded, endHeaders: true, handler: _handler);
Assert.Equal(string8193, _handler.DecodedHeaders[string8193]);
}
[Fact]
public void DecodesStringLength_IndividualBytes()
{
HPackDecoder decoder = new HPackDecoder(DynamicTableInitialMaxSize, MaxHeaderFieldSize + 1);
string string8193 = new string('a', MaxHeaderFieldSize + 1);
byte[] encoded = _literalHeaderFieldWithoutIndexingNewName
.Concat(new byte[] { 0x7f, 0x82, 0x3f }) // 8193 encoded with 7-bit prefix, no Huffman encoding
.Concat(Encoding.ASCII.GetBytes(string8193))
.Concat(new byte[] { 0x7f, 0x82, 0x3f }) // 8193 encoded with 7-bit prefix, no Huffman encoding
.Concat(Encoding.ASCII.GetBytes(string8193))
.ToArray();
for (int i = 0; i < encoded.Length; i++)
{
bool end = i + 1 == encoded.Length;
decoder.Decode(new byte[] { encoded[i] }, endHeaders: end, handler: _handler);
}
Assert.Equal(string8193, _handler.DecodedHeaders[string8193]);
}
[Fact]
public void DecodesHeaderNameAndValue_SeparateSegments()
{
HPackDecoder decoder = new HPackDecoder(DynamicTableInitialMaxSize, MaxHeaderFieldSize + 1);
string string8193 = new string('a', MaxHeaderFieldSize + 1);
byte[][] segments = new byte[][]
{
_literalHeaderFieldWithoutIndexingNewName,
new byte[] { 0x7f, 0x82, 0x3f }, // 8193 encoded with 7-bit prefix, no Huffman encoding
Encoding.ASCII.GetBytes(string8193),
new byte[] { 0x7f, 0x82, 0x3f }, // 8193 encoded with 7-bit prefix, no Huffman encoding
Encoding.ASCII.GetBytes(string8193)
};
for (int i = 0; i < segments.Length; i++)
{
bool end = i + 1 == segments.Length;
decoder.Decode(segments[i], endHeaders: end, handler: _handler);
}
Assert.Equal(string8193, _handler.DecodedHeaders[string8193]);
}
public static readonly TheoryData<byte[]> _incompleteHeaderBlockData = new TheoryData<byte[]>
{
// Indexed Header Field Representation - incomplete index encoding
new byte[] { 0xff },
// Literal Header Field with Incremental Indexing Representation - New Name - incomplete header name length encoding
new byte[] { 0x40, 0x7f },
// Literal Header Field with Incremental Indexing Representation - New Name - incomplete header name
new byte[] { 0x40, 0x01 },
new byte[] { 0x40, 0x02, 0x61 },
// Literal Header Field with Incremental Indexing Representation - New Name - incomplete header value length encoding
new byte[] { 0x40, 0x01, 0x61, 0x7f },
// Literal Header Field with Incremental Indexing Representation - New Name - incomplete header value
new byte[] { 0x40, 0x01, 0x61, 0x01 },
new byte[] { 0x40, 0x01, 0x61, 0x02, 0x61 },
// Literal Header Field with Incremental Indexing Representation - Indexed Name - incomplete index encoding
new byte[] { 0x7f },
// Literal Header Field with Incremental Indexing Representation - Indexed Name - incomplete header value length encoding
new byte[] { 0x7a, 0xff },
// Literal Header Field with Incremental Indexing Representation - Indexed Name - incomplete header value
new byte[] { 0x7a, 0x01 },
new byte[] { 0x7a, 0x02, 0x61 },
// Literal Header Field without Indexing - New Name - incomplete header name length encoding
new byte[] { 0x00, 0xff },
// Literal Header Field without Indexing - New Name - incomplete header name
new byte[] { 0x00, 0x01 },
new byte[] { 0x00, 0x02, 0x61 },
// Literal Header Field without Indexing - New Name - incomplete header value length encoding
new byte[] { 0x00, 0x01, 0x61, 0xff },
// Literal Header Field without Indexing - New Name - incomplete header value
new byte[] { 0x00, 0x01, 0x61, 0x01 },
new byte[] { 0x00, 0x01, 0x61, 0x02, 0x61 },
// Literal Header Field without Indexing Representation - Indexed Name - incomplete index encoding
new byte[] { 0x0f },
// Literal Header Field without Indexing Representation - Indexed Name - incomplete header value length encoding
new byte[] { 0x02, 0xff },
// Literal Header Field without Indexing Representation - Indexed Name - incomplete header value
new byte[] { 0x02, 0x01 },
new byte[] { 0x02, 0x02, 0x61 },
// Literal Header Field Never Indexed - New Name - incomplete header name length encoding
new byte[] { 0x10, 0xff },
// Literal Header Field Never Indexed - New Name - incomplete header name
new byte[] { 0x10, 0x01 },
new byte[] { 0x10, 0x02, 0x61 },
// Literal Header Field Never Indexed - New Name - incomplete header value length encoding
new byte[] { 0x10, 0x01, 0x61, 0xff },
// Literal Header Field Never Indexed - New Name - incomplete header value
new byte[] { 0x10, 0x01, 0x61, 0x01 },
new byte[] { 0x10, 0x01, 0x61, 0x02, 0x61 },
// Literal Header Field Never Indexed Representation - Indexed Name - incomplete index encoding
new byte[] { 0x1f },
// Literal Header Field Never Indexed Representation - Indexed Name - incomplete header value length encoding
new byte[] { 0x12, 0xff },
// Literal Header Field Never Indexed Representation - Indexed Name - incomplete header value
new byte[] { 0x12, 0x01 },
new byte[] { 0x12, 0x02, 0x61 },
// Dynamic Table Size Update - incomplete max size encoding
new byte[] { 0x3f }
};
[Theory]
[MemberData(nameof(_incompleteHeaderBlockData))]
public void DecodesIncompleteHeaderBlock_Error(byte[] encoded)
{
HPackDecodingException exception = Assert.Throws<HPackDecodingException>(() => _decoder.Decode(encoded, endHeaders: true, handler: _handler));
Assert.Equal(SR.net_http_hpack_incomplete_header_block, exception.Message);
Assert.Empty(_handler.DecodedHeaders);
}
public static readonly TheoryData<byte[]> _huffmanDecodingErrorData = new TheoryData<byte[]>
{
// Invalid Huffman encoding in header name
_literalHeaderFieldWithIndexingNewName.Concat(_huffmanLongPadding).ToArray(),
_literalHeaderFieldWithIndexingNewName.Concat(_huffmanEos).ToArray(),
_literalHeaderFieldWithoutIndexingNewName.Concat(_huffmanLongPadding).ToArray(),
_literalHeaderFieldWithoutIndexingNewName.Concat(_huffmanEos).ToArray(),
_literalHeaderFieldNeverIndexedNewName.Concat(_huffmanLongPadding).ToArray(),
_literalHeaderFieldNeverIndexedNewName.Concat(_huffmanEos).ToArray(),
// Invalid Huffman encoding in header value
_literalHeaderFieldWithIndexingIndexedName.Concat(_huffmanLongPadding).ToArray(),
_literalHeaderFieldWithIndexingIndexedName.Concat(_huffmanEos).ToArray(),
_literalHeaderFieldWithoutIndexingIndexedName.Concat(_huffmanLongPadding).ToArray(),
_literalHeaderFieldWithoutIndexingIndexedName.Concat(_huffmanEos).ToArray(),
_literalHeaderFieldNeverIndexedIndexedName.Concat(_huffmanLongPadding).ToArray(),
_literalHeaderFieldNeverIndexedIndexedName.Concat(_huffmanEos).ToArray()
};
[Theory]
[MemberData(nameof(_huffmanDecodingErrorData))]
public void WrapsHuffmanDecodingExceptionInHPackDecodingException(byte[] encoded)
{
HPackDecodingException exception = Assert.Throws<HPackDecodingException>(() => _decoder.Decode(encoded, endHeaders: true, handler: _handler));
Assert.Equal(SR.net_http_hpack_huffman_decode_failed, exception.Message);
Assert.IsType<HuffmanDecodingException>(exception.InnerException);
Assert.Empty(_handler.DecodedHeaders);
}
private static void TestDecodeWithIndexing(byte[] encoded, string expectedHeaderName, string expectedHeaderValue)
{
TestDecode(encoded, expectedHeaderName, expectedHeaderValue, expectDynamicTableEntry: true, byteAtATime: false);
TestDecode(encoded, expectedHeaderName, expectedHeaderValue, expectDynamicTableEntry: true, byteAtATime: true);
}
private static void TestDecodeWithoutIndexing(byte[] encoded, string expectedHeaderName, string expectedHeaderValue)
{
TestDecode(encoded, expectedHeaderName, expectedHeaderValue, expectDynamicTableEntry: false, byteAtATime: false);
TestDecode(encoded, expectedHeaderName, expectedHeaderValue, expectDynamicTableEntry: false, byteAtATime: true);
}
private static void TestDecode(byte[] encoded, string expectedHeaderName, string expectedHeaderValue, bool expectDynamicTableEntry, bool byteAtATime)
{
var (dynamicTable, decoder) = CreateDecoderAndTable();
var handler = new TestHttpHeadersHandler();
Assert.Equal(0, dynamicTable.Count);
Assert.Equal(0, dynamicTable.Size);
if (!byteAtATime)
{
decoder.Decode(encoded, endHeaders: true, handler: handler);
}
else
{
// Parse data in 1 byte chunks, separated by empty chunks
for (int i = 0; i < encoded.Length; i++)
{
bool end = i + 1 == encoded.Length;
decoder.Decode(Array.Empty<byte>(), endHeaders: false, handler: handler);
decoder.Decode(new byte[] { encoded[i] }, endHeaders: end, handler: handler);
}
}
Assert.Equal(expectedHeaderValue, handler.DecodedHeaders[expectedHeaderName]);
if (expectDynamicTableEntry)
{
Assert.Equal(1, dynamicTable.Count);
Assert.Equal(expectedHeaderName, Encoding.ASCII.GetString(dynamicTable[0].Name));
Assert.Equal(expectedHeaderValue, Encoding.ASCII.GetString(dynamicTable[0].Value));
Assert.Equal(expectedHeaderName.Length + expectedHeaderValue.Length + 32, dynamicTable.Size);
}
else
{
Assert.Equal(0, dynamicTable.Count);
Assert.Equal(0, dynamicTable.Size);
}
}
}
public class TestHttpHeadersHandler : IHttpHeadersHandler
{
public Dictionary<string, string> DecodedHeaders { get; } = new Dictionary<string, string>();
public Dictionary<int, KeyValuePair<string, string>> DecodedStaticHeaders { get; } = new Dictionary<int, KeyValuePair<string, string>>();
void IHttpHeadersHandler.OnHeader(ReadOnlySpan<byte> name, ReadOnlySpan<byte> value)
{
string headerName = Encoding.ASCII.GetString(name);
string headerValue = Encoding.ASCII.GetString(value);
DecodedHeaders[headerName] = headerValue;
}
void IHttpHeadersHandler.OnStaticIndexedHeader(int index)
{
ref readonly HeaderField entry = ref H2StaticTable.Get(index - 1);
((IHttpHeadersHandler)this).OnHeader(entry.Name, entry.Value);
DecodedStaticHeaders[index] = new KeyValuePair<string, string>(Encoding.ASCII.GetString(entry.Name), Encoding.ASCII.GetString(entry.Value));
}
void IHttpHeadersHandler.OnStaticIndexedHeader(int index, ReadOnlySpan<byte> value)
{
byte[] name = H2StaticTable.Get(index - 1).Name;
((IHttpHeadersHandler)this).OnHeader(name, value);
DecodedStaticHeaders[index] = new KeyValuePair<string, string>(Encoding.ASCII.GetString(name), Encoding.ASCII.GetString(value));
}
void IHttpHeadersHandler.OnHeadersComplete(bool endStream) { }
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
namespace Orleans.Runtime.ConsistentRing
{
/// <summary>
/// We use the 'backward/clockwise' definition to assign responsibilities on the ring.
/// E.g. in a ring of nodes {5, 10, 15} the node responsible for key 7 is 10 (each node is responsible for the range preceding it).
/// The backward/clockwise approach is consistent with many overlays, e.g., Chord, Cassandra, etc.
/// Note: MembershipOracle uses the 'forward/counter-clockwise' definition to assign responsibilities.
/// E.g. in a ring of nodes {5, 10, 15}, the node responsible for key 7 is node 5 (each node is responsible for the range succeeding it).
/// </summary>
internal class VirtualBucketsRingProvider : MarshalByRefObject, IConsistentRingProvider, ISiloStatusListener
{
private readonly List<IRingRangeListener> statusListeners;
private readonly SortedDictionary<uint, SiloAddress> bucketsMap;
private List<Tuple<uint, SiloAddress>> sortedBucketsList; // flattened sorted bucket list for fast lock-free calculation of CalculateTargetSilo
private readonly Logger logger;
private readonly SiloAddress myAddress;
private readonly int numBucketsPerSilo;
private readonly object lockable;
private bool running;
private IRingRange myRange;
internal VirtualBucketsRingProvider(SiloAddress siloAddr, int nBucketsPerSilo)
{
if (nBucketsPerSilo <= 0)
throw new IndexOutOfRangeException("numBucketsPerSilo is out of range. numBucketsPerSilo = " + nBucketsPerSilo);
logger = LogManager.GetLogger(typeof(VirtualBucketsRingProvider).Name);
statusListeners = new List<IRingRangeListener>();
bucketsMap = new SortedDictionary<uint, SiloAddress>();
sortedBucketsList = new List<Tuple<uint, SiloAddress>>();
myAddress = siloAddr;
numBucketsPerSilo = nBucketsPerSilo;
lockable = new object();
running = true;
myRange = RangeFactory.CreateFullRange();
logger.Info("Starting {0} on silo {1}.", typeof(VirtualBucketsRingProvider).Name, siloAddr.ToStringWithHashCode());
StringValueStatistic.FindOrCreate(StatisticNames.CONSISTENTRING_RING, ToString);
IntValueStatistic.FindOrCreate(StatisticNames.CONSISTENTRING_RINGSIZE, () => GetRingSize());
StringValueStatistic.FindOrCreate(StatisticNames.CONSISTENTRING_MYRANGE_RINGDISTANCE, () => String.Format("x{0,8:X8}", ((IRingRangeInternal)myRange).RangeSize()));
FloatValueStatistic.FindOrCreate(StatisticNames.CONSISTENTRING_MYRANGE_RINGPERCENTAGE, () => (float)((IRingRangeInternal)myRange).RangePercentage());
FloatValueStatistic.FindOrCreate(StatisticNames.CONSISTENTRING_AVERAGERINGPERCENTAGE, () =>
{
int size = GetRingSize();
return size == 0 ? 0 : ((float)100.0/(float) size);
});
// add myself to the list of members
AddServer(myAddress);
}
private void Stop()
{
running = false;
}
public IRingRange GetMyRange()
{
return myRange;
}
private int GetRingSize()
{
lock (lockable)
{
return bucketsMap.Values.Distinct().Count();
}
}
public bool SubscribeToRangeChangeEvents(IRingRangeListener observer)
{
lock (statusListeners)
{
if (statusListeners.Contains(observer)) return false;
statusListeners.Add(observer);
return true;
}
}
public bool UnSubscribeFromRangeChangeEvents(IRingRangeListener observer)
{
lock (statusListeners)
{
return statusListeners.Contains(observer) && statusListeners.Remove(observer);
}
}
private void NotifyLocalRangeSubscribers(IRingRange old, IRingRange now, bool increased)
{
logger.Info(ErrorCode.CRP_Notify, "-NotifyLocalRangeSubscribers about old {0} new {1} increased? {2}", old.ToString(), now.ToString(), increased);
List<IRingRangeListener> copy;
lock (statusListeners)
{
copy = statusListeners.ToList();
}
foreach (IRingRangeListener listener in copy)
{
try
{
listener.RangeChangeNotification(old, now, increased);
}
catch (Exception exc)
{
logger.Error(ErrorCode.CRP_Local_Subscriber_Exception,
String.Format("Local IRangeChangeListener {0} has thrown an exception when was notified about RangeChangeNotification about old {1} new {2} increased? {3}",
listener.GetType().FullName, old, now, increased), exc);
}
}
}
private void AddServer(SiloAddress silo)
{
lock (lockable)
{
List<uint> hashes = silo.GetUniformHashCodes(numBucketsPerSilo);
foreach (var hash in hashes)
{
if (bucketsMap.ContainsKey(hash))
{
var other = bucketsMap[hash];
// If two silos conflict, take the lesser of the two (usually the older one; that is, the lower epoch)
if (silo.CompareTo(other) > 0) continue;
}
bucketsMap[hash] = silo;
}
var myOldRange = myRange;
var bucketsList = bucketsMap.Select(pair => new Tuple<uint, SiloAddress>(pair.Key, pair.Value)).ToList();
var myNewRange = CalculateRange(bucketsList, myAddress);
// capture my range and sortedBucketsList for later lock-free access.
myRange = myNewRange;
sortedBucketsList = bucketsList;
logger.Info(ErrorCode.CRP_Added_Silo, "Added Server {0}. Current view: {1}", silo.ToStringWithHashCode(), this.ToString());
NotifyLocalRangeSubscribers(myOldRange, myNewRange, true);
}
}
internal void RemoveServer(SiloAddress silo)
{
lock (lockable)
{
if (!bucketsMap.ContainsValue(silo)) return; // we have already removed this silo
List<uint> hashes = silo.GetUniformHashCodes(numBucketsPerSilo);
foreach (var hash in hashes)
{
bucketsMap.Remove(hash);
}
var myOldRange = this.myRange;
var bucketsList = bucketsMap.Select(pair => new Tuple<uint, SiloAddress>(pair.Key, pair.Value)).ToList();
var myNewRange = CalculateRange(bucketsList, myAddress);
// capture my range and sortedBucketsList for later lock-free access.
myRange = myNewRange;
sortedBucketsList = bucketsList;
logger.Info(ErrorCode.CRP_Removed_Silo, "Removed Server {0}. Current view: {1}", silo.ToStringWithHashCode(), this.ToString());
NotifyLocalRangeSubscribers(myOldRange, myNewRange, true);
}
}
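// CalculateRange walks the bucket list (sorted by hash) and, for every bucket owned by 'silo',
// assigns it the arc that runs from the preceding bucket's hash up to that bucket's hash
// (clockwise assignment). Illustrative sketch with made-up hashes: given buckets
// {10 -> A, 20 -> B, 30 -> A}, silo A ends up with the arcs 20..30 and 30..10 (wrapping around),
// while silo B owns 10..20. The exact open/closed boundaries are defined by RangeFactory.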
private static IRingRange CalculateRange(List<Tuple<uint, SiloAddress>> list, SiloAddress silo)
{
var ranges = new List<IRingRange>();
for (int i = 0; i < list.Count; i++)
{
var curr = list[i];
var next = list[(i + 1) % list.Count];
// 'backward/clockwise' definition to assign responsibilities on the ring.
if (next.Item2.Equals(silo))
{
IRingRange range = RangeFactory.CreateRange(curr.Item1, next.Item1);
ranges.Add(range);
}
}
return RangeFactory.CreateRange(ranges);
}
// just for debugging
public override string ToString()
{
Dictionary<SiloAddress, IRingRangeInternal> ranges = GetRanges();
List<KeyValuePair<SiloAddress, IRingRangeInternal>> sortedList = ranges.AsEnumerable().ToList();
sortedList.Sort((t1, t2) => t1.Value.RangePercentage().CompareTo(t2.Value.RangePercentage()));
return Utils.EnumerableToString(sortedList, kv => String.Format("{0} -> {1}", kv.Key, kv.Value.ToString()));
}
// Internal: for testing only!
internal Dictionary<SiloAddress, IRingRangeInternal> GetRanges()
{
List<SiloAddress> silos;
List<Tuple<uint, SiloAddress>> snapshotBucketsList;
lock (lockable)
{
silos = bucketsMap.Values.Distinct().ToList();
snapshotBucketsList = sortedBucketsList;
}
var ranges = new Dictionary<SiloAddress, IRingRangeInternal>();
foreach (var silo in silos)
{
var range = (IRingRangeInternal)CalculateRange(snapshotBucketsList, silo);
ranges.Add(silo, range);
}
return ranges;
}
public void SiloStatusChangeNotification(SiloAddress updatedSilo, SiloStatus status)
{
// This silo's status has changed
if (updatedSilo.Equals(myAddress))
{
if (status.IsTerminating())
{
Stop();
}
}
else // Status change for some other silo
{
if (status.IsTerminating())
{
RemoveServer(updatedSilo);
}
else if (status == SiloStatus.Active) // do not do anything with SiloStatus.Created or SiloStatus.Joining -- wait until it actually becomes active
{
AddServer(updatedSilo);
}
}
}
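/// <summary>
/// Returns the silo that currently owns the given hash key on the ring
/// (a convenience wrapper around CalculateTargetSilo with its default arguments).
/// </summary>
/// <param name="key">The hash key to locate on the ring.</param>
/// <returns>The silo address of the primary owner for the key.</returns>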
public SiloAddress GetPrimaryTargetSilo(uint key)
{
return CalculateTargetSilo(key);
}
/// <summary>
/// Finds the silo that owns the given hash value.
/// This routine will always return a non-null silo address unless the excludeThisSiloIfStopping parameter is true,
/// this is the only silo known, and this silo is stopping.
/// </summary>
/// <param name="hash"></param>
/// <param name="excludeThisSiloIfStopping"></param>
/// <returns></returns>
private SiloAddress CalculateTargetSilo(uint hash, bool excludeThisSiloIfStopping = true)
{
// put a private reference to point to sortedBucketsList,
// so if someone is changing the sortedBucketsList reference, we won't get it changed in the middle of our operation.
// The tricks of writing lock-free code!
var snapshotBucketsList = sortedBucketsList;
// excludeMySelf from being a TargetSilo if we're not running and the excludeThisSiloIfStopping flag is true. See the comment in the Stop method.
bool excludeMySelf = excludeThisSiloIfStopping && !running;
if (snapshotBucketsList.Count == 0)
{
// If the membership ring is empty, then we're the owner by default unless we're stopping.
return excludeMySelf ? null : myAddress;
}
// use clockwise ... current code in membershipOracle.CalculateTargetSilo() does counter-clockwise ...
// if you want to stick to counter-clockwise, change the responsibility definition in 'In()' method & responsibility defs in OrleansReminderMemory
// need to implement a binary search, but for now simply traverse the list of silos sorted by their hashes
Tuple<uint, SiloAddress> s = snapshotBucketsList.Find(tuple => (tuple.Item1 >= hash) && // <= hash for counter-clockwise responsibilities
(!tuple.Item2.Equals(myAddress) || !excludeMySelf));
if (s == null)
{
// if not found in traversal, then first silo should be returned (we are on a ring)
// if you go back to their counter-clockwise policy, then change the 'In()' method in OrleansReminderMemory
s = snapshotBucketsList[0]; // vs [membershipRingList.Count - 1]; for counter-clockwise policy
// Make sure it's not us...
if (s.Item2.Equals(myAddress) && excludeMySelf)
{
// vs [membershipRingList.Count - 2]; for counter-clockwise policy
s = snapshotBucketsList.Count > 1 ? snapshotBucketsList[1] : null;
}
}
if (logger.IsVerbose2) logger.Verbose2("Calculated ring partition owner silo {0} for key {1}: {2} --> {3}", s.Item2, hash, hash, s.Item1);
return s.Item2;
}
}
}
| |
using NUnit.Framework;
using OpenQA.Selenium.Environment;
using OpenQA.Selenium.Internal;
using System;
using System.Drawing;
using System.Runtime.InteropServices;
namespace OpenQA.Selenium.Interactions
{
[TestFixture]
public class BasicKeyboardInterfaceTest : DriverTestFixture
{
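// Setup and TearDown both reset the remote end's input state (when the driver implements
// IActionExecutor) so that modifier keys pressed in one test cannot leak into the next; the
// commented-out SendKeys(Keys.Null) calls below appear to be the older way of doing the same reset.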
[SetUp]
public void Setup()
{
//new Actions(driver).SendKeys(Keys.Null).Perform();
IActionExecutor actionExecutor = driver as IActionExecutor;
if (actionExecutor != null)
{
actionExecutor.ResetInputState();
}
}
[TearDown]
public void ReleaseModifierKeys()
{
//new Actions(driver).SendKeys(Keys.Null).Perform();
IActionExecutor actionExecutor = driver as IActionExecutor;
if (actionExecutor != null)
{
actionExecutor.ResetInputState();
}
}
[Test]
[IgnoreBrowser(Browser.Remote, "API not implemented in driver")]
public void ShouldAllowBasicKeyboardInput()
{
driver.Url = javascriptPage;
IWebElement keyReporter = driver.FindElement(By.Id("keyReporter"));
// Scroll the element into view before attempting any actions on it.
((IJavaScriptExecutor)driver).ExecuteScript("arguments[0].scrollIntoView();", keyReporter);
Actions actionProvider = new Actions(driver);
IAction sendLowercase = actionProvider.SendKeys(keyReporter, "abc def").Build();
sendLowercase.Perform();
Assert.AreEqual("abc def", keyReporter.GetAttribute("value"));
}
[Test]
[IgnoreBrowser(Browser.Remote, "API not implemented in driver")]
public void ShouldAllowSendingKeyDownOnly()
{
driver.Url = javascriptPage;
IWebElement keysEventInput = driver.FindElement(By.Id("theworks"));
// Scroll the element into view before attempting any actions on it.
((IJavaScriptExecutor)driver).ExecuteScript("arguments[0].scrollIntoView();", keysEventInput);
Actions actionProvider = new Actions(driver);
IAction pressShift = actionProvider.KeyDown(keysEventInput, Keys.Shift).Build();
pressShift.Perform();
IWebElement keyLoggingElement = driver.FindElement(By.Id("result"));
string logText = keyLoggingElement.Text;
IAction releaseShift = actionProvider.KeyUp(keysEventInput, Keys.Shift).Build();
releaseShift.Perform();
Assert.That(logText, Does.EndWith("keydown"));
}
[Test]
[IgnoreBrowser(Browser.Remote, "API not implemented in driver")]
public void ShouldAllowSendingKeyUp()
{
driver.Url = javascriptPage;
IWebElement keysEventInput = driver.FindElement(By.Id("theworks"));
// Scroll the element into view before attempting any actions on it.
((IJavaScriptExecutor)driver).ExecuteScript("arguments[0].scrollIntoView();", keysEventInput);
IAction pressShift = new Actions(driver).KeyDown(keysEventInput, Keys.Shift).Build();
pressShift.Perform();
IWebElement keyLoggingElement = driver.FindElement(By.Id("result"));
string eventsText = keyLoggingElement.Text;
Assert.That(keyLoggingElement.Text, Does.EndWith("keydown"));
IAction releaseShift = new Actions(driver).KeyUp(keysEventInput, Keys.Shift).Build();
releaseShift.Perform();
eventsText = keyLoggingElement.Text;
Assert.That(keyLoggingElement.Text, Does.EndWith("keyup"));
}
[Test]
[IgnoreBrowser(Browser.Remote, "API not implemented in driver")]
public void ShouldAllowSendingKeysWithShiftPressed()
{
driver.Url = javascriptPage;
IWebElement keysEventInput = driver.FindElement(By.Id("theworks"));
keysEventInput.Click();
IAction pressShift = new Actions(driver).KeyDown(Keys.Shift).Build();
pressShift.Perform();
IAction sendLowercase = new Actions(driver).SendKeys("ab").Build();
sendLowercase.Perform();
IAction releaseShift = new Actions(driver).KeyUp(Keys.Shift).Build();
releaseShift.Perform();
AssertThatFormEventsFiredAreExactly("focus keydown keydown keypress keyup keydown keypress keyup keyup");
Assert.AreEqual("AB", keysEventInput.GetAttribute("value"));
}
[Test]
[IgnoreBrowser(Browser.Remote, "API not implemented in driver")]
public void ShouldAllowSendingKeysToActiveElement()
{
driver.Url = bodyTypingPage;
Actions actionProvider = new Actions(driver);
IAction someKeys = actionProvider.SendKeys("ab").Build();
someKeys.Perform();
AssertThatBodyEventsFiredAreExactly("keypress keypress");
IWebElement formLoggingElement = driver.FindElement(By.Id("result"));
AssertThatFormEventsFiredAreExactly(string.Empty);
}
[Test]
[IgnoreBrowser(Browser.Remote, "API not implemented in driver")]
public void ShouldAllowBasicKeyboardInputOnActiveElement()
{
driver.Url = javascriptPage;
IWebElement keyReporter = driver.FindElement(By.Id("keyReporter"));
keyReporter.Click();
Actions actionProvider = new Actions(driver);
IAction sendLowercase = actionProvider.SendKeys("abc def").Build();
sendLowercase.Perform();
Assert.AreEqual("abc def", keyReporter.GetAttribute("value"));
}
[Test]
public void ThrowsIllegalArgumentExceptionWithNullKeys()
{
driver.Url = javascriptPage;
Assert.That(() => driver.FindElement(By.Id("keyReporter")).SendKeys(null), Throws.InstanceOf<ArgumentNullException>());
}
[Test]
public void CanGenerateKeyboardShortcuts()
{
driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("keyboard_shortcut.html");
IWebElement body = driver.FindElement(By.XPath("//body"));
AssertBackgroundColor(body, Color.White);
new Actions(driver).KeyDown(Keys.Shift).SendKeys("1").KeyUp(Keys.Shift).Perform();
AssertBackgroundColor(body, Color.Green);
new Actions(driver).KeyDown(Keys.Alt).SendKeys("1").KeyUp(Keys.Alt).Perform();
AssertBackgroundColor(body, Color.LightBlue);
new Actions(driver)
.KeyDown(Keys.Shift).KeyDown(Keys.Alt)
.SendKeys("1")
.KeyUp(Keys.Shift).KeyUp(Keys.Alt)
.Perform();
AssertBackgroundColor(body, Color.Silver);
}
[Test]
public void SelectionSelectBySymbol()
{
driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("single_text_input.html");
IWebElement input = driver.FindElement(By.Id("textInput"));
new Actions(driver).Click(input).SendKeys("abc def").Perform();
WaitFor(() => input.GetAttribute("value") == "abc def", "did not send initial keys");
if (!TestUtilities.IsInternetExplorer(driver))
{
// When using drivers other than the IE, the click in
// the below action sequence may fall inside the double-
// click threshold (the IE driver has guards to prevent
// inadvertent double-clicks with multiple actions calls),
// so we call the "release actions" end point before
// doing the second action.
IActionExecutor executor = driver as IActionExecutor;
if (executor != null)
{
executor.ResetInputState();
}
}
new Actions(driver).Click(input)
.KeyDown(Keys.Shift)
.SendKeys(Keys.Left)
.SendKeys(Keys.Left)
.KeyUp(Keys.Shift)
.SendKeys(Keys.Delete)
.Perform();
Assert.That(input.GetAttribute("value"), Is.EqualTo("abc d"));
}
[Test]
public void SelectionSelectByWord()
{
string controlModifier = Keys.Control;
if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
{
controlModifier = Keys.Alt;
}
driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("single_text_input.html");
IWebElement input = driver.FindElement(By.Id("textInput"));
new Actions(driver).Click(input).SendKeys("abc def").Perform();
WaitFor(() => input.GetAttribute("value") == "abc def", "did not send initial keys");
if (!TestUtilities.IsInternetExplorer(driver))
{
// When using drivers other than the IE, the click in
// the below action sequence may fall inside the double-
// click threshold (the IE driver has guards to prevent
// inadvertent double-clicks with multiple actions calls),
// so we call the "release actions" end point before
// doing the second action.
IActionExecutor executor = driver as IActionExecutor;
if (executor != null)
{
executor.ResetInputState();
}
}
new Actions(driver).Click(input)
.KeyDown(Keys.Shift)
.KeyDown(controlModifier)
.SendKeys(Keys.Left)
.KeyUp(controlModifier)
.KeyUp(Keys.Shift)
.SendKeys(Keys.Delete)
.Perform();
WaitFor(() => input.GetAttribute("value") == "abc ", "did not send editing keys");
}
[Test]
public void SelectionSelectAll()
{
string controlModifier = Keys.Control;
if (RuntimeInformation.IsOSPlatform(OSPlatform.OSX))
{
controlModifier = Keys.Command;
}
driver.Url = EnvironmentManager.Instance.UrlBuilder.WhereIs("single_text_input.html");
IWebElement input = driver.FindElement(By.Id("textInput"));
new Actions(driver).Click(input).SendKeys("abc def").Perform();
WaitFor(() => input.GetAttribute("value") == "abc def", "did not send initial keys");
new Actions(driver).Click(input)
.KeyDown(controlModifier)
.SendKeys("a")
.KeyUp(controlModifier)
.SendKeys(Keys.Delete)
.Perform();
Assert.That(input.GetAttribute("value"), Is.EqualTo(string.Empty));
}
//------------------------------------------------------------------
// Tests below here are not included in the Java test suite
//------------------------------------------------------------------
[Test]
[IgnoreBrowser(Browser.Remote, "API not implemented in driver")]
public void ShouldAllowSendingKeysWithLeftShiftPressed()
{
driver.Url = javascriptPage;
IWebElement keysEventInput = driver.FindElement(By.Id("theworks"));
keysEventInput.Click();
IAction pressShift = new Actions(driver).KeyDown(Keys.LeftShift).Build();
pressShift.Perform();
IAction sendLowercase = new Actions(driver).SendKeys("ab").Build();
sendLowercase.Perform();
IAction releaseShift = new Actions(driver).KeyUp(Keys.LeftShift).Build();
releaseShift.Perform();
AssertThatFormEventsFiredAreExactly("focus keydown keydown keypress keyup keydown keypress keyup keyup");
Assert.AreEqual("AB", keysEventInput.GetAttribute("value"));
}
private void AssertThatFormEventsFiredAreExactly(string message, string expected)
{
Assert.AreEqual(expected, driver.FindElement(By.Id("result")).Text.Trim(), message);
}
private void AssertThatFormEventsFiredAreExactly(string expected)
{
AssertThatFormEventsFiredAreExactly(string.Empty, expected);
}
private void AssertThatBodyEventsFiredAreExactly(string expected)
{
Assert.AreEqual(expected, driver.FindElement(By.Id("body_result")).Text.Trim());
}
private Func<bool> BackgroundColorToChangeFrom(IWebElement element, Color currentColor)
{
return () =>
{
string hexValue = string.Format("#{0:x2}{1:x2}{2:x2}", currentColor.R, currentColor.G, currentColor.B);
string rgbValue = string.Format("rgb({0}, {1}, {2})", currentColor.R, currentColor.G, currentColor.B);
string rgbaValue = string.Format("rgba({0}, {1}, {2}, 1)", currentColor.R, currentColor.G, currentColor.B);
string actual = element.GetCssValue("background-color");
return actual != hexValue && actual != rgbValue && actual != rgbaValue;
};
}
private void AssertBackgroundColor(IWebElement el, Color expected)
{
string hexValue = string.Format("#{0:x2}{1:x2}{2:x2}", expected.R, expected.G, expected.B);
string rgbValue = string.Format("rgb({0}, {1}, {2})", expected.R, expected.G, expected.B);
string rgbaValue = string.Format("rgba({0}, {1}, {2}, 1)", expected.R, expected.G, expected.B);
string actual = el.GetCssValue("background-color");
Assert.That(actual, Is.EqualTo(hexValue).Or.EqualTo(rgbValue).Or.EqualTo(rgbaValue));
}
}
}
| |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
using System.ComponentModel;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Windows.Forms;
using OpenLiveWriter.CoreServices;
using OpenLiveWriter.CoreServices.UI;
using OpenLiveWriter.Localization;
namespace OpenLiveWriter.Controls.Wizard
{
/// <summary>
/// Summary description for WizardHeaderBar.
/// </summary>
public class WizardHeaderBar : UserControl
{
private GradientPanel titleBarPanel;
private LabelControl labelDescriptionText;
private LabelControl labelTitleText;
private PictureBox pictureBoxTitleBar;
/// <summary>
/// Required designer variable.
/// </summary>
private Container components = null;
private HeaderBarUITheme _uiTheme;
public WizardHeaderBar()
{
// This call is required by the Windows.Forms Form Designer.
InitializeComponent();
this.labelTitleText.Font = Res.GetFont(FontSize.Heading, FontStyle.Regular);
_uiTheme = new HeaderBarUITheme(this);
}
/// <summary>
/// Clean up any resources being used.
/// </summary>
protected override void Dispose(bool disposing)
{
if (disposing)
{
if (components != null)
{
components.Dispose();
}
}
base.Dispose(disposing);
}
#region Component Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
System.Resources.ResourceManager resources = new System.Resources.ResourceManager(typeof(WizardHeaderBar));
this.titleBarPanel = new OpenLiveWriter.Controls.Wizard.GradientPanel();
this.labelDescriptionText = new OpenLiveWriter.Controls.LabelControl();
this.labelTitleText = new OpenLiveWriter.Controls.LabelControl();
this.pictureBoxTitleBar = new System.Windows.Forms.PictureBox();
this.titleBarPanel.SuspendLayout();
this.SuspendLayout();
//
// titleBarPanel
//
this.titleBarPanel.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
| System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.titleBarPanel.Controls.Add(this.labelDescriptionText);
this.titleBarPanel.Controls.Add(this.labelTitleText);
this.titleBarPanel.Controls.Add(this.pictureBoxTitleBar);
this.titleBarPanel.Location = new System.Drawing.Point(0, 0);
this.titleBarPanel.Name = "titleBarPanel";
this.titleBarPanel.Size = new System.Drawing.Size(392, 64);
this.titleBarPanel.TabIndex = 1;
//
// labelDescriptionText
//
this.labelDescriptionText.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
| System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.labelDescriptionText.BackColor = System.Drawing.Color.Transparent;
this.labelDescriptionText.ForeColor = System.Drawing.Color.RoyalBlue;
this.labelDescriptionText.ImeMode = System.Windows.Forms.ImeMode.NoControl;
this.labelDescriptionText.Location = new System.Drawing.Point(72, 29);
this.labelDescriptionText.MultiLine = false;
this.labelDescriptionText.Name = "labelDescriptionText";
this.labelDescriptionText.Size = new System.Drawing.Size(312, 31);
this.labelDescriptionText.TabIndex = 13;
this.labelDescriptionText.Text = "A brief description of what this wizard does. djhdkjsdfhsd dfsdfsdf sdfsdf sdfsdf" +
"";
//
// labelTitleText
//
this.labelTitleText.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.labelTitleText.BackColor = System.Drawing.Color.Transparent;
this.labelTitleText.Font = Res.GetFont(FontSize.Heading, FontStyle.Regular);
this.labelTitleText.ForeColor = System.Drawing.Color.Navy;
this.labelTitleText.ImeMode = System.Windows.Forms.ImeMode.NoControl;
this.labelTitleText.Location = new System.Drawing.Point(72, 4);
this.labelTitleText.MultiLine = false;
this.labelTitleText.Name = "labelTitleText";
this.labelTitleText.Size = new System.Drawing.Size(312, 23);
this.labelTitleText.TabIndex = 12;
this.labelTitleText.Text = "Wizard Title";
//
// pictureBoxTitleBar
//
this.pictureBoxTitleBar.BackColor = System.Drawing.Color.Transparent;
this.pictureBoxTitleBar.Image = ((System.Drawing.Image)(resources.GetObject("pictureBoxTitleBar.Image")));
this.pictureBoxTitleBar.ImeMode = System.Windows.Forms.ImeMode.NoControl;
this.pictureBoxTitleBar.Location = new System.Drawing.Point(0, 8);
this.pictureBoxTitleBar.Name = "pictureBoxTitleBar";
this.pictureBoxTitleBar.Size = new System.Drawing.Size(72, 40);
this.pictureBoxTitleBar.TabIndex = 11;
this.pictureBoxTitleBar.TabStop = false;
//
// WizardHeaderBar
//
this.Controls.Add(this.titleBarPanel);
this.Name = "WizardHeaderBar";
this.Size = new System.Drawing.Size(392, 64);
this.titleBarPanel.ResumeLayout(false);
this.ResumeLayout(false);
}
#endregion
/// <summary>
/// Gets or sets the title bar image for the wizard.
/// </summary>
[
Category("Appearance"),
Localizable(true),
DefaultValue(null),
Description("Specifies the image to use in the wizard title bar.")
]
public Image TitleBarImage
{
get { return pictureBoxTitleBar.Image; }
set { pictureBoxTitleBar.Image = value; }
}
/// <summary>
/// Gets or sets the title of the wizard.
/// </summary>
[
Category("Appearance"),
Localizable(true),
DefaultValue("Wizard Title"),
Description("Specifies the title of the wizard.")
]
public string TitleText
{
get { return labelTitleText.Text; }
set { labelTitleText.Text = value; }
}
/// <summary>
/// Gets or sets the description of the wizard.
/// </summary>
[
Category("Appearance"),
Localizable(true),
DefaultValue("A description of what this wizard does."),
Description("Specifies the description of the wizard.")
]
public string DescriptionText
{
get { return labelDescriptionText.Text; }
set { labelDescriptionText.Text = value; }
}
private class HeaderBarUITheme : ControlUITheme
{
WizardHeaderBar _headerBar;
public HeaderBarUITheme(WizardHeaderBar headerBar) : base(headerBar, false)
{
_headerBar = headerBar;
ApplyTheme();
}
protected override void ApplyTheme(bool highContrast)
{
//tweak the colors to something more fine tuned
_headerBar.labelTitleText.ForeColor = !highContrast ? ColorHelper.StringToColor("#004E98") : SystemColors.WindowText;
_headerBar.labelDescriptionText.ForeColor = !highContrast ? ColorHelper.StringToColor("#316AC5") : SystemColors.WindowText;
}
}
}
class GradientPanel : Panel
{
public GradientPanel() : base()
{
}
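// Paints a vertical white-to-BackColor gradient behind the header. The gradient rectangle extends
// 40px past the client area, so the visible portion fades toward BackColor without fully reaching
// it at the bottom edge.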
protected override void OnPaint(PaintEventArgs e)
{
Rectangle gradientRect = new Rectangle(ClientRectangle.X, ClientRectangle.Y, ClientRectangle.Width, ClientRectangle.Height + 40);
//Rectangle gradientRect = ClientRectangle;
//using (LinearGradientBrush linearGradientBrush = new LinearGradientBrush(gradientRect, Color.White, BackColor, LinearGradientMode.Horizontal))
using (LinearGradientBrush linearGradientBrush = new LinearGradientBrush(gradientRect, Color.White, BackColor, LinearGradientMode.Vertical))
{
e.Graphics.FillRectangle(linearGradientBrush, new Rectangle(0, 0, this.Width, this.Height));
}
base.OnPaint(e);
}
}
}
| |
namespace ApiTemplate.Controllers
{
using System.Threading;
using System.Threading.Tasks;
using ApiTemplate.Commands;
using ApiTemplate.Constants;
using ApiTemplate.ViewModels;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.JsonPatch;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Net.Http.Headers;
#if Swagger
using Swashbuckle.AspNetCore.Annotations;
#endif
[Route("[controller]")]
[ApiController]
#if Versioning
[ApiVersion(ApiVersionName.V1)]
#endif
#if Swagger
[SwaggerResponse(StatusCodes.Status500InternalServerError, "The MIME type in the Accept HTTP header is not acceptable.", typeof(ProblemDetails))]
#endif
#pragma warning disable CA1822 // Mark members as static
#pragma warning disable CA1062 // Validate arguments of public methods
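/// <summary>
/// Exposes the REST endpoints for the car resource (OPTIONS, GET, HEAD, POST, PUT, PATCH and DELETE).
/// Each action delegates to a command injected via [FromServices]; the OPTIONS endpoints are handled inline.
/// </summary>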
public class CarsController : ControllerBase
{
/// <summary>
/// Returns an Allow HTTP header with the allowed HTTP methods.
/// </summary>
/// <returns>A 200 OK response.</returns>
[HttpOptions(Name = CarsControllerRoute.OptionsCars)]
#if Swagger
[SwaggerResponse(StatusCodes.Status200OK, "The allowed HTTP methods.")]
#endif
public IActionResult Options()
{
this.HttpContext.Response.Headers.AppendCommaSeparatedValues(
HeaderNames.Allow,
HttpMethods.Get,
HttpMethods.Head,
HttpMethods.Options,
HttpMethods.Post);
return this.Ok();
}
/// <summary>
/// Returns an Allow HTTP header with the allowed HTTP methods for a car with the specified unique identifier.
/// </summary>
/// <param name="carId">The cars unique identifier.</param>
/// <returns>A 200 OK response.</returns>
[HttpOptions("{carId}", Name = CarsControllerRoute.OptionsCar)]
#if Swagger
[SwaggerResponse(StatusCodes.Status200OK, "The allowed HTTP methods.")]
#endif
#pragma warning disable IDE0060, CA1801 // Remove unused parameter
public IActionResult Options(int carId)
#pragma warning restore IDE0060, CA1801 // Remove unused parameter
{
this.HttpContext.Response.Headers.AppendCommaSeparatedValues(
HeaderNames.Allow,
HttpMethods.Delete,
HttpMethods.Get,
HttpMethods.Head,
HttpMethods.Options,
HttpMethods.Patch,
HttpMethods.Post,
HttpMethods.Put);
return this.Ok();
}
/// <summary>
/// Deletes the car with the specified unique identifier.
/// </summary>
/// <param name="command">The action command.</param>
/// <param name="carId">The cars unique identifier.</param>
/// <param name="cancellationToken">The cancellation token used to cancel the HTTP request.</param>
/// <returns>A 204 No Content response if the car was deleted or a 404 Not Found if a car with the specified
/// unique identifier was not found.</returns>
[HttpDelete("{carId}", Name = CarsControllerRoute.DeleteCar)]
#if Swagger
[SwaggerResponse(StatusCodes.Status204NoContent, "The car with the specified unique identifier was deleted.")]
[SwaggerResponse(StatusCodes.Status404NotFound, "A car with the specified unique identifier was not found.", typeof(ProblemDetails))]
#endif
public Task<IActionResult> DeleteAsync(
[FromServices] IDeleteCarCommand command,
int carId,
CancellationToken cancellationToken) => command.ExecuteAsync(carId, cancellationToken);
/// <summary>
/// Gets the car with the specified unique identifier.
/// </summary>
/// <param name="command">The action command.</param>
/// <param name="carId">The cars unique identifier.</param>
/// <param name="cancellationToken">The cancellation token used to cancel the HTTP request.</param>
/// <returns>A 200 OK response containing the car or a 404 Not Found if a car with the specified unique
/// identifier was not found.</returns>
[HttpGet("{carId}", Name = CarsControllerRoute.GetCar)]
[HttpHead("{carId}", Name = CarsControllerRoute.HeadCar)]
#if Swagger
[SwaggerResponse(StatusCodes.Status200OK, "The car with the specified unique identifier.", typeof(Car))]
[SwaggerResponse(StatusCodes.Status304NotModified, "The car has not changed since the date given in the If-Modified-Since HTTP header.", typeof(void))]
[SwaggerResponse(StatusCodes.Status404NotFound, "A car with the specified unique identifier could not be found.", typeof(ProblemDetails))]
[SwaggerResponse(StatusCodes.Status406NotAcceptable, "The MIME type in the Accept HTTP header is not acceptable.", typeof(ProblemDetails))]
#endif
public Task<IActionResult> GetAsync(
[FromServices] IGetCarCommand command,
int carId,
CancellationToken cancellationToken) => command.ExecuteAsync(carId, cancellationToken);
/// <summary>
/// Gets a collection of cars.
/// </summary>
/// <param name="command">The action command.</param>
/// <param name="pageOptions">The page options.</param>
/// <param name="cancellationToken">The cancellation token used to cancel the HTTP request.</param>
/// <returns>A 200 OK response containing a collection of cars, a 400 Bad Request if the page request
/// parameters are invalid or a 404 Not Found if a page with the specified page number was not found.
/// </returns>
[HttpGet("", Name = CarsControllerRoute.GetCarPage)]
[HttpHead("", Name = CarsControllerRoute.HeadCarPage)]
#if Swagger
[SwaggerResponse(StatusCodes.Status200OK, "A collection of cars for the specified page.", typeof(Connection<Car>))]
[SwaggerResponse(StatusCodes.Status400BadRequest, "The page request parameters are invalid.", typeof(ProblemDetails))]
[SwaggerResponse(StatusCodes.Status404NotFound, "A page with the specified page number was not found.", typeof(ProblemDetails))]
[SwaggerResponse(StatusCodes.Status406NotAcceptable, "The MIME type in the Accept HTTP header is not acceptable.", typeof(ProblemDetails))]
#endif
public Task<IActionResult> GetPageAsync(
[FromServices] IGetCarPageCommand command,
[FromQuery] PageOptions pageOptions,
CancellationToken cancellationToken) => command.ExecuteAsync(pageOptions, cancellationToken);
/// <summary>
/// Patches the car with the specified unique identifier.
/// </summary>
/// <param name="command">The action command.</param>
/// <param name="carId">The cars unique identifier.</param>
/// <param name="patch">The patch document. See http://jsonpatch.com.</param>
/// <param name="cancellationToken">The cancellation token used to cancel the HTTP request.</param>
/// <returns>A 200 OK if the car was patched, a 400 Bad Request if the patch was invalid or a 404 Not Found
/// if a car with the specified unique identifier was not found.</returns>
[HttpPatch("{carId}", Name = CarsControllerRoute.PatchCar)]
#if Swagger
[SwaggerResponse(StatusCodes.Status200OK, "The patched car with the specified unique identifier.", typeof(Car))]
[SwaggerResponse(StatusCodes.Status400BadRequest, "The patch document is invalid.", typeof(ProblemDetails))]
[SwaggerResponse(StatusCodes.Status404NotFound, "A car with the specified unique identifier could not be found.", typeof(ProblemDetails))]
[SwaggerResponse(StatusCodes.Status406NotAcceptable, "The MIME type in the Accept HTTP header is not acceptable.", typeof(ProblemDetails))]
[SwaggerResponse(StatusCodes.Status415UnsupportedMediaType, "The MIME type in the Content-Type HTTP header is unsupported.", typeof(ProblemDetails))]
#endif
public Task<IActionResult> PatchAsync(
[FromServices] IPatchCarCommand command,
int carId,
[FromBody] JsonPatchDocument<SaveCar> patch,
CancellationToken cancellationToken) => command.ExecuteAsync(carId, patch, cancellationToken);
/// <summary>
/// Creates a new car.
/// </summary>
/// <param name="command">The action command.</param>
/// <param name="car">The car to create.</param>
/// <param name="cancellationToken">The cancellation token used to cancel the HTTP request.</param>
/// <returns>A 201 Created response containing the newly created car or a 400 Bad Request if the car is
/// invalid.</returns>
[HttpPost("", Name = CarsControllerRoute.PostCar)]
#if Swagger
[SwaggerResponse(StatusCodes.Status201Created, "The car was created.", typeof(Car))]
[SwaggerResponse(StatusCodes.Status400BadRequest, "The car is invalid.", typeof(ProblemDetails))]
[SwaggerResponse(StatusCodes.Status406NotAcceptable, "The MIME type in the Accept HTTP header is not acceptable.", typeof(ProblemDetails))]
[SwaggerResponse(StatusCodes.Status415UnsupportedMediaType, "The MIME type in the Content-Type HTTP header is unsupported.", typeof(ProblemDetails))]
#endif
public Task<IActionResult> PostAsync(
[FromServices] IPostCarCommand command,
[FromBody] SaveCar car,
CancellationToken cancellationToken) => command.ExecuteAsync(car, cancellationToken);
/// <summary>
/// Updates an existing car with the specified unique identifier.
/// </summary>
/// <param name="command">The action command.</param>
/// <param name="carId">The car identifier.</param>
/// <param name="car">The car to update.</param>
/// <param name="cancellationToken">The cancellation token used to cancel the HTTP request.</param>
/// <returns>A 200 OK response containing the newly updated car, a 400 Bad Request if the car is invalid or a
/// or a 404 Not Found if a car with the specified unique identifier was not found.</returns>
[HttpPut("{carId}", Name = CarsControllerRoute.PutCar)]
#if Swagger
[SwaggerResponse(StatusCodes.Status200OK, "The car was updated.", typeof(Car))]
[SwaggerResponse(StatusCodes.Status400BadRequest, "The car is invalid.", typeof(ProblemDetails))]
[SwaggerResponse(StatusCodes.Status404NotFound, "A car with the specified unique identifier could not be found.", typeof(ProblemDetails))]
[SwaggerResponse(StatusCodes.Status406NotAcceptable, "The MIME type in the Accept HTTP header is not acceptable.", typeof(ProblemDetails))]
[SwaggerResponse(StatusCodes.Status415UnsupportedMediaType, "The MIME type in the Content-Type HTTP header is unsupported.", typeof(ProblemDetails))]
#endif
public Task<IActionResult> PutAsync(
[FromServices] IPutCarCommand command,
int carId,
[FromBody] SaveCar car,
CancellationToken cancellationToken) => command.ExecuteAsync(carId, car, cancellationToken);
}
}
#pragma warning restore CA1062 // Validate arguments of public methods
#pragma warning restore CA1822 // Mark members as static
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
#if ES_BUILD_STANDALONE
using Environment = Microsoft.Diagnostics.Tracing.Internal.Environment;
namespace Microsoft.Diagnostics.Tracing
#else
namespace System.Diagnostics.Tracing
#endif
{
/// <summary>
/// TraceLogging: used when implementing a custom TraceLoggingTypeInfo.
/// An instance of this type is provided to the TypeInfo.WriteMetadata method.
/// </summary>
internal class TraceLoggingMetadataCollector
{
private readonly Impl impl;
private readonly FieldMetadata currentGroup;
private int bufferedArrayFieldCount = int.MinValue;
/// <summary>
/// Creates a root-level collector.
/// </summary>
internal TraceLoggingMetadataCollector()
{
this.impl = new Impl();
}
/// <summary>
/// Creates a collector for a group.
/// </summary>
/// <param name="other">Parent collector</param>
/// <param name="group">The field that starts the group</param>
private TraceLoggingMetadataCollector(
TraceLoggingMetadataCollector other,
FieldMetadata group)
{
this.impl = other.impl;
this.currentGroup = group;
}
/// <summary>
/// The field tags to be used for the next field.
/// This will be reset to None each time a field is written.
/// </summary>
internal EventFieldTags Tags
{
get;
set;
}
internal int ScratchSize
{
get { return this.impl.scratchSize; }
}
internal int DataCount
{
get { return this.impl.dataCount; }
}
internal int PinCount
{
get { return this.impl.pinCount; }
}
private bool BeginningBufferedArray
{
get { return this.bufferedArrayFieldCount == 0; }
}
/// <summary>
/// Call this method to add a group to the event and to return
/// a new metadata collector that can be used to add fields to the
/// group. After all of the fields in the group have been written,
/// switch back to the original metadata collector to add fields
/// outside of the group.
/// Special-case: if name is null, no group is created, and AddGroup
/// returns the original metadata collector. This is useful when
/// adding the top-level group for an event.
/// Note: do not use the original metadata collector while the group's
/// metadata collector is in use, and do not use the group's metadata
/// collector after switching back to the original.
/// </summary>
/// <param name="name">
/// The name of the group. If name is null, the call to AddGroup is a
/// no-op (collector.AddGroup(null) returns collector).
/// </param>
/// <returns>
/// A new metadata collector that can be used to add fields to the group.
/// </returns>
public TraceLoggingMetadataCollector AddGroup(string name)
{
TraceLoggingMetadataCollector result = this;
if (name != null || // Normal.
this.BeginningBufferedArray) // Error, FieldMetadata's constructor will throw the appropriate exception.
{
var newGroup = new FieldMetadata(
name,
TraceLoggingDataType.Struct,
this.Tags,
this.BeginningBufferedArray);
this.AddField(newGroup);
result = new TraceLoggingMetadataCollector(this, newGroup);
}
return result;
}
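// Illustrative usage sketch (not part of the original source): add a "Point" group with two scalar
// fields, then switch back to the parent collector for fields outside of the group.
//   TraceLoggingMetadataCollector pointGroup = collector.AddGroup("Point");
//   pointGroup.AddScalar("X", TraceLoggingDataType.Int32);
//   pointGroup.AddScalar("Y", TraceLoggingDataType.Int32);
//   collector.AddScalar("Id", TraceLoggingDataType.Int32); // back on the parent collector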
/// <summary>
/// Adds a scalar field to an event.
/// </summary>
/// <param name="name">
/// The name to use for the added field. This value must not be null.
/// </param>
/// <param name="type">
/// The type code for the added field. This must be a fixed-size type
/// (e.g. string types are not supported).
/// </param>
public void AddScalar(string name, TraceLoggingDataType type)
{
int size;
switch ((TraceLoggingDataType)((int)type & Statics.InTypeMask))
{
case TraceLoggingDataType.Int8:
case TraceLoggingDataType.UInt8:
case TraceLoggingDataType.Char8:
size = 1;
break;
case TraceLoggingDataType.Int16:
case TraceLoggingDataType.UInt16:
case TraceLoggingDataType.Char16:
size = 2;
break;
case TraceLoggingDataType.Int32:
case TraceLoggingDataType.UInt32:
case TraceLoggingDataType.HexInt32:
case TraceLoggingDataType.Float:
case TraceLoggingDataType.Boolean32:
size = 4;
break;
case TraceLoggingDataType.Int64:
case TraceLoggingDataType.UInt64:
case TraceLoggingDataType.HexInt64:
case TraceLoggingDataType.Double:
case TraceLoggingDataType.FileTime:
size = 8;
break;
case TraceLoggingDataType.Guid:
case TraceLoggingDataType.SystemTime:
size = 16;
break;
default:
throw new ArgumentOutOfRangeException(nameof(type));
}
this.impl.AddScalar(size);
this.AddField(new FieldMetadata(name, type, this.Tags, this.BeginningBufferedArray));
}
/// <summary>
/// Adds a binary-format field to an event.
/// Compatible with core types: Binary, CountedUtf16String, CountedMbcsString.
/// Compatible with dataCollector methods: AddBinary(string), AddArray(Any8bitType[]).
/// </summary>
/// <param name="name">
/// The name to use for the added field. This value must not be null.
/// </param>
/// <param name="type">
/// The type code for the added field. This must be a Binary or CountedString type.
/// </param>
public void AddBinary(string name, TraceLoggingDataType type)
{
switch ((TraceLoggingDataType)((int)type & Statics.InTypeMask))
{
case TraceLoggingDataType.Binary:
case TraceLoggingDataType.CountedMbcsString:
case TraceLoggingDataType.CountedUtf16String:
break;
default:
throw new ArgumentOutOfRangeException(nameof(type));
}
this.impl.AddScalar(2);
this.impl.AddNonscalar();
this.AddField(new FieldMetadata(name, type, this.Tags, this.BeginningBufferedArray));
}
/// <summary>
/// Adds a null-terminated string field to an event.
/// Compatible with core types: Utf16String, MbcsString.
/// Compatible with dataCollector method: AddNullTerminatedString(string).
/// </summary>
/// <param name="name">
/// The name to use for the added field. This value must not be null.
/// </param>
/// <param name="type">
/// The type code for the added field. This must be a null-terminated string type.
/// </param>
public void AddNullTerminatedString(string name, TraceLoggingDataType type)
{
switch ((TraceLoggingDataType)((int)type & Statics.InTypeMask))
{
case TraceLoggingDataType.Utf16String:
break;
default:
throw new ArgumentOutOfRangeException(nameof(type));
}
this.impl.AddNonscalar();
this.AddField(new FieldMetadata(name, type, this.Tags, this.BeginningBufferedArray));
}
/// <summary>
/// Adds an array field to an event.
/// </summary>
/// <param name="name">
/// The name to use for the added field. This value must not be null.
/// </param>
/// <param name="type">
/// The type code for the added field. This must be a fixed-size type.
/// </param>
public void AddArray(string name, TraceLoggingDataType type)
{
switch ((TraceLoggingDataType)((int)type & Statics.InTypeMask))
{
case TraceLoggingDataType.Int8:
case TraceLoggingDataType.UInt8:
case TraceLoggingDataType.Int16:
case TraceLoggingDataType.UInt16:
case TraceLoggingDataType.Int32:
case TraceLoggingDataType.UInt32:
case TraceLoggingDataType.Int64:
case TraceLoggingDataType.UInt64:
case TraceLoggingDataType.Float:
case TraceLoggingDataType.Double:
case TraceLoggingDataType.Boolean32:
case TraceLoggingDataType.Guid:
case TraceLoggingDataType.FileTime:
case TraceLoggingDataType.HexInt32:
case TraceLoggingDataType.HexInt64:
case TraceLoggingDataType.Char16:
case TraceLoggingDataType.Char8:
break;
default:
throw new ArgumentOutOfRangeException(nameof(type));
}
if (this.BeginningBufferedArray)
{
throw new NotSupportedException(SR.EventSource_NotSupportedNestedArraysEnums);
}
this.impl.AddScalar(2);
this.impl.AddNonscalar();
this.AddField(new FieldMetadata(name, type, this.Tags, true));
}
public void BeginBufferedArray()
{
if (this.bufferedArrayFieldCount >= 0)
{
throw new NotSupportedException(SR.EventSource_NotSupportedNestedArraysEnums);
}
this.bufferedArrayFieldCount = 0;
this.impl.BeginBuffered();
}
public void EndBufferedArray()
{
if (this.bufferedArrayFieldCount != 1)
{
throw new InvalidOperationException(SR.EventSource_IncorrentlyAuthoredTypeInfo);
}
this.bufferedArrayFieldCount = int.MinValue;
this.impl.EndBuffered();
}
/// <summary>
/// Adds a custom-serialized field to an event.
/// </summary>
/// <param name="name">
/// The name to use for the added field. This value must not be null.
/// </param>
/// <param name="type">The encoding type for the field.</param>
/// <param name="metadata">Additional information needed to decode the field, if any.</param>
public void AddCustom(string name, TraceLoggingDataType type, byte[] metadata)
{
if (this.BeginningBufferedArray)
{
throw new NotSupportedException(SR.EventSource_NotSupportedCustomSerializedData);
}
this.impl.AddScalar(2);
this.impl.AddNonscalar();
this.AddField(new FieldMetadata(
name,
type,
this.Tags,
metadata));
}
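// Two-pass encoding: the first Encode(null) call only accumulates the required size so the byte
// array can be allocated exactly; the second call writes the actual metadata into that buffer.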
internal byte[] GetMetadata()
{
var size = this.impl.Encode(null);
var metadata = new byte[size];
this.impl.Encode(metadata);
return metadata;
}
private void AddField(FieldMetadata fieldMetadata)
{
this.Tags = EventFieldTags.None;
this.bufferedArrayFieldCount++;
this.impl.fields.Add(fieldMetadata);
if (this.currentGroup != null)
{
this.currentGroup.IncrementStructFieldCount();
}
}
private class Impl
{
internal readonly List<FieldMetadata> fields = new List<FieldMetadata>();
internal short scratchSize;
internal sbyte dataCount;
internal sbyte pinCount;
private int bufferNesting;
private bool scalar;
public void AddScalar(int size)
{
if (this.bufferNesting == 0)
{
if (!this.scalar)
{
this.dataCount = checked((sbyte)(this.dataCount + 1));
}
this.scalar = true;
this.scratchSize = checked((short)(this.scratchSize + size));
}
}
public void AddNonscalar()
{
if (this.bufferNesting == 0)
{
this.scalar = false;
this.pinCount = checked((sbyte)(this.pinCount + 1));
this.dataCount = checked((sbyte)(this.dataCount + 1));
}
}
public void BeginBuffered()
{
if (this.bufferNesting == 0)
{
this.AddNonscalar();
}
this.bufferNesting++;
}
public void EndBuffered()
{
this.bufferNesting--;
}
public int Encode(byte[] metadata)
{
int size = 0;
foreach (var field in this.fields)
{
field.Encode(ref size, metadata);
}
return size;
}
}
}
}
| |
// Copyright (c) Microsoft Open Technologies, Inc. All rights reserved.
// Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using Microsoft.AspNet.Razor.Editor;
using Microsoft.AspNet.Razor.Generator;
using Microsoft.AspNet.Razor.Parser.SyntaxTree;
using Microsoft.AspNet.Razor.Text;
using Microsoft.AspNet.Razor.Tokenizer.Symbols;
namespace Microsoft.AspNet.Razor.Parser
{
public partial class CSharpCodeParser
{
private void SetupDirectives()
{
MapDirectives(InheritsDirective, SyntaxConstants.CSharp.InheritsKeyword);
MapDirectives(FunctionsDirective, SyntaxConstants.CSharp.FunctionsKeyword);
MapDirectives(SectionDirective, SyntaxConstants.CSharp.SectionKeyword);
MapDirectives(HelperDirective, SyntaxConstants.CSharp.HelperKeyword);
MapDirectives(LayoutDirective, SyntaxConstants.CSharp.LayoutKeyword);
MapDirectives(SessionStateDirective, SyntaxConstants.CSharp.SessionStateKeyword);
}
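// Parses the @layout directive, e.g. "@layout Shared/_Layout.cshtml" (illustrative path): everything
// from the first non-whitespace character up to the end of the line is taken as the layout page path
// and handed to SetLayoutCodeGenerator.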
protected virtual void LayoutDirective()
{
AssertDirective(SyntaxConstants.CSharp.LayoutKeyword);
AcceptAndMoveNext();
Context.CurrentBlock.Type = BlockType.Directive;
// Accept spaces, but not newlines
bool foundSomeWhitespace = At(CSharpSymbolType.WhiteSpace);
AcceptWhile(CSharpSymbolType.WhiteSpace);
Output(SpanKind.MetaCode, foundSomeWhitespace ? AcceptedCharacters.None : AcceptedCharacters.Any);
// First non-whitespace character starts the Layout Page, then newline ends it
AcceptUntil(CSharpSymbolType.NewLine);
Span.CodeGenerator = new SetLayoutCodeGenerator(Span.GetContent());
Span.EditHandler.EditorHints = EditorHints.LayoutPage | EditorHints.VirtualPath;
bool foundNewline = Optional(CSharpSymbolType.NewLine);
AddMarkerSymbolIfNecessary();
Output(SpanKind.MetaCode, foundNewline ? AcceptedCharacters.None : AcceptedCharacters.Any);
}
protected virtual void SessionStateDirective()
{
AssertDirective(SyntaxConstants.CSharp.SessionStateKeyword);
AcceptAndMoveNext();
SessionStateDirectiveCore();
}
protected void SessionStateDirectiveCore()
{
SessionStateTypeDirective(RazorResources.ParserEror_SessionDirectiveMissingValue, (key, value) => new RazorDirectiveAttributeCodeGenerator(key, value));
}
protected void SessionStateTypeDirective(string noValueError, Func<string, string, SpanCodeGenerator> createCodeGenerator)
{
// Set the block type
Context.CurrentBlock.Type = BlockType.Directive;
// Accept whitespace
CSharpSymbol remainingWs = AcceptSingleWhiteSpaceCharacter();
if (Span.Symbols.Count > 1)
{
Span.EditHandler.AcceptedCharacters = AcceptedCharacters.None;
}
Output(SpanKind.MetaCode);
if (remainingWs != null)
{
Accept(remainingWs);
}
AcceptWhile(IsSpacingToken(includeNewLines: false, includeComments: true));
// Parse a Type Name
if (!ValidSessionStateValue())
{
Context.OnError(CurrentLocation, noValueError);
}
// Pull out the type name
string sessionStateValue = String.Concat(
Span.Symbols
.Cast<CSharpSymbol>()
.Select(sym => sym.Content)).Trim();
// Set up code generation
Span.CodeGenerator = createCodeGenerator(SyntaxConstants.CSharp.SessionStateKeyword, sessionStateValue);
// Output the span and finish the block
CompleteBlock();
Output(SpanKind.Code);
}
protected virtual bool ValidSessionStateValue()
{
return Optional(CSharpSymbolType.Identifier);
}
[SuppressMessage("Microsoft.Maintainability", "CA1506:AvoidExcessiveClassCoupling", Justification = "Coupling will be reviewed at a later date")]
[SuppressMessage("Microsoft.Globalization", "CA1308:NormalizeStringsToUppercase", Justification = "C# Keywords are always lower-case")]
protected virtual void HelperDirective()
{
bool nested = Context.IsWithin(BlockType.Helper);
// Set the block and span type
Context.CurrentBlock.Type = BlockType.Helper;
// Verify we're on "helper" and accept
AssertDirective(SyntaxConstants.CSharp.HelperKeyword);
Block block = new Block(CurrentSymbol.Content.ToString().ToLowerInvariant(), CurrentLocation);
AcceptAndMoveNext();
if (nested)
{
Context.OnError(CurrentLocation, RazorResources.ParseError_Helpers_Cannot_Be_Nested);
}
// Accept a single whitespace character if present, if not, we should stop now
if (!At(CSharpSymbolType.WhiteSpace))
{
string error;
if (At(CSharpSymbolType.NewLine))
{
error = RazorResources.ErrorComponent_Newline;
}
else if (EndOfFile)
{
error = RazorResources.ErrorComponent_EndOfFile;
}
else
{
error = string.Format(RazorResources.ErrorComponent_Character, CurrentSymbol.Content);
}
Context.OnError(
CurrentLocation,
string.Format(RazorResources.ParseError_Unexpected_Character_At_Helper_Name_Start, error));
PutCurrentBack();
Output(SpanKind.MetaCode);
return;
}
CSharpSymbol remainingWs = AcceptSingleWhiteSpaceCharacter();
// Output metacode and continue
Output(SpanKind.MetaCode);
if (remainingWs != null)
{
Accept(remainingWs);
}
AcceptWhile(IsSpacingToken(includeNewLines: false, includeComments: true)); // Don't accept newlines.
// Expecting an identifier (helper name)
bool errorReported = !Required(CSharpSymbolType.Identifier, errorIfNotFound: true, errorBase: arg => string.Format(RazorResources.ParseError_Unexpected_Character_At_Helper_Name_Start, arg));
if (!errorReported)
{
Assert(CSharpSymbolType.Identifier);
AcceptAndMoveNext();
}
AcceptWhile(IsSpacingToken(includeNewLines: false, includeComments: true));
// Expecting parameter list start: "("
SourceLocation bracketErrorPos = CurrentLocation;
if (!Optional(CSharpSymbolType.LeftParenthesis))
{
if (!errorReported)
{
errorReported = true;
Context.OnError(
CurrentLocation,
string.Format(RazorResources.ParseError_MissingCharAfterHelperName, "("));
}
}
else
{
SourceLocation bracketStart = CurrentLocation;
if (!Balance(BalancingModes.NoErrorOnFailure,
CSharpSymbolType.LeftParenthesis,
CSharpSymbolType.RightParenthesis,
bracketStart))
{
errorReported = true;
Context.OnError(
bracketErrorPos,
RazorResources.ParseError_UnterminatedHelperParameterList);
}
Optional(CSharpSymbolType.RightParenthesis);
}
int bookmark = CurrentLocation.AbsoluteIndex;
IEnumerable<CSharpSymbol> ws = ReadWhile(IsSpacingToken(includeNewLines: true, includeComments: true));
// Expecting a "{"
SourceLocation errorLocation = CurrentLocation;
bool headerComplete = At(CSharpSymbolType.LeftBrace);
if (headerComplete)
{
Accept(ws);
AcceptAndMoveNext();
}
else
{
Context.Source.Position = bookmark;
NextToken();
AcceptWhile(IsSpacingToken(includeNewLines: false, includeComments: true));
if (!errorReported)
{
Context.OnError(
errorLocation,
string.Format(RazorResources.ParseError_MissingCharAfterHelperParameters,
Language.GetSample(CSharpSymbolType.LeftBrace)));
}
}
// Grab the signature and build the code generator
AddMarkerSymbolIfNecessary();
LocationTagged<string> signature = Span.GetContent();
HelperCodeGenerator blockGen = new HelperCodeGenerator(signature, headerComplete);
Context.CurrentBlock.CodeGenerator = blockGen;
// The block will generate appropriate code,
Span.CodeGenerator = SpanCodeGenerator.Null;
if (!headerComplete)
{
CompleteBlock();
Output(SpanKind.Code);
return;
}
else
{
Span.EditHandler.AcceptedCharacters = AcceptedCharacters.None;
Output(SpanKind.Code);
}
// We're valid, so parse the nested block
AutoCompleteEditHandler bodyEditHandler = new AutoCompleteEditHandler(Language.TokenizeString);
using (PushSpanConfig(DefaultSpanConfig))
{
using (Context.StartBlock(BlockType.Statement))
{
Span.EditHandler = bodyEditHandler;
CodeBlock(false, block);
CompleteBlock(insertMarkerIfNecessary: true);
Output(SpanKind.Code);
}
}
Initialize(Span);
EnsureCurrent();
Span.CodeGenerator = SpanCodeGenerator.Null; // The block will generate the footer code.
if (!Optional(CSharpSymbolType.RightBrace))
{
// The } is missing, so set the initial signature span to use it as an autocomplete string
bodyEditHandler.AutoCompleteString = "}";
// Need to be able to accept anything to properly handle the autocomplete
bodyEditHandler.AcceptedCharacters = AcceptedCharacters.Any;
}
else
{
blockGen.Footer = Span.GetContent();
Span.EditHandler.AcceptedCharacters = AcceptedCharacters.None;
}
CompleteBlock();
Output(SpanKind.Code);
}
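// Parses the @section directive, e.g. "@section Scripts { ... }" (illustrative): the identifier after
// the keyword becomes the section name and the brace-delimited body is parsed by SectionBlock.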
protected virtual void SectionDirective()
{
bool nested = Context.IsWithin(BlockType.Section);
bool errorReported = false;
// Set the block and span type
Context.CurrentBlock.Type = BlockType.Section;
// Verify we're on "section" and accept
AssertDirective(SyntaxConstants.CSharp.SectionKeyword);
AcceptAndMoveNext();
if (nested)
{
Context.OnError(CurrentLocation, string.Format(RazorResources.ParseError_Sections_Cannot_Be_Nested, RazorResources.SectionExample_CS));
errorReported = true;
}
IEnumerable<CSharpSymbol> ws = ReadWhile(IsSpacingToken(includeNewLines: true, includeComments: false));
// Get the section name
string sectionName = String.Empty;
if (!Required(CSharpSymbolType.Identifier,
errorIfNotFound: true,
errorBase: arg => string.Format(RazorResources.ParseError_Unexpected_Character_At_Section_Name_Start, arg)))
{
if (!errorReported)
{
errorReported = true;
}
PutCurrentBack();
PutBack(ws);
AcceptWhile(IsSpacingToken(includeNewLines: false, includeComments: false));
}
else
{
Accept(ws);
sectionName = CurrentSymbol.Content;
AcceptAndMoveNext();
}
Context.CurrentBlock.CodeGenerator = new SectionCodeGenerator(sectionName);
SourceLocation errorLocation = CurrentLocation;
ws = ReadWhile(IsSpacingToken(includeNewLines: true, includeComments: false));
// Get the starting brace
bool sawStartingBrace = At(CSharpSymbolType.LeftBrace);
if (!sawStartingBrace)
{
if (!errorReported)
{
errorReported = true;
Context.OnError(errorLocation, RazorResources.ParseError_MissingOpenBraceAfterSection);
}
PutCurrentBack();
PutBack(ws);
AcceptWhile(IsSpacingToken(includeNewLines: false, includeComments: false));
Optional(CSharpSymbolType.NewLine);
Output(SpanKind.MetaCode);
CompleteBlock();
return;
}
else
{
Accept(ws);
}
// Set up edit handler
AutoCompleteEditHandler editHandler = new AutoCompleteEditHandler(Language.TokenizeString) { AutoCompleteAtEndOfSpan = true };
Span.EditHandler = editHandler;
Span.Accept(CurrentSymbol);
// Output Metacode then switch to section parser
Output(SpanKind.MetaCode);
SectionBlock("{", "}", caseSensitive: true);
Span.CodeGenerator = SpanCodeGenerator.Null;
// Check for the terminating "}"
if (!Optional(CSharpSymbolType.RightBrace))
{
editHandler.AutoCompleteString = "}";
Context.OnError(CurrentLocation,
string.Format(RazorResources.ParseError_Expected_X,
Language.GetSample(CSharpSymbolType.RightBrace)));
}
else
{
Span.EditHandler.AcceptedCharacters = AcceptedCharacters.None;
}
CompleteBlock(insertMarkerIfNecessary: false, captureWhitespaceToEndOfLine: true);
Output(SpanKind.MetaCode);
return;
}
protected virtual void FunctionsDirective()
{
// Set the block type
Context.CurrentBlock.Type = BlockType.Functions;
// Verify we're on "functions" and accept
AssertDirective(SyntaxConstants.CSharp.FunctionsKeyword);
Block block = new Block(CurrentSymbol);
AcceptAndMoveNext();
AcceptWhile(IsSpacingToken(includeNewLines: true, includeComments: false));
if (!At(CSharpSymbolType.LeftBrace))
{
Context.OnError(CurrentLocation,
string.Format(RazorResources.ParseError_Expected_X, Language.GetSample(CSharpSymbolType.LeftBrace)));
CompleteBlock();
Output(SpanKind.MetaCode);
return;
}
else
{
Span.EditHandler.AcceptedCharacters = AcceptedCharacters.None;
}
// Capture start point and continue
SourceLocation blockStart = CurrentLocation;
AcceptAndMoveNext();
// Output what we've seen and continue
Output(SpanKind.MetaCode);
AutoCompleteEditHandler editHandler = new AutoCompleteEditHandler(Language.TokenizeString);
Span.EditHandler = editHandler;
Balance(BalancingModes.NoErrorOnFailure, CSharpSymbolType.LeftBrace, CSharpSymbolType.RightBrace, blockStart);
Span.CodeGenerator = new TypeMemberCodeGenerator();
if (!At(CSharpSymbolType.RightBrace))
{
editHandler.AutoCompleteString = "}";
Context.OnError(block.Start, string.Format(RazorResources.ParseError_Expected_EndOfBlock_Before_EOF, block.Name, "}", "{"));
CompleteBlock();
Output(SpanKind.Code);
}
else
{
Output(SpanKind.Code);
Assert(CSharpSymbolType.RightBrace);
Span.CodeGenerator = SpanCodeGenerator.Null;
Span.EditHandler.AcceptedCharacters = AcceptedCharacters.None;
AcceptAndMoveNext();
CompleteBlock();
Output(SpanKind.MetaCode);
}
}
protected virtual void InheritsDirective()
{
// Verify we're on the right keyword and accept
AssertDirective(SyntaxConstants.CSharp.InheritsKeyword);
AcceptAndMoveNext();
InheritsDirectiveCore();
}
[SuppressMessage("Microsoft.Usage", "CA1801:ReviewUnusedParameters", MessageId = "directive", Justification = "This only occurs in Release builds, where this method is empty by design")]
[SuppressMessage("Microsoft.Performance", "CA1822:MarkMembersAsStatic", Justification = "This only occurs in Release builds, where this method is empty by design")]
[Conditional("DEBUG")]
protected void AssertDirective(string directive)
{
Assert(CSharpSymbolType.Identifier);
Debug.Assert(String.Equals(CurrentSymbol.Content, directive, StringComparison.Ordinal));
}
protected void InheritsDirectiveCore()
{
BaseTypeDirective(RazorResources.ParseError_InheritsKeyword_Must_Be_Followed_By_TypeName, baseType => new SetBaseTypeCodeGenerator(baseType));
}
protected void BaseTypeDirective(string noTypeNameError, Func<string, SpanCodeGenerator> createCodeGenerator)
{
// Set the block type
Context.CurrentBlock.Type = BlockType.Directive;
// Accept whitespace
CSharpSymbol remainingWs = AcceptSingleWhiteSpaceCharacter();
if (Span.Symbols.Count > 1)
{
Span.EditHandler.AcceptedCharacters = AcceptedCharacters.None;
}
Output(SpanKind.MetaCode);
if (remainingWs != null)
{
Accept(remainingWs);
}
AcceptWhile(IsSpacingToken(includeNewLines: false, includeComments: true));
if (EndOfFile || At(CSharpSymbolType.WhiteSpace) || At(CSharpSymbolType.NewLine))
{
Context.OnError(CurrentLocation, noTypeNameError);
}
// Parse to the end of the line
AcceptUntil(CSharpSymbolType.NewLine);
if (!Context.DesignTimeMode)
{
// We want the newline to be treated as code, but it causes issues at design-time.
Optional(CSharpSymbolType.NewLine);
}
// Pull out the type name
string baseType = Span.GetContent();
// Set up code generation
Span.CodeGenerator = createCodeGenerator(baseType.Trim());
// Output the span and finish the block
CompleteBlock();
Output(SpanKind.Code);
}
}
}
| |
using System;
using EncompassRest.Loans.Enums;
using EncompassRest.Schema;
namespace EncompassRest.Loans
{
/// <summary>
/// ReoProperty
/// </summary>
[Entity(SerializeWholeListWhenDirty = true)]
public sealed partial class ReoProperty : DirtyExtensibleObject, IIdentifiable
{
private DirtyValue<DateTime?>? _acquiredDate;
private DirtyValue<string?>? _city;
private DirtyValue<string?>? _country;
private DirtyValue<string?>? _countryCode;
private DirtyValue<StringEnumValue<DispositionStatusType>>? _dispositionStatusType;
private DirtyValue<bool?>? _entityDeleted;
private DirtyValue<bool?>? _foreignAddressIndicator;
private DirtyValue<StringEnumValue<FuturePropertyUsageType>>? _futurePropertyUsageType;
private DirtyValue<string?>? _futureUsageTypeOtherDesc;
private DirtyValue<StringEnumValue<GsePropertyType>>? _gsePropertyType;
private DirtyValue<string?>? _id;
private DirtyValue<bool?>? _includeInAusExport;
private DirtyValue<bool?>? _isEmpty;
private DirtyValue<bool?>? _liabilityDoesNotApply;
private DirtyValue<decimal?>? _lienInstallmentAmount;
private DirtyValue<decimal?>? _lienUpbAmount;
private DirtyValue<string?>? _maintenanceExpenseAmount;
private DirtyValue<decimal?>? _marketValueAmount;
private DirtyValue<bool?>? _noLinkToDocTrackIndicator;
private DirtyValue<int?>? _numberOfUnits;
private DirtyValue<StringEnumValue<Owner>>? _owner;
private DirtyValue<decimal?>? _participationPercentage;
private DirtyValue<decimal?>? _percentageofRental;
private DirtyValue<string?>? _postalCode;
private DirtyValue<bool?>? _printAttachIndicator;
private DirtyValue<bool?>? _printUserJobTitleIndicator;
private DirtyValue<bool?>? _printUserNameIndicator;
private DirtyValue<StringEnumValue<ReoPropertyUsageType>>? _propertyUsageType;
private DirtyValue<int?>? _purchasePrice;
private DirtyValue<decimal?>? _rentalIncomeGrossAmount;
private DirtyValue<decimal?>? _rentalIncomeNetAmount;
private DirtyValue<string?>? _reoComments;
private DirtyValue<string?>? _reoId;
private DirtyValue<int?>? _reoPropertyIndex;
private DirtyValue<DateTime?>? _requestDate;
private DirtyValue<StringEnumValue<State>>? _state;
private DirtyValue<string?>? _streetAddress;
private DirtyValue<bool?>? _subjectIndicator;
private DirtyValue<string?>? _title;
private DirtyValue<string?>? _titleFax;
private DirtyValue<string?>? _titlePhone;
private DirtyValue<string?>? _unitNumber;
private DirtyValue<StringEnumValue<UnitType>>? _unitType;
private DirtyValue<string?>? _uRLA2020StreetAddress;
private DirtyValue<string?>? _yearBuilt;
/// <summary>
/// Mortgage Property Date Acquired [FMNN22]
/// </summary>
public DateTime? AcquiredDate { get => _acquiredDate; set => SetField(ref _acquiredDate, value); }
/// <summary>
/// Mortgage Property City [FMNN06]
/// </summary>
public string? City { get => _city; set => SetField(ref _city, value); }
/// <summary>
/// Mortgage Property Country [FMNN57]
/// </summary>
public string? Country { get => _country; set => SetField(ref _country, value); }
/// <summary>
/// Mortgage Property Country Code [FMNN51]
/// </summary>
public string? CountryCode { get => _countryCode; set => SetField(ref _countryCode, value); }
/// <summary>
/// Mortgage Property Disposition Status [FMNN24]
/// </summary>
public StringEnumValue<DispositionStatusType> DispositionStatusType { get => _dispositionStatusType; set => SetField(ref _dispositionStatusType, value); }
/// <summary>
/// ReoProperty EntityDeleted
/// </summary>
public bool? EntityDeleted { get => _entityDeleted; set => SetField(ref _entityDeleted, value); }
/// <summary>
/// Mortgage Property Foreign Address Indicator [FMNN58]
/// </summary>
public bool? ForeignAddressIndicator { get => _foreignAddressIndicator; set => SetField(ref _foreignAddressIndicator, value); }
/// <summary>
/// Mortgage Property Future Use of Property [FMNN55]
/// </summary>
public StringEnumValue<FuturePropertyUsageType> FuturePropertyUsageType { get => _futurePropertyUsageType; set => SetField(ref _futurePropertyUsageType, value); }
/// <summary>
/// Description If Future Use of Property Is Other [FMNN56]
/// </summary>
public string? FutureUsageTypeOtherDesc { get => _futureUsageTypeOtherDesc; set => SetField(ref _futureUsageTypeOtherDesc, value); }
/// <summary>
/// Mortgage Property Type [FMNN18]
/// </summary>
public StringEnumValue<GsePropertyType> GsePropertyType { get => _gsePropertyType; set => SetField(ref _gsePropertyType, value); }
/// <summary>
/// ReoProperty Id
/// </summary>
public string? Id { get => _id; set => SetField(ref _id, value); }
/// <summary>
/// Include In AUS/Export [FMNN52]
/// </summary>
[LoanFieldProperty(OptionsJson = "{\"Y\":\"Include In AUS/Export\"}")]
public bool? IncludeInAusExport { get => _includeInAusExport; set => SetField(ref _includeInAusExport, value); }
/// <summary>
/// ReoProperty IsEmpty
/// </summary>
public bool? IsEmpty { get => _isEmpty; set => SetField(ref _isEmpty, value); }
/// <summary>
/// Mortgage Property Not linked to Liability [FMNN49]
/// </summary>
public bool? LiabilityDoesNotApply { get => _liabilityDoesNotApply; set => SetField(ref _liabilityDoesNotApply, value); }
/// <summary>
/// Mortgage Property Monthly Payment [FMNN16]
/// </summary>
public decimal? LienInstallmentAmount { get => _lienInstallmentAmount; set => SetField(ref _lienInstallmentAmount, value); }
/// <summary>
/// Mortgage Property Unpaid Balance [FMNN17]
/// </summary>
public decimal? LienUpbAmount { get => _lienUpbAmount; set => SetField(ref _lienUpbAmount, value); }
/// <summary>
/// Mortgage Property Monthly Expense [FMNN21]
/// </summary>
public string? MaintenanceExpenseAmount { get => _maintenanceExpenseAmount; set => SetField(ref _maintenanceExpenseAmount, value); }
/// <summary>
/// Mortgage Property Market Value [FMNN19]
/// </summary>
public decimal? MarketValueAmount { get => _marketValueAmount; set => SetField(ref _marketValueAmount, value); }
/// <summary>
/// Mortgage Property No Link to Doc Tracking [FMNN97]
/// </summary>
[LoanFieldProperty(ReadOnly = true)]
public bool? NoLinkToDocTrackIndicator { get => _noLinkToDocTrackIndicator; set => SetField(ref _noLinkToDocTrackIndicator, value); }
/// <summary>
/// Number of Units [FMNN54]
/// </summary>
public int? NumberOfUnits { get => _numberOfUnits; set => SetField(ref _numberOfUnits, value); }
/// <summary>
/// Mortgage Property Owner [FMNN46]
/// </summary>
public StringEnumValue<Owner> Owner { get => _owner; set => SetField(ref _owner, value); }
/// <summary>
/// Mortgage Property Participation Percent [FMNN14]
/// </summary>
public decimal? ParticipationPercentage { get => _participationPercentage; set => SetField(ref _participationPercentage, value); }
/// <summary>
/// Mortgage Property Percent of Units Rented [FMNN26]
/// </summary>
public decimal? PercentageofRental { get => _percentageofRental; set => SetField(ref _percentageofRental, value); }
/// <summary>
/// Mortgage Property Zipcode [FMNN08]
/// </summary>
[LoanFieldProperty(Format = LoanFieldFormat.ZIPCODE)]
public string? PostalCode { get => _postalCode; set => SetField(ref _postalCode, value); }
/// <summary>
/// Mortgage Property Print - See Attached Authorization [FMNN36]
/// </summary>
[LoanFieldProperty(OptionsJson = "{\"Y\":\"Print \\\"See attached borrower's authorization\\\" on signature line.\"}")]
public bool? PrintAttachIndicator { get => _printAttachIndicator; set => SetField(ref _printAttachIndicator, value); }
/// <summary>
/// Mortgage Property Print User Job Title [FMNN64]
/// </summary>
[LoanFieldProperty(OptionsJson = "{\"Y\":\"Print user's job title\"}")]
public bool? PrintUserJobTitleIndicator { get => _printUserJobTitleIndicator; set => SetField(ref _printUserJobTitleIndicator, value); }
/// <summary>
/// Mortgage Property Print User Name as Title [FMNN38]
/// </summary>
[LoanFieldProperty(OptionsJson = "{\"Y\":\"Print user's name as title\"}")]
public bool? PrintUserNameIndicator { get => _printUserNameIndicator; set => SetField(ref _printUserNameIndicator, value); }
/// <summary>
/// Mortgage Property Use of Property [FMNN41]
/// </summary>
public StringEnumValue<ReoPropertyUsageType> PropertyUsageType { get => _propertyUsageType; set => SetField(ref _propertyUsageType, value); }
/// <summary>
/// Mortgage Property Purchase Price [FMNN23]
/// </summary>
public int? PurchasePrice { get => _purchasePrice; set => SetField(ref _purchasePrice, value); }
/// <summary>
/// Mortgage Property Monthly Gross Rental Income [FMNN20]
/// </summary>
public decimal? RentalIncomeGrossAmount { get => _rentalIncomeGrossAmount; set => SetField(ref _rentalIncomeGrossAmount, value); }
/// <summary>
/// Mortgage Property Net Rental Income [FMNN32]
/// </summary>
public decimal? RentalIncomeNetAmount { get => _rentalIncomeNetAmount; set => SetField(ref _rentalIncomeNetAmount, value); }
/// <summary>
/// Mortgage Property Comments [FMNN25]
/// </summary>
public string? ReoComments { get => _reoComments; set => SetField(ref _reoComments, value); }
/// <summary>
/// Mortgage Property REO ID [FMNN43]
/// </summary>
[LoanFieldProperty(ReadOnly = true)]
public string? ReoId { get => _reoId; set => SetField(ref _reoId, value); }
/// <summary>
/// ReoProperty ReoPropertyIndex
/// </summary>
public int? ReoPropertyIndex { get => _reoPropertyIndex; set => SetField(ref _reoPropertyIndex, value); }
/// <summary>
/// Mortgage Property Request Date [FMNN98]
/// </summary>
public DateTime? RequestDate { get => _requestDate; set => SetField(ref _requestDate, value); }
/// <summary>
/// Mortgage Property State [FMNN07]
/// </summary>
public StringEnumValue<State> State { get => _state; set => SetField(ref _state, value); }
/// <summary>
/// Mortgage Property Address [FMNN04]
/// </summary>
public string? StreetAddress { get => _streetAddress; set => SetField(ref _streetAddress, value); }
/// <summary>
/// Mortgage Property Is the Subject Property [FMNN28]
/// </summary>
[LoanFieldProperty(OptionsJson = "{\"Y\":\"Subject Property\"}")]
public bool? SubjectIndicator { get => _subjectIndicator; set => SetField(ref _subjectIndicator, value); }
/// <summary>
/// Mortgage Property From Title [FMNN37]
/// </summary>
public string? Title { get => _title; set => SetField(ref _title, value); }
/// <summary>
/// Mortgage Property From Fax [FMNN45]
/// </summary>
[LoanFieldProperty(Format = LoanFieldFormat.PHONE)]
public string? TitleFax { get => _titleFax; set => SetField(ref _titleFax, value); }
/// <summary>
/// Mortgage Property From Phone [FMNN44]
/// </summary>
[LoanFieldProperty(Format = LoanFieldFormat.PHONE)]
public string? TitlePhone { get => _titlePhone; set => SetField(ref _titlePhone, value); }
/// <summary>
/// Mortgage Property Unit Number [FMNN48]
/// </summary>
public string? UnitNumber { get => _unitNumber; set => SetField(ref _unitNumber, value); }
/// <summary>
/// Mortgage Property Unit Type [FMNN47]
/// </summary>
public StringEnumValue<UnitType> UnitType { get => _unitType; set => SetField(ref _unitType, value); }
/// <summary>
/// Mortgage Property URLA 2020 Street Address [FMNN50]
/// </summary>
public string? URLA2020StreetAddress { get => _uRLA2020StreetAddress; set => SetField(ref _uRLA2020StreetAddress, value); }
/// <summary>
/// Mortgage Property Year in Which Built [FMNN53]
/// </summary>
public string? YearBuilt { get => _yearBuilt; set => SetField(ref _yearBuilt, value); }
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.IO;
using System.Threading;
using Xunit;
public class Directory_Modify_FailSafe
{
private delegate void ExceptionCode();
private static bool s_pass = true;
// Don't change the files array - it's used both by the
// FileManipulationTest and the FileEnumeratorTest (in two different
// directories, of course).
private static String[] s_files = new String[5];
static Directory_Modify_FailSafe()
{
s_files[0] = "a.1";
s_files[1] = "b.1";
s_files[2] = "c.txt";
s_files[3] = "d.blah";
s_files[4] = "e.blah";
}
[Fact]
public static void DirectoryInfoTest()
{
const String dirName = "DirectoryInfoTestDir";
const String altDirName = "DirectoryInfoTestDir2";
// Clean up from any failed test run
if (Directory.Exists(dirName))
Directory.Delete(dirName, true);
if (Directory.Exists(altDirName))
Directory.Delete(altDirName, true);
DirectoryInfo di = new DirectoryInfo(dirName);
if (di.Exists)
throw new Exception("Directory exists at beginning of test!");
di.Create();
Stream s = File.Create(Path.Combine(di.Name, "foo"));
s.Dispose();
di.CreateSubdirectory("bar");
// Attributes test
di.Refresh(); // Reload attributes information!
FileAttributes attr = di.Attributes;
if ((attr & FileAttributes.Directory) != FileAttributes.Directory)
throw new Exception("Unexpected attributes on the directory - the directory bit wasn't set! Got: " + attr);
// TODO: should this test also set and verify the System attribute?
// Rename directory via the MoveTo method, the move it back.
di = FailSafeDirectoryOperations.MoveDirectoryInfo(di, altDirName);
if (Directory.Exists(dirName))
throw new Exception("Old directory still exists after MoveTo!");
if (!Directory.Exists(altDirName))
throw new Exception("New directory doesn't exists after MoveTo!");
if (!di.Exists)
throw new Exception("DirectoryInfo says the directory doesn't exist after first MoveTo!");
di = FailSafeDirectoryOperations.MoveDirectoryInfo(di, dirName);
if (!di.Exists)
throw new Exception("DirectoryInfo says the directory doesn't exist after second MoveTo!");
// Get files and directories now.
FileInfo[] files = di.GetFiles();
if (files.Length != 1)
throw new Exception("GetFiles should have returned just one file! got: " + files.Length);
if (!"foo".Equals(files[0].Name))
throw new Exception("FileInfo's Name should have been foo, but was: " + files[0].Name);
DirectoryInfo[] dirs = di.GetDirectories();
if (dirs.Length != 1)
throw new Exception("GetDirectories should have returned just one dir! got: " + dirs.Length);
if (!"bar".Equals(dirs[0].Name))
throw new Exception("DirectoryInfo's Name should have been bar, but was: " + dirs[0].Name);
FileSystemInfo[] infos = di.GetFileSystemInfos();
if (infos.Length != 2)
throw new Exception("GetFileSystemInfos should have returned 2! got: " + infos.Length);
FileInfo tempFi = infos[0] as FileInfo;
DirectoryInfo tempDi = null;
if (tempFi == null)
{
tempFi = infos[1] as FileInfo;
tempDi = infos[0] as DirectoryInfo;
}
else
{
tempDi = infos[1] as DirectoryInfo;
}
if (!tempFi.Name.Equals("foo"))
throw new Exception("GetFileSystemInfo returned FileInfo with wrong name! got: " + tempFi.Name);
if (!tempDi.Name.Equals("bar"))
throw new Exception("GetFileSystemInfo returned DirectoryInfo with wrong name! got: " + tempDi.Name);
// Test DirectoryInfo.Name on something like "c:\bar\"
DirectoryInfo subDir = new DirectoryInfo(Path.Combine(di.Name, "bar") + Path.DirectorySeparatorChar);
if (!subDir.Name.Equals("bar"))
throw new Exception("Subdirectory name was wrong. Expected bar, Got: " + subDir.Name);
DirectoryInfo parent = subDir.Parent;
if (!DirNameEquals(parent.FullName, di.FullName))
throw new Exception("DI.FullName != SubDir.Parent.FullName! subdir full name: " + parent.FullName);
// Check more info about the DirectoryInfo
String rootName = Path.GetPathRoot(Directory.GetCurrentDirectory());
DirectoryInfo root = di.Root;
if (!rootName.Equals(root.Name))
throw new Exception(String.Format("Root directory name was wrong! rootName: {0} DI.Root.Name: {1}", rootName, root.Name));
// Test DirectoryInfo behavior for the root
string rootPath = root.FullName;
DirectoryInfo c = new DirectoryInfo(rootPath);
if (!rootPath.Equals(c.Name))
throw new Exception("DirectoryInfo name for root was wrong! got: " + c.Name);
if (!rootPath.Equals(c.FullName))
throw new Exception("DirectoryInfo FullName for root was wrong! got: " + c.FullName);
if (null != c.Parent)
throw new Exception("DirectoryInfo::Parent for root is not null!");
FailSafeDirectoryOperations.DeleteDirectoryInfo(di, true);
di.Refresh();
if (di.Exists)
throw new Exception("Directory still exists at end of test!");
Assert.True(s_pass);
}
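// Compares two directory paths, ignoring a single trailing separator (roots such as "C:\" keep theirs).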
private static bool DirNameEquals(String a, String b)
{
if (a.Length > 3 && a[a.Length - 1] == Path.DirectorySeparatorChar)
a = a.Substring(0, a.Length - 1);
if (b.Length > 3 && b[b.Length - 1] == Path.DirectorySeparatorChar)
b = b.Substring(0, b.Length - 1);
return a.Equals(b);
}
[Fact]
[ActiveIssue(1220)] // SetCurrentDirectory
public static void FileManipulationTest()
{
String dirName = "FileManipulationTest";
try
{
Directory.CreateDirectory(dirName);
}
catch (IOException)
{
/*
if (io.ErrorCode != 183) { // Path exists
Console.WriteLine("Error when creating directory "+dir);
Console.WriteLine(io.ErrorCode);
throw io;
}
*/
// assume the path exists.
// Clean out the directory
/*
FileEnumerator cleaner = new FileEnumerator(dir+"\\*");
while(cleaner.MoveNext())
if (!cleaner.Name.Equals(".") && !cleaner.Name.Equals(".."))
cleaner.Remove();
cleaner.Close();
*/
Directory.Delete(dirName, true);
Directory.CreateDirectory(dirName);
}
String origDir = Directory.GetCurrentDirectory();
Directory.SetCurrentDirectory(dirName);
CreateTestFiles();
Directory.SetCurrentDirectory(origDir);
DirectoryInfo dir = new DirectoryInfo(dirName);
// Try getting all files.
FileInfo[] found = dir.GetFiles("*");
if (found.Length != s_files.Length)
throw new Exception("After creating files, num found != num created. expected: " + s_files.Length + " got: " + found.Length);
for (int i = 0; i < s_files.Length; i++)
{
if (!s_files[i].Equals(found[i].Name))
throw new Exception("Couldn't find a file in the directory! thought I'd get: " + s_files[i] + " got: " + found[i]);
}
// Try getting a file that isn't there.
found = dir.GetFiles("this_file_doesnt_exist.nope");
if (found.Length != 0)
throw new Exception("Ack! Tried to do GetFiles(non-existant file) and got something!");
// Try getting a wildcard pattern that isn't there.
found = dir.GetFiles("*.nope");
if (found.Length != 0)
throw new Exception("Ack! Tried to do GetFiles(*.nope) and got something!");
// Try listing all .blah files.
found = dir.GetFiles("*.blah");
if (found.Length != 2)
throw new Exception("When looking for *.blah, found wrong number! expected: 2 got: " + found.Length);
if (!found[0].Name.Equals(s_files[3]))
throw new Exception("Found[0] wasn't files[3] when listing *.blah! got: " + found[0] + " expected: " + s_files[3]);
if (!found[1].Name.Equals(s_files[4]))
throw new Exception("Found[1] wasn't files[4] when listing *.blah! got: " + found[1]);
// Try listing all .txt files.
found = dir.GetFiles("*.txt");
if (found.Length != 1)
throw new Exception("When looking for *.txt, found wrong number! expected: 1 got: " + found.Length);
if (!found[0].Name.Equals(s_files[2]))
throw new Exception("Found[0] wasn't files[2] when listing *.txt! got: " + found[0]);
// Try listing all .1 files.
found = dir.GetFiles("*.1");
if (found.Length != 2)
throw new Exception("When looking for *.1, found wrong number! expected: 2 got: " + found.Length);
if (!found[0].Name.Equals(s_files[0]))
throw new Exception("Found[0] wasn't files[0] when listing *.1! got: " + found[0]);
if (!found[1].Name.Equals(s_files[1]))
throw new Exception("Found[1] wasn't files[1] when listing *.1! got: " + found[1]);
// Try listing all c* files.
found = dir.GetFiles("c*");
if (found.Length != 1)
throw new Exception("When looking for c*, found wrong number! expected: 1 got: " + found.Length);
if (!found[0].Name.Equals(s_files[2]))
throw new Exception("Found[0] wasn't files[2] when listing c*! got: " + found[0]);
// Copy then delete a file to make sure it's gone.
File.Copy(Path.Combine(dir.ToString(), s_files[0]), Path.Combine(dir.FullName, "newfile.new"));
found = dir.GetFiles("new*.new");
if (found.Length != 1)
throw new Exception("Didn't find copied file!");
if (!found[0].Name.Equals("newfile.new"))
throw new Exception("Didn't find newfile.new after copy! got: " + found[0]);
File.Delete(Path.Combine(dirName, "newfile.new"));
found = dir.GetFiles("new*.new");
if (found.Length != 0)
throw new Exception("new file wasn't deleted! " + found[0]);
String curDir = Directory.GetCurrentDirectory();
if (curDir == null)
throw new Exception("Ack! got null string from get current directory");
String newDir = Path.Combine(curDir, dirName);
Directory.SetCurrentDirectory(newDir);
if (!newDir.Equals(Directory.GetCurrentDirectory()))
throw new Exception("Ack! new directory didn't equal getcwd! " + Directory.GetCurrentDirectory());
if (!File.Exists(s_files[s_files.Length - 1]))
throw new Exception("Not in the new directory! Couldn't find last file!");
Directory.SetCurrentDirectory(curDir);
if (!curDir.Equals(Directory.GetCurrentDirectory()))
throw new Exception("Ack! old directory didn't equal getcwd! " + Directory.GetCurrentDirectory());
Directory.SetCurrentDirectory(newDir);
// Test CreateDirectories on a directory tree
Directory.CreateDirectory("a/b/c");
if (!Directory.Exists("a"))
throw new Exception("Directory a didn't exist!");
if (!Directory.Exists("a/b"))
throw new Exception("Directory a\\b didn't exist!");
if (!Directory.Exists("a/b/c"))
throw new Exception("Directory a\\b\\c didn't exist!");
Directory.Delete("a/b/c");
// Test creating one directory nested under existing ones
Directory.CreateDirectory("a/b/c");
if (!Directory.Exists("a/b/c"))
throw new Exception("Directory a\\b\\c didn't exist!");
// Delete "a\b\c\" recursively
Directory.Delete("a", true);
if (Directory.Exists("a"))
throw new Exception("Directory \"a\" still exists!");
try
{
Directory.CreateDirectory(s_files[0]);
}
catch (IOException)
{
/*
if (e.ErrorCode != 183) { // File Exists
Console.WriteLine("CreateDirectories threw: "+e);
Console.WriteLine(e.ErrorCode);
throw e;
}
*/
// Assume the path exists.
}
try
{
// SetCurrentDirectory on one that doesn't exist
Directory.SetCurrentDirectory("a/b"); // doesn't exist
throw new Exception("Ack! Set Current Directory to one that doesn't exist! should have thrown");
}
catch (DirectoryNotFoundException)
{
}
// Try FileEnumerator on files[] in the test directory
/*
FileEnumerator fe = new FileEnumerator("*");
// Skip over . and ..
fe.MoveNext();
if (fe.Name.Equals(".") && ((fe.Attributes & FileAttributes.Directory) != 0))
fe.MoveNext();
if (!fe.Name.Equals(".."))
throw new Exception("Trying to skip . and .. in FileEnumerator test - not what I expected "+fe.Name);
int num = 0;
Console.WriteLine("\tFileEnumerator Creation, Last Write, and Last Access Times and Size:");
while(fe.MoveNext()) {
if (!fe.Name.Equals(files[num]))
throw new Exception("FileEnumerator name wasn't what was expected! got: "+fe.Name);
Console.WriteLine("\t"+fe.CreationTime+"\t"+fe.LastWriteTime+"\t"+fe.LastAccessTime+" "+fe.Size);
num++;
}
if (num != files.Length)
throw new Exception("Files.Length wasn't equal number of files enumerated! got: "+num+" expected: "+files.Length);
if (fe.MoveNext())
throw new Exception("FileEnumerator GetNext returned true after enumeration finished!");
fe.Close();
*/
Directory.SetCurrentDirectory(curDir);
// Delete files.
for (int i = 0; i < s_files.Length; i++)
{
File.Delete(dirName + "/" + s_files[i]);
}
dir.Delete();
Assert.True(s_pass);
}
public static void CreateTestFiles()
{
for (int i = 0; i < s_files.Length; i++)
{
Stream fs = File.Create(s_files[i]);
BinaryWriter bw = new BinaryWriter(fs);
bw.Write(s_files[i]);
bw.Dispose();
}
}
[Fact]
public static void CreateDirTest()
{
String dir = "CreateDirTest";
try
{
Directory.Delete(dir);
}
catch (IOException) { }
Directory.CreateDirectory(dir);
Directory.Delete(dir);
Assert.True(s_pass);
}
[Fact]
public static void DeleteDirTest()
{
String dir = "DeleteDirTest";
try
{
Directory.CreateDirectory(dir);
}
catch (IOException)
{
}
try
{
Directory.Delete(dir);
Stream f = File.Create(dir);
f.Dispose();
File.Delete(dir);
}
catch (IOException io)
{
Console.WriteLine("File name: \"" + dir + '\"');
Console.WriteLine("caught IOException when trying to delete the dir then open file");
Console.WriteLine(io);
Console.WriteLine(io.StackTrace);
}
catch (Exception e)
{
Console.Error.WriteLine("Got unexpected exception from Delete or surrounding code: " + e);
Console.WriteLine(e.StackTrace);
throw;
}
Assert.True(s_pass);
}
[Fact]
[ActiveIssue(1220)] // SetCurrentDirectory
public static void GetSubdirectoriesTest()
{
String testDir = "GetSubdirectoriesTempDir";
if (Directory.Exists(Path.Combine(".", testDir)))
{
Console.WriteLine("Test didn't clean up right, trying to delete directory \"" + testDir + "\"");
try
{
Directory.Delete(testDir);
}
catch (Exception)
{
Console.WriteLine("Ack! Test couldn't clean up after itself. Delete .\\\"" + testDir + "\"");
throw;
}
}
String oldDirectory = Directory.GetCurrentDirectory();
Directory.CreateDirectory(testDir);
Directory.SetCurrentDirectory(testDir);
DirectoryInfo current = new DirectoryInfo(".");
Directory.CreateDirectory("a1");
Directory.CreateDirectory("a2");
current.CreateSubdirectory("b1");
Directory.CreateDirectory("b2");
Stream junk = File.Create("c1");
junk.Dispose();
try
{
DirectoryInfo[] dirs = current.GetDirectories("*");
if (dirs == null)
throw new Exception("Directory Names array was NULL!");
/*
Console.WriteLine("Directory names: ");
for(int i=0; i<dirs.Length; i++)
Console.WriteLine(dirs[i]);
*/
if (dirs.Length != 4) // 4 directories were created above; "." and ".." are not returned
throw new Exception("Directory names array should have been length 4! was: " + dirs.Length);
// Now try wildcards, such as "*1" and "a*"
dirs = current.GetDirectories("*1");
if (dirs.Length != 2)
throw new Exception("Directory names array should have been length 2 when asking for \"*1\", but was: " + dirs.Length);
dirs = current.GetDirectories("a*");
if (dirs.Length != 2)
throw new Exception("Directory names array should have been length 2 after looking for \"a*\", but was: " + dirs.Length);
}
catch (Exception)
{
Console.WriteLine("Error in GetDirectoryNamesTest - throwing an exception");
throw;
}
finally
{
try
{
Console.Out.Flush();
Directory.Delete("a1");
Directory.Delete("a2");
Directory.Delete("b1");
Directory.Delete("b2");
File.Delete("c1");
Directory.SetCurrentDirectory(oldDirectory);
//Directory.Delete(testDir);
current.Delete(false); // We've deleted everything else, it should clean up fine.
}
catch (Exception ex)
{
Console.Write("Ran into error cleaning up GetDirectoriesTest... {0}", ex.ToString());
throw;
}
}
Assert.True(s_pass);
}
private void DeleteFile(String fileName)
{
if (File.Exists(fileName))
File.Delete(fileName);
}
//Checks for error
private static bool Eval(bool expression, String msg, params Object[] values)
{
return Eval(expression, String.Format(msg, values));
}
private static bool Eval(bool expression, String msg)
{
if (!expression)
{
s_pass = false;
Console.WriteLine(msg);
}
return expression;
}
//A short cut API that doesn't need to repeat the error
private static bool Eval<T>(T actual, T expected, String errorMsg)
{
bool retValue = expected == null ? actual == null : expected.Equals(actual);
if (!retValue)
{
String value = String.Format("{0} Expected: {1}, Actual: {2}", errorMsg, (null == expected ? "<null>" : expected.ToString()), (null == actual ? "<null>" : actual.ToString()));
Eval(retValue, value);
}
return retValue;
}
//Checks for a particular type of exception
private static void CheckException<E>(ExceptionCode test, string error, params Object[] values)
{
CheckException<E>(test, error, null, values);
}
//Checks for a particular type of exception and an Exception msg in the English locale
private static void CheckException<E>(ExceptionCode test, string error, String msgExpected, params Object[] values)
{
bool exception = false;
String exErrMsg = String.Format(error, values);
try
{
test();
error = String.Format("{0} Exception NOT thrown ", exErrMsg);
}
catch (Exception e)
{
if (e.GetType() == typeof(E))
{
exception = true;
if (System.Globalization.CultureInfo.CurrentUICulture.Name == "en-US" && msgExpected != null && e.Message != msgExpected)
{
exception = false;
error = String.Format("{0} Message Different: <{1}>", exErrMsg, e.Message);
}
}
else
error = String.Format("{0} Exception type: {1}", exErrMsg, e.GetType().Name);
}
Eval(exception, error);
}
//Checks for a 2 types of exceptions
private static void CheckException<E, V>(ExceptionCode test, string error)
{
bool exception = false;
try
{
test();
error = String.Format("{0} Exception NOT thrown ", error);
}
catch (Exception e)
{
if (e.GetType() == typeof(E) || e.GetType() == typeof(V))
{
exception = true;
}
else
error = String.Format("{0} Exception type: {1}", error, e.GetType().Name);
}
Eval(exception, error);
}
}
| |
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
// **NOTE** This file was generated by a tool and any changes will be overwritten.
// Template Source: Templates\CSharp\Requests\EntityRequest.cs.tt
namespace Microsoft.Graph
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Net.Http;
using System.Threading;
using System.Linq.Expressions;
/// <summary>
/// The type ExtensionRequest.
/// </summary>
public partial class ExtensionRequest : BaseRequest, IExtensionRequest
{
/// <summary>
/// Constructs a new ExtensionRequest.
/// </summary>
/// <param name="requestUrl">The URL for the built request.</param>
/// <param name="client">The <see cref="IBaseClient"/> for handling requests.</param>
/// <param name="options">Query and header option name value pairs for the request.</param>
public ExtensionRequest(
string requestUrl,
IBaseClient client,
IEnumerable<Option> options)
: base(requestUrl, client, options)
{
}
/// <summary>
/// Creates the specified Extension using POST.
/// </summary>
/// <param name="extensionToCreate">The Extension to create.</param>
/// <returns>The created Extension.</returns>
public System.Threading.Tasks.Task<Extension> CreateAsync(Extension extensionToCreate)
{
return this.CreateAsync(extensionToCreate, CancellationToken.None);
}
/// <summary>
/// Creates the specified Extension using POST.
/// </summary>
/// <param name="extensionToCreate">The Extension to create.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The created Extension.</returns>
public async System.Threading.Tasks.Task<Extension> CreateAsync(Extension extensionToCreate, CancellationToken cancellationToken)
{
this.ContentType = "application/json";
this.Method = "POST";
var newEntity = await this.SendAsync<Extension>(extensionToCreate, cancellationToken).ConfigureAwait(false);
this.InitializeCollectionProperties(newEntity);
return newEntity;
}
/// <summary>
/// Deletes the specified Extension.
/// </summary>
/// <returns>The task to await.</returns>
public System.Threading.Tasks.Task DeleteAsync()
{
return this.DeleteAsync(CancellationToken.None);
}
/// <summary>
/// Deletes the specified Extension.
/// </summary>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The task to await.</returns>
public async System.Threading.Tasks.Task DeleteAsync(CancellationToken cancellationToken)
{
this.Method = "DELETE";
await this.SendAsync<Extension>(null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Gets the specified Extension.
/// </summary>
/// <returns>The Extension.</returns>
public System.Threading.Tasks.Task<Extension> GetAsync()
{
return this.GetAsync(CancellationToken.None);
}
/// <summary>
/// Gets the specified Extension.
/// </summary>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The Extension.</returns>
public async System.Threading.Tasks.Task<Extension> GetAsync(CancellationToken cancellationToken)
{
this.Method = "GET";
var retrievedEntity = await this.SendAsync<Extension>(null, cancellationToken).ConfigureAwait(false);
this.InitializeCollectionProperties(retrievedEntity);
return retrievedEntity;
}
/// <summary>
/// Updates the specified Extension using PATCH.
/// </summary>
/// <param name="extensionToUpdate">The Extension to update.</param>
/// <returns>The updated Extension.</returns>
public System.Threading.Tasks.Task<Extension> UpdateAsync(Extension extensionToUpdate)
{
return this.UpdateAsync(extensionToUpdate, CancellationToken.None);
}
/// <summary>
/// Updates the specified Extension using PATCH.
/// </summary>
/// <param name="extensionToUpdate">The Extension to update.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The updated Extension.</returns>
public async System.Threading.Tasks.Task<Extension> UpdateAsync(Extension extensionToUpdate, CancellationToken cancellationToken)
{
this.ContentType = "application/json";
this.Method = "PATCH";
var updatedEntity = await this.SendAsync<Extension>(extensionToUpdate, cancellationToken).ConfigureAwait(false);
this.InitializeCollectionProperties(updatedEntity);
return updatedEntity;
}
/// <summary>
/// Adds the specified expand value to the request.
/// </summary>
/// <param name="value">The expand value.</param>
/// <returns>The request object to send.</returns>
public IExtensionRequest Expand(string value)
{
this.QueryOptions.Add(new QueryOption("$expand", value));
return this;
}
/// <summary>
/// Adds the specified expand value to the request.
/// </summary>
/// <param name="expandExpression">The expression from which to calculate the expand value.</param>
/// <returns>The request object to send.</returns>
public IExtensionRequest Expand(Expression<Func<Extension, object>> expandExpression)
{
if (expandExpression == null)
{
throw new ArgumentNullException(nameof(expandExpression));
}
string error;
string value = ExpressionExtractHelper.ExtractMembers(expandExpression, out error);
if (value == null)
{
throw new ArgumentException(error, nameof(expandExpression));
}
else
{
this.QueryOptions.Add(new QueryOption("$expand", value));
}
return this;
}
/// <summary>
/// Adds the specified select value to the request.
/// </summary>
/// <param name="value">The select value.</param>
/// <returns>The request object to send.</returns>
public IExtensionRequest Select(string value)
{
this.QueryOptions.Add(new QueryOption("$select", value));
return this;
}
/// <summary>
/// Adds the specified select value to the request.
/// </summary>
/// <param name="selectExpression">The expression from which to calculate the select value.</param>
/// <returns>The request object to send.</returns>
public IExtensionRequest Select(Expression<Func<Extension, object>> selectExpression)
{
if (selectExpression == null)
{
throw new ArgumentNullException(nameof(selectExpression));
}
string error;
string value = ExpressionExtractHelper.ExtractMembers(selectExpression, out error);
if (value == null)
{
throw new ArgumentException(error, nameof(selectExpression));
}
else
{
this.QueryOptions.Add(new QueryOption("$select", value));
}
return this;
}
/// <summary>
/// Initializes any collection properties after deserialization, like next requests for paging.
/// </summary>
/// <param name="extensionToInitialize">The <see cref="Extension"/> with the collection properties to initialize.</param>
private void InitializeCollectionProperties(Extension extensionToInitialize)
{
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Globalization;
using Xunit;
namespace System.Drawing.PrimitivesTests
{
public class PointTests
{
[Fact]
public void DefaultConstructorTest()
{
Assert.Equal(Point.Empty, new Point());
}
[Theory]
[InlineData(int.MaxValue, int.MinValue)]
[InlineData(int.MinValue, int.MinValue)]
[InlineData(int.MaxValue, int.MaxValue)]
[InlineData(0, 0)]
public void NonDefaultConstructorTest(int x, int y)
{
Point p1 = new Point(x, y);
Point p2 = new Point(new Size(x, y));
Assert.Equal(p1, p2);
}
[Theory]
[InlineData(int.MaxValue)]
[InlineData(int.MinValue)]
[InlineData(0)]
public void SingleIntConstructorTest(int x)
{
Point p1 = new Point(x);
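// Point(int) treats the low 16 bits as X and the high 16 bits as Y; p2 reproduces that unpacking manually.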
Point p2 = new Point((short)(x & 0xFFFF), (short)((x >> 16) & 0xFFFF));
Assert.Equal(p1, p2);
}
[Fact]
public void IsEmptyDefaultsTest()
{
Assert.True(Point.Empty.IsEmpty);
Assert.True(new Point().IsEmpty);
Assert.True(new Point(0, 0).IsEmpty);
}
[Theory]
[InlineData(int.MaxValue, int.MinValue)]
[InlineData(int.MinValue, int.MinValue)]
[InlineData(int.MaxValue, int.MaxValue)]
public void IsEmptyRandomTest(int x, int y)
{
Assert.False(new Point(x, y).IsEmpty);
}
[Theory]
[InlineData(int.MaxValue, int.MinValue)]
[InlineData(int.MinValue, int.MinValue)]
[InlineData(int.MaxValue, int.MaxValue)]
[InlineData(0, 0)]
public void CoordinatesTest(int x, int y)
{
Point p = new Point(x, y);
Assert.Equal(x, p.X);
Assert.Equal(y, p.Y);
}
[Theory]
[InlineData(int.MaxValue, int.MinValue)]
[InlineData(int.MinValue, int.MinValue)]
[InlineData(int.MaxValue, int.MaxValue)]
[InlineData(0, 0)]
public void PointFConversionTest(int x, int y)
{
PointF p = new Point(x, y);
Assert.Equal(new PointF(x, y), p);
}
[Theory]
[InlineData(int.MaxValue, int.MinValue)]
[InlineData(int.MinValue, int.MinValue)]
[InlineData(int.MaxValue, int.MaxValue)]
[InlineData(0, 0)]
public void SizeConversionTest(int x, int y)
{
Size sz = (Size)new Point(x, y);
Assert.Equal(new Size(x, y), sz);
}
[Theory]
[InlineData(int.MaxValue, int.MinValue)]
[InlineData(int.MinValue, int.MinValue)]
[InlineData(int.MaxValue, int.MaxValue)]
[InlineData(0, 0)]
public void ArithmeticTest(int x, int y)
{
Point p = new Point(x, y);
Size s = new Size(y, x);
Point addExpected = new Point(x + y, y + x);
Point subExpected = new Point(x - y, y - x);
Assert.Equal(addExpected, p + s);
Assert.Equal(subExpected, p - s);
Assert.Equal(addExpected, Point.Add(p, s));
Assert.Equal(subExpected, Point.Subtract(p, s));
}
[Theory]
[InlineData(float.MaxValue, float.MinValue)]
[InlineData(float.MinValue, float.MinValue)]
[InlineData(float.MaxValue, float.MaxValue)]
[InlineData(0, 0)]
public void PointFMathematicalTest(float x, float y)
{
PointF pf = new PointF(x, y);
Point pCeiling = new Point((int)Math.Ceiling(x), (int)Math.Ceiling(y));
Point pTruncate = new Point((int)x, (int)y);
Point pRound = new Point((int)Math.Round(x), (int)Math.Round(y));
Assert.Equal(pCeiling, Point.Ceiling(pf));
Assert.Equal(pRound, Point.Round(pf));
Assert.Equal(pTruncate, Point.Truncate(pf));
}
[Theory]
[InlineData(int.MaxValue, int.MinValue)]
[InlineData(int.MinValue, int.MinValue)]
[InlineData(int.MaxValue, int.MaxValue)]
[InlineData(0, 0)]
public void OffsetTest(int x, int y)
{
Point p1 = new Point(x, y);
Point p2 = new Point(y, x);
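// Offset adds the given point's coordinates component-wise, so p1 becomes (x + y, y + x).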
p1.Offset(p2);
Assert.Equal(p2.X + p2.Y, p1.X);
Assert.Equal(p1.X, p1.Y);
p2.Offset(x, y);
Assert.Equal(p1, p2);
}
[Theory]
[InlineData(int.MaxValue, int.MinValue)]
[InlineData(int.MinValue, int.MinValue)]
[InlineData(int.MaxValue, int.MaxValue)]
[InlineData(0, 0)]
public void EqualityTest(int x, int y)
{
Point p1 = new Point(x, y);
Point p2 = new Point(x / 2 - 1, y / 2 - 1);
Point p3 = new Point(x, y);
Assert.True(p1 == p3);
Assert.True(p1 != p2);
Assert.True(p2 != p3);
Assert.True(p1.Equals(p3));
Assert.False(p1.Equals(p2));
Assert.False(p2.Equals(p3));
Assert.Equal(p1.GetHashCode(), p3.GetHashCode());
}
[Fact]
public static void EqualityTest_NotPoint()
{
var point = new Point(0, 0);
Assert.False(point.Equals(null));
Assert.False(point.Equals(0));
Assert.False(point.Equals(new PointF(0, 0)));
}
[Fact]
public static void GetHashCodeTest()
{
var point = new Point(10, 10);
Assert.Equal(point.GetHashCode(), new Point(10, 10).GetHashCode());
Assert.NotEqual(point.GetHashCode(), new Point(20, 10).GetHashCode());
Assert.NotEqual(point.GetHashCode(), new Point(10, 20).GetHashCode());
}
[Theory]
[InlineData(0, 0, 0, 0)]
[InlineData(1, -2, 3, -4)]
public void ConversionTest(int x, int y, int width, int height)
{
Rectangle rect = new Rectangle(x, y, width, height);
RectangleF rectF = rect;
Assert.Equal(x, rectF.X);
Assert.Equal(y, rectF.Y);
Assert.Equal(width, rectF.Width);
Assert.Equal(height, rectF.Height);
}
[Theory]
[InlineData(0, 0)]
[InlineData(5, -5)]
public void ToStringTest(int x, int y)
{
Point p = new Point(x, y);
Assert.Equal(string.Format(CultureInfo.CurrentCulture, "{{X={0},Y={1}}}", p.X, p.Y), p.ToString());
}
}
}
| |
using Newtonsoft.Json;
namespace Nest
{
[JsonObject(MemberSerialization.OptIn)]
public class CatNodesRecord : ICatRecord
{
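// The _cat/nodes API can return each column under several header aliases (full name, short alias, camelCase).
// Each alias is captured in its own internal field and the public property coalesces them into a single value.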
[JsonProperty("id")]
internal string _id { get; set; }
[JsonProperty("nodeId")]
internal string _nodeId { get; set; }
public string NodeId => this._id ?? this._nodeId;
[JsonProperty("pid")]
internal string _pid { get; set; }
[JsonProperty("p")]
internal string _p { get; set; }
public string Pid => this._p ?? this._pid;
[JsonProperty("ip")]
internal string _ip { get; set; }
[JsonProperty("i")]
internal string _i { get; set; }
public string Ip => this._i ?? this._ip;
[JsonProperty("port")]
internal string _port { get; set; }
[JsonProperty("po")]
internal string _po { get; set; }
public string Port => this._po ?? this._port;
[JsonProperty("version")]
internal string _version { get; set; }
[JsonProperty("v")]
internal string _v { get; set; }
public string Version => this._v ?? this._version;
[JsonProperty("build")]
internal string _build { get; set; }
[JsonProperty("b")]
internal string _b { get; set; }
public string Build => this._b ?? this._build;
[JsonProperty("jdk")]
internal string _jdk { get; set; }
[JsonProperty("j")]
internal string _j { get; set; }
public string Jdk => this._j ?? this._jdk;
[JsonProperty("disk.avail")]
internal string _disk_avail { get; set; }
[JsonProperty("d")]
internal string _d { get; set; }
[JsonProperty("disk")]
internal string _disk { get; set; }
[JsonProperty("diskAvail")]
internal string _diskAvail { get; set; }
public string DiskAvailable => this._diskAvail ?? this._disk ?? this._d ?? this._disk_avail;
[JsonProperty("heap.current")]
internal string _heap_current { get; set; }
[JsonProperty("hc")]
internal string _hc { get; set; }
[JsonProperty("heapCurrent")]
internal string _heapCurrent { get; set; }
public string HeapCurrent => this._heapCurrent ?? this._hc ?? this._heap_current;
[JsonProperty("heap.percent")]
internal string _heap_percent { get; set; }
[JsonProperty("hp")]
internal string _hp { get; set; }
[JsonProperty("heapPercent")]
internal string _heapPercent { get; set; }
public string HeapPercent => this._heapPercent ?? this._hp ?? this._heap_percent;
[JsonProperty("heap.max")]
internal string _heap_max { get; set; }
[JsonProperty("hm")]
internal string _hm { get; set; }
[JsonProperty("heapMax")]
internal string _heapMax { get; set; }
public string HeapMax => this._heapMax ?? this._hm ?? this._heap_max;
[JsonProperty("ram.current")]
internal string _ram_current { get; set; }
[JsonProperty("rc")]
internal string _rc { get; set; }
[JsonProperty("ramCurrent")]
internal string _ramCurrent { get; set; }
public string RamCurrent => this._ramCurrent ?? this._rc ?? this._ram_current;
[JsonProperty("ram.percent")]
internal string _ram_percent { get; set; }
[JsonProperty("rp")]
internal string _rp { get; set; }
[JsonProperty("ramPercent")]
internal string _ramPercent { get; set; }
public string RamPercent => this._ramPercent ?? this._rp ?? this._ram_percent;
[JsonProperty("ram.max")]
internal string _ram_max { get; set; }
[JsonProperty("rm")]
internal string _rm { get; set; }
[JsonProperty("ramMax")]
internal string _ramMax { get; set; }
public string RamMax => this._ramMax ?? this._rm ?? this._ram_max;
[JsonProperty("load_1m")]
public string LoadOneMinute { get; internal set; }
[JsonProperty("load_5m")]
public string LoadFiveMinute { get; internal set; }
[JsonProperty("load_15m")]
public string LoadFifteenMinute { get; internal set; }
[JsonProperty("cpu")]
public string CPU { get; internal set; }
[JsonProperty("uptime")]
internal string _uptime { get; set; }
[JsonProperty("u")]
internal string _u { get; set; }
public string Uptime => this._u ?? this._uptime;
[JsonProperty("node.role")]
internal string _node_role { get; set; }
[JsonProperty("r")]
internal string _r { get; set; }
[JsonProperty("dc")]
internal string _dc { get; set; }
[JsonProperty("data/client")]
internal string _data_client { get; set; }
[JsonProperty("nodeRole")]
internal string _nodeRole { get; set; }
public string NodeRole => this._nodeRole ?? this._data_client ?? this._dc ?? this._r ?? this._node_role;
[JsonProperty("master")]
internal string _master { get; set; }
[JsonProperty("m")]
internal string _m { get; set; }
public string Master => this._m ?? this._master;
[JsonProperty("name")]
internal string _name { get; set; }
[JsonProperty("n")]
internal string _n { get; set; }
public string Name => this._n ?? this._name;
[JsonProperty("completion.size")]
internal string _completion_size { get; set; }
[JsonProperty("cs")]
internal string _cs { get; set; }
[JsonProperty("completionSize")]
internal string _completionSize { get; set; }
public string CompletionSize => this._completionSize ?? this._cs ?? this._completion_size;
[JsonProperty("fielddata.memory_size")]
internal string _fielddata_memory_size { get; set; }
[JsonProperty("fm")]
internal string _fm { get; set; }
[JsonProperty("fielddataMemory")]
internal string _fielddataMemory { get; set; }
public string FielddataMemory => this._fielddataMemory ?? this._fm ?? this._fielddata_memory_size;
[JsonProperty("fielddata.evictions")]
internal string _fielddata_evictions { get; set; }
[JsonProperty("fe")]
internal string _fe { get; set; }
[JsonProperty("fielddataEvictions")]
internal string _fielddataEvictions { get; set; }
public string FielddataEvictions => this._fielddataEvictions ?? this._fe ?? this._fielddata_evictions;
[JsonProperty("filter_cache.memory_size")]
internal string _filter_cache_memory_size { get; set; }
[JsonProperty("fcm")]
internal string _fcm { get; set; }
[JsonProperty("filterCacheMemory")]
internal string _filterCacheMemory { get; set; }
public string FilterCacheMemory => this._filterCacheMemory ?? this._fcm ?? this._filter_cache_memory_size;
[JsonProperty("filter_cache.evictions")]
internal string _filter_cache_evictions { get; set; }
[JsonProperty("fce")]
internal string _fce { get; set; }
[JsonProperty("filterCacheEvictions")]
internal string _filterCacheEvictions { get; set; }
public string FilterCacheEvictions => this._filterCacheEvictions ?? this._fce ?? this._filter_cache_evictions;
[JsonProperty("flush.total")]
internal string _flush_total { get; set; }
[JsonProperty("ft")]
internal string _ft { get; set; }
[JsonProperty("flushTotal")]
internal string _flushTotal { get; set; }
public string FlushTotal => this._flushTotal ?? this._ft ?? this._flush_total;
[JsonProperty("flush.total_time")]
internal string _flush_total_time { get; set; }
[JsonProperty("ftt")]
internal string _ftt { get; set; }
[JsonProperty("flushTotalTime")]
internal string _flushTotalTime { get; set; }
public string FlushTotalTime => this._flushTotalTime ?? this._ftt ?? this._flush_total_time;
[JsonProperty("file_desc.current")]
internal int? _file_desc_current { get; set; }
[JsonProperty("fdc")]
internal int? _fdc { get; set; }
[JsonProperty("fileDescriptorCurrent")]
internal int? _fileDescriptorCurrent { get; set; }
public int? FileDescriptorCurrent => this._fileDescriptorCurrent ?? this._fdc ?? this._file_desc_current;
[JsonProperty("file_desc.percent")]
internal int? _file_desc_percent { get; set; }
[JsonProperty("fdp")]
internal int? _fdp { get; set; }
[JsonProperty("fileDescriptorPercent")]
internal int? _fileDescriptorPercent { get; set; }
public int? FileDescriptorPercent => this._fileDescriptorPercent ?? this._fdp ?? this._file_desc_percent;
[JsonProperty("file_desc.max")]
internal int? _file_desc_max { get; set; }
[JsonProperty("fdm")]
internal int? _fdm { get; set; }
[JsonProperty("fileDescriptorMax")]
internal int? _fileDescriptorMax { get; set; }
public int? FileDescriptorMax => this._fileDescriptorMax ?? this._fdm ?? this._file_desc_max;
[JsonProperty("get.current")]
internal string _get_current { get; set; }
[JsonProperty("gc")]
internal string _gc { get; set; }
[JsonProperty("getCurrent")]
internal string _getCurrent { get; set; }
public string GetCurrent => this._getCurrent ?? this._gc ?? this._get_current;
[JsonProperty("get.time")]
internal string _get_time { get; set; }
[JsonProperty("gti")]
internal string _gti { get; set; }
[JsonProperty("getTime")]
internal string _getTime { get; set; }
public string GetTime => this._getTime ?? this._gti ?? this._get_time;
[JsonProperty("get.total")]
internal string _get_total { get; set; }
[JsonProperty("gto")]
internal string _gto { get; set; }
[JsonProperty("getTotal")]
internal string _getTotal { get; set; }
public string GetTotal => this._getTotal ?? this._gto ?? this._get_total;
[JsonProperty("get.exists_time")]
internal string _get_exists_time { get; set; }
[JsonProperty("geti")]
internal string _geti { get; set; }
[JsonProperty("getExistsTime")]
internal string _getExistsTime { get; set; }
public string GetExistsTime => this._getExistsTime ?? this._geti ?? this._get_exists_time;
[JsonProperty("get.exists_total")]
internal string _get_exists_total { get; set; }
[JsonProperty("geto")]
internal string _geto { get; set; }
[JsonProperty("getExistsTotal")]
internal string _getExistsTotal { get; set; }
public string GetExistsTotal => this._getExistsTotal ?? this._geto ?? this._get_exists_total;
[JsonProperty("get.missing_time")]
internal string _get_missing_time { get; set; }
[JsonProperty("gmti")]
internal string _gmti { get; set; }
[JsonProperty("getMissingTime")]
internal string _getMissingTime { get; set; }
public string GetMissingTime => this._getMissingTime ?? this._gmti ?? this._get_missing_time;
[JsonProperty("get.missing_total")]
internal string _get_missing_total { get; set; }
[JsonProperty("gmto")]
internal string _gmto { get; set; }
[JsonProperty("getMissingTotal")]
internal string _getMissingTotal { get; set; }
public string GetMissingTotal => this._getMissingTotal ?? this._gmto ?? this._get_missing_total;
[JsonProperty("id_cache.memory_size")]
internal string _id_cache_memory_size { get; set; }
[JsonProperty("im")]
internal string _im { get; set; }
[JsonProperty("idCacheMemory")]
internal string _idCacheMemory { get; set; }
public string IdCacheMemory => this._idCacheMemory ?? this._im ?? this._id_cache_memory_size;
[JsonProperty("indexing.delete_current")]
internal string _indexing_delete_current { get; set; }
[JsonProperty("idc")]
internal string _idcs { get; set; }
[JsonProperty("indexingDeleteCurrent")]
internal string _indexingDeleteCurrent { get; set; }
public string IndexingDeleteCurrent => this._indexingDeleteCurrent ?? this._idcs ?? this._indexing_delete_current;
[JsonProperty("indexing.delete_time")]
internal string _indexing_delete_time { get; set; }
[JsonProperty("idti")]
internal string _idti { get; set; }
[JsonProperty("indexingDeleteTime")]
internal string _indexingDeleteTime { get; set; }
public string IndexingDeleteTime => this._indexingDeleteTime ?? this._idti ?? this._indexing_delete_time;
[JsonProperty("indexing.delete_total")]
internal string _indexing_delete_total { get; set; }
[JsonProperty("idto")]
internal string _idto { get; set; }
[JsonProperty("indexingDeleteTotal")]
internal string _indexingDeleteTotal { get; set; }
public string IndexingDeleteTotal => this._indexingDeleteTotal ?? this._idto ?? this._indexing_delete_total;
[JsonProperty("indexing.index_current")]
internal string _indexing_index_current { get; set; }
[JsonProperty("iic")]
internal string _iic { get; set; }
[JsonProperty("indexingIndexCurrent")]
internal string _indexingIndexCurrent { get; set; }
public string IndexingIndexCurrent => this._indexingIndexCurrent ?? this._iic ?? this._indexing_index_current;
[JsonProperty("indexing.index_time")]
internal string _indexing_index_time { get; set; }
[JsonProperty("iiti")]
internal string _iiti { get; set; }
[JsonProperty("indexingIndexTime")]
internal string _indexingIndexTime { get; set; }
public string IndexingIndexTime => this._indexingIndexTime ?? this._iiti ?? this._indexing_index_time;
[JsonProperty("indexing.index_total")]
internal string _indexing_index_total { get; set; }
[JsonProperty("iito")]
internal string _iito { get; set; }
[JsonProperty("indexingIndexTotal")]
internal string _indexingIndexTotal { get; set; }
public string IndexingIndexTotal => this._indexingIndexTotal ?? this._iito ?? this._indexing_index_total;
[JsonProperty("merges.current")]
internal string _merges_current { get; set; }
[JsonProperty("mc")]
internal string _mc { get; set; }
[JsonProperty("mergesCurrent")]
internal string _mergesCurrent { get; set; }
public string MergesCurrent => this._mergesCurrent ?? this._mc ?? this._merges_current;
[JsonProperty("merges.current_docs")]
internal string _merges_current_docs { get; set; }
[JsonProperty("mcd")]
internal string _mcd { get; set; }
[JsonProperty("mergesCurrentDocs")]
internal string _mergesCurrentDocs { get; set; }
public string MergesCurrentDocs => this._mergesCurrentDocs ?? this._mcd ?? this._merges_current_docs;
[JsonProperty("merges.current_size")]
internal string _merges_current_size { get; set; }
[JsonProperty("mcs")]
internal string _mcs { get; set; }
[JsonProperty("mergesCurrentSize")]
internal string _mergesCurrentSize { get; set; }
public string MergesCurrentSize => this._mergesCurrentSize ?? this._mcs ?? this._merges_current_size;
[JsonProperty("merges.total")]
internal string _merges_total { get; set; }
[JsonProperty("mt")]
internal string _mt { get; set; }
[JsonProperty("mergesTotal")]
internal string _mergesTotal { get; set; }
public string MergesTotal => this._mergesTotal ?? this._mt ?? this._merges_total;
[JsonProperty("merges.total_docs")]
internal string _merges_total_docs { get; set; }
[JsonProperty("mtd")]
internal string _mtd { get; set; }
[JsonProperty("mergesTotalDocs")]
internal string _mergesTotalDocs { get; set; }
public string MergesTotalDocs => this._mergesTotalDocs ?? this._mtd ?? this._merges_total_docs;
[JsonProperty("merges.total_time")]
internal string _merges_total_time { get; set; }
[JsonProperty("mtt")]
internal string _mtt { get; set; }
[JsonProperty("mergesTotalTime")]
internal string _mergesTotalTime { get; set; }
public string MergesTotalTime => this._mergesTotalTime ?? this._mtt ?? this._merges_total_time;
[JsonProperty("percolate.current")]
internal string _percolate_current { get; set; }
[JsonProperty("pc")]
internal string _pc { get; set; }
[JsonProperty("percolateCurrent")]
internal string _percolateCurrent { get; set; }
public string PercolateCurrent => this._percolateCurrent ?? this._pc ?? this._percolate_current;
[JsonProperty("percolate.memory_size")]
internal string _percolate_memory_size { get; set; }
[JsonProperty("pm")]
internal string _pm { get; set; }
[JsonProperty("percolateMemory")]
internal string _percolateMemory { get; set; }
public string PercolateMemory => this._percolateMemory ?? this._pm ?? this._percolate_memory_size;
[JsonProperty("percolate.queries")]
internal string _percolate_queries { get; set; }
[JsonProperty("pq")]
internal string _pq { get; set; }
[JsonProperty("percolateQueries")]
internal string _percolateQueries { get; set; }
public string PercolateQueries => this._percolateQueries ?? this._pq ?? this._percolate_queries;
[JsonProperty("percolate.time")]
internal string _percolate_time { get; set; }
[JsonProperty("pti")]
internal string _pti { get; set; }
[JsonProperty("percolateTime")]
internal string _percolateTime { get; set; }
public string PercolateTime => this._percolateTime ?? this._pti ?? this._percolate_time;
[JsonProperty("percolate.total")]
internal string _percolate_total { get; set; }
[JsonProperty("pto")]
internal string _pto { get; set; }
[JsonProperty("percolateTotal")]
internal string _percolateTotal { get; set; }
public string PercolateTotal => this._percolateTotal ?? this._pto ?? this._percolate_total;
[JsonProperty("refresh.total")]
internal string _refresh_total { get; set; }
[JsonProperty("rto")]
internal string _rto { get; set; }
[JsonProperty("refreshTotal")]
internal string _refreshTotal { get; set; }
public string RefreshTotal => this._refreshTotal ?? this._rto ?? this._refresh_total;
[JsonProperty("refresh.time")]
internal string _refresh_time { get; set; }
[JsonProperty("rti")]
internal string _rti { get; set; }
[JsonProperty("refreshTime")]
internal string _refreshTime { get; set; }
public string RefreshTime => this._refreshTime ?? this._rti ?? this._refresh_time;
[JsonProperty("search.fetch_current")]
internal string _search_fetch_current { get; set; }
[JsonProperty("sfc")]
internal string _sfc { get; set; }
[JsonProperty("searchFetchCurrent")]
internal string _searchFetchCurrent { get; set; }
public string SearchFetchCurrent => this._searchFetchCurrent ?? this._sfc ?? this._search_fetch_current;
[JsonProperty("search.fetch_time")]
internal string _search_fetch_time { get; set; }
[JsonProperty("sfti")]
internal string _sfti { get; set; }
[JsonProperty("searchFetchTime")]
internal string _searchFetchTime { get; set; }
public string SearchFetchTime => this._searchFetchTime ?? this._sfti ?? this._search_fetch_time;
[JsonProperty("search.fetch_total")]
internal string _search_fetch_total { get; set; }
[JsonProperty("sfto")]
internal string _sfto { get; set; }
[JsonProperty("searchFetchTotal")]
internal string _searchFetchTotal { get; set; }
public string SearchFetchTotal => this._searchFetchTotal ?? this._sfto ?? this._search_fetch_total;
[JsonProperty("search.open_contexts")]
internal string _search_open_contexts { get; set; }
[JsonProperty("so")]
internal string _so { get; set; }
[JsonProperty("searchOpenContexts")]
internal string _searchOpenContexts { get; set; }
public string SearchOpenContexts => this._searchOpenContexts ?? this._so ?? this._search_open_contexts;
[JsonProperty("search.query_current")]
internal string _search_query_current { get; set; }
[JsonProperty("sqc")]
internal string _sqc { get; set; }
[JsonProperty("searchQueryCurrent")]
internal string _searchQueryCurrent { get; set; }
public string SearchQueryCurrent => this._searchQueryCurrent ?? this._sqc ?? this._search_query_current;
[JsonProperty("search.query_time")]
internal string _search_query_time { get; set; }
[JsonProperty("sqti")]
internal string _sqti { get; set; }
[JsonProperty("searchQueryTime")]
internal string _searchQueryTime { get; set; }
public string SearchQueryTime => this._searchQueryTime ?? this._sqti ?? this._search_query_time;
[JsonProperty("search.query_total")]
internal string _search_query_total { get; set; }
[JsonProperty("sqto")]
internal string _sqto { get; set; }
[JsonProperty("searchQueryTotal")]
internal string _searchQueryTotal { get; set; }
public string SearchQueryTotal => this._searchQueryTotal ?? this._sqto ?? this._search_query_total;
[JsonProperty("segments.count")]
internal string _segments_count { get; set; }
[JsonProperty("sc")]
internal string _sc { get; set; }
[JsonProperty("segmentsCount")]
internal string _segmentsCount { get; set; }
public string SegmentsCount => this._segmentsCount ?? this._sc ?? this._segments_count;
[JsonProperty("segments.memory")]
internal string _segments_memory { get; set; }
[JsonProperty("sm")]
internal string _sm { get; set; }
[JsonProperty("segmentsMemory")]
internal string _segmentsMemory { get; set; }
public string SegmentsMemory => this._segmentsMemory ?? this._sm ?? this._segments_memory;
[JsonProperty("segments.index_writer_memory")]
internal string _segments_index_writer_memory { get; set; }
[JsonProperty("siwm")]
internal string _siwm { get; set; }
[JsonProperty("segmentsIndexWriterMemory")]
internal string _segmentsIndexWriterMemory { get; set; }
public string SegmentsIndexWriterMemory => this._segmentsIndexWriterMemory ?? this._siwm ?? this._segments_index_writer_memory;
[JsonProperty("segments.index_writer_max_memory")]
internal string _segments_index_writer_max_memory { get; set; }
[JsonProperty("siwmx")]
internal string _siwmx { get; set; }
[JsonProperty("segmentsIndexWriterMaxMemory")]
internal string _segmentsIndexWriterMaxMemory { get; set; }
public string SegmentsIndexWriterMaxMemory => this._segmentsIndexWriterMaxMemory ?? this._siwmx ?? this._segments_index_writer_max_memory;
[JsonProperty("segments.version_map_memory")]
internal string _segments_version_map_memory { get; set; }
[JsonProperty("svmm")]
internal string _svmm { get; set; }
[JsonProperty("segmentsVersionMapMemory")]
internal string _segmentsVersionMapMemory { get; set; }
public string SegmentsVersionMapMemory => this._segmentsVersionMapMemory ?? this._svmm ?? this._segments_version_map_memory;
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Xml.Linq;
using Xunit;
namespace CoreXml.Test.XLinq.FunctionalTests.EventsTests
{
public class EventsAddBeforeSelf
{
public static object[][] ExecuteXDocumentVariationParams = new object[][] {
new object[] { new XNode[] { new XElement("element") }, new XComment("Comment") },
new object[] { new XNode[] { new XElement("parent", new XElement("child", "child text")) }, new XProcessingInstruction("PI", "Data") },
new object[] { new XNode[] { new XDocumentType("root", "", "", "") }, new XElement("root") },
new object[] { new XNode[] { new XProcessingInstruction("PI", "Data") }, new XText(" ") },
new object[] { new XNode[] { new XComment("Comment") }, new XDocumentType("root", "", "", "") },
new object[] { new XNode[] { new XText(""), new XText(" "), new XText("\t") }, new XText(" ") }
};
[Theory, MemberData("ExecuteXDocumentVariationParams")]
public void ExecuteXDocumentVariation(XNode[] toAdd, XNode contextNode)
{
IEnumerable<XNode> toAddList = toAdd.OfType<XNode>();
XDocument xDoc = new XDocument(contextNode);
XDocument xDocOriginal = new XDocument(xDoc);
using (UndoManager undo = new UndoManager(xDoc))
{
undo.Group();
using (EventsHelper docHelper = new EventsHelper(xDoc))
{
using (EventsHelper nodeHelper = new EventsHelper(contextNode))
{
contextNode.AddBeforeSelf(toAdd);
Assert.True(toAddList.SequenceEqual(contextNode.NodesBeforeSelf(), XNode.EqualityComparer), "Nodes not added correctly!");
nodeHelper.Verify(0);
}
docHelper.Verify(XObjectChange.Add, toAdd);
}
undo.Undo();
Assert.True(XNode.DeepEquals(xDoc, xDocOriginal), "Undo did not work!");
}
}
public static object[][] ExecuteXElementVariationParams = new object[][] {
new object[] { new XNode[] { new XElement("element") }, new XText("some text") },
new object[] { new XNode[] { new XElement("parent", new XElement("child", "child text")) }, new XProcessingInstruction("PI", "Data") },
new object[] { new XNode[] { new XCData("x+y >= z-m") }, new XElement("child") },
new object[] { new XNode[] { new XProcessingInstruction("PI", "Data") }, new XText(" ") },
new object[] { new XNode[] { new XComment("Comment") }, new XCData("x+y >= z-m") },
new object[] { new XNode[] { new XText(""), new XText(" "), new XText("\t") }, new XText(" ") },
new object[] { InputSpace.GetElement(100, 10).DescendantNodes().ToArray(), new XText("..") }
};
[Theory, MemberData("ExecuteXElementVariationParams")]
public void ExecuteXElementVariation(XNode[] toAdd, XNode contextNode)
{
IEnumerable<XNode> toAddList = toAdd.OfType<XNode>();
XElement xElem = new XElement("root", contextNode);
XElement xElemOriginal = new XElement(xElem);
using (UndoManager undo = new UndoManager(xElem))
{
undo.Group();
using (EventsHelper elemHelper = new EventsHelper(xElem))
{
using (EventsHelper nodeHelper = new EventsHelper(contextNode))
{
contextNode.AddBeforeSelf(toAdd);
Assert.True(toAddList.SequenceEqual(contextNode.NodesBeforeSelf(), XNode.EqualityComparer), "Nodes not added correctly!");
nodeHelper.Verify(0);
}
elemHelper.Verify(XObjectChange.Add, toAdd);
}
undo.Undo();
Assert.True(xElem.Nodes().SequenceEqual(xElemOriginal.Nodes(), XNode.EqualityComparer), "Undo did not work!");
Assert.True(xElem.Attributes().EqualsAllAttributes(xElemOriginal.Attributes(), Helpers.MyAttributeComparer), "Undo did not work!");
}
}
[Fact]
public void XDocumentAddNull()
{
XElement xElem = new XElement("root", "text");
EventsHelper elemHelper = new EventsHelper(xElem);
xElem.FirstNode.AddBeforeSelf(null);
elemHelper.Verify(0);
}
[Fact]
public void XElementWorkOnTextNodes1()
{
XElement elem = new XElement("A", "text2");
XNode n = elem.FirstNode;
using (UndoManager undo = new UndoManager(elem))
{
undo.Group();
using (EventsHelper eHelper = new EventsHelper(elem))
{
n.AddBeforeSelf("text0");
Assert.Equal("text0text2", elem.Value);
n.AddBeforeSelf("text1");
Assert.Equal("text0text1text2", elem.Value);
eHelper.Verify(new XObjectChange[] { XObjectChange.Add, XObjectChange.Value });
}
undo.Undo();
Assert.Equal("text2", elem.Value);
}
}
[Fact]
public void XElementWorkOnTextNodes2()
{
XElement elem = new XElement("A", "text2");
XNode n = elem.FirstNode;
using (UndoManager undo = new UndoManager(elem))
{
undo.Group();
using (EventsHelper eHelper = new EventsHelper(elem))
{
n.AddBeforeSelf("text0", "text1");
Assert.Equal("text0text1text2", elem.Value);
eHelper.Verify(XObjectChange.Add);
}
undo.Undo();
Assert.Equal("text2", elem.Value);
}
}
}
public class EventsAddAfterSelf
{
public static object[][] ExecuteXDocumentVariationParams = new object[][] {
new object [] { new XNode[] { new XElement("element") }, new XComment("Comment") },
new object [] { new XNode[] { new XElement("parent", new XElement("child", "child text")) }, new XProcessingInstruction("PI", "Data") },
new object [] { new XNode[] { new XDocumentType("root", "", "", "") }, new XText(" ") },
new object [] { new XNode[] { new XProcessingInstruction("PI", "Data") }, new XText(" ") },
new object [] { new XNode[] { new XComment("Comment") }, new XElement("root") },
new object [] { new XNode[] { new XText(""), new XText(" "), new XText("\t") }, new XText(" ") }
};
[Theory, MemberData("ExecuteXDocumentVariationParams")]
public void ExecuteXDocumentVariation(XNode[] toAdd, XNode contextNode)
{
IEnumerable<XNode> toAddList = toAdd.OfType<XNode>();
XDocument xDoc = new XDocument(contextNode);
XDocument xDocOriginal = new XDocument(xDoc);
using (UndoManager undo = new UndoManager(xDoc))
{
undo.Group();
using (EventsHelper docHelper = new EventsHelper(xDoc))
{
using (EventsHelper nodeHelper = new EventsHelper(contextNode))
{
contextNode.AddAfterSelf(toAdd);
Assert.True(toAddList.SequenceEqual(contextNode.NodesAfterSelf(), XNode.EqualityComparer), "Nodes not added correctly!");
nodeHelper.Verify(0);
}
docHelper.Verify(XObjectChange.Add, toAdd);
}
undo.Undo();
Assert.True(XNode.DeepEquals(xDoc, xDocOriginal), "Undo did not work!");
}
}
public static object[][] ExecuteXElementVariationParams = new object[][] {
new object[] { new XNode[] { new XElement("element") }, new XText("some text") },
new object[] { new XNode[] { new XElement("parent", new XElement("child", "child text")) }, new XProcessingInstruction("PI", "Data") },
new object[] { new XNode[] { new XCData("x+y >= z-m") }, new XElement("child") },
new object[] { new XNode[] { new XProcessingInstruction("PI", "Data") }, new XText(" ") },
new object[] { new XNode[] { new XComment("Comment") }, new XCData("x+y >= z-m") },
new object[] { new XNode[] { new XText(""), new XText(" "), new XText("\t") }, new XText(" ") },
new object[] { InputSpace.GetElement(100, 10).DescendantNodes().ToArray(), new XText("..") }
};
[Theory, MemberData("ExecuteXElementVariationParams")]
public void ExecuteXElementVariation(XNode[] toAdd, XNode contextNode)
{
IEnumerable<XNode> toAddList = toAdd.OfType<XNode>();
XElement xElem = new XElement("root", contextNode);
XElement xElemOriginal = new XElement(xElem);
using (UndoManager undo = new UndoManager(xElem))
{
undo.Group();
using (EventsHelper elemHelper = new EventsHelper(xElem))
{
using (EventsHelper nodeHelper = new EventsHelper(contextNode))
{
contextNode.AddAfterSelf(toAdd);
Assert.True(toAddList.SequenceEqual(contextNode.NodesAfterSelf(), XNode.EqualityComparer), "Nodes not added correctly!");
nodeHelper.Verify(0);
}
elemHelper.Verify(XObjectChange.Add, toAdd);
}
undo.Undo();
Assert.True(xElem.Nodes().SequenceEqual(xElemOriginal.Nodes(), XNode.EqualityComparer), "Undo did not work!");
Assert.True(xElem.Attributes().EqualsAllAttributes(xElemOriginal.Attributes(), Helpers.MyAttributeComparer), "Undo did not work!");
}
}
[Fact]
public void XElementAddNull()
{
XElement xElem = new XElement("root", "text");
EventsHelper elemHelper = new EventsHelper(xElem);
xElem.LastNode.AddAfterSelf(null);
elemHelper.Verify(0);
}
[Fact]
public void XElementWorkOnTextNodes1()
{
XElement elem = new XElement("A", "text2");
XNode n = elem.FirstNode;
using (UndoManager undo = new UndoManager(elem))
{
undo.Group();
using (EventsHelper eHelper = new EventsHelper(elem))
{
n.AddAfterSelf("text0");
Assert.Equal("text2text0", elem.Value);
n.AddAfterSelf("text1");
Assert.Equal("text2text0text1", elem.Value);
eHelper.Verify(new XObjectChange[] { XObjectChange.Value, XObjectChange.Value });
}
undo.Undo();
Assert.Equal("text2", elem.Value);
}
}
[Fact]
public void XElementWorkOnTextNodes2()
{
XElement elem = new XElement("A", "text2");
XNode n = elem.FirstNode;
using (UndoManager undo = new UndoManager(elem))
{
undo.Group();
using (EventsHelper eHelper = new EventsHelper(elem))
{
n.AddAfterSelf("text0", "text1");
Assert.Equal("text2text0text1", elem.Value);
eHelper.Verify(XObjectChange.Value);
}
undo.Undo();
Assert.Equal("text2", elem.Value);
}
}
}
public class EventsAddFirst
{
public static object[][] ExecuteXDocumentVariationParams = new object[][] {
new object [] { new XNode[] { new XElement("element") }, null },
new object [] { new XNode[] { new XElement("parent", new XElement("child", "child text")) }, null },
new object [] { new XNode[] { new XDocumentType("root", "", "", "") }, null },
new object [] { new XNode[] { new XProcessingInstruction("PI", "Data") }, null },
new object [] { new XNode[] { new XComment("Comment") }, null },
new object [] { new XNode[] { new XText(""), new XText(" "), new XText("\t") }, null },
new object [] { new XNode[] { new XElement("element") }, new XComment("Comment") },
new object [] { new XNode[] { new XElement("parent", new XElement("child", "child text")) }, new XProcessingInstruction("PI", "Data") },
new object [] { new XNode[] { new XDocumentType("root", "", "", "") }, new XText(" ") },
new object [] { new XNode[] { new XProcessingInstruction("PI", "Data") }, new XText(" ") },
new object [] { new XNode[] { new XComment("Comment") }, new XElement("root") },
new object [] { new XNode[] { new XText(""), new XText(" "), new XText("\t") }, new XText(" ") }
};
[Theory, MemberData("ExecuteXDocumentVariationParams")]
public void ExecuteXDocumentVariation(XNode[] toAdd, XNode contextNode)
{
IEnumerable<XNode> allNodes, toAddList = toAdd.OfType<XNode>();
XDocument xDoc = contextNode == null ? new XDocument() : new XDocument(contextNode);
XDocument xDocOriginal = new XDocument(xDoc);
using (UndoManager undo = new UndoManager(xDoc))
{
undo.Group();
using (EventsHelper docHelper = new EventsHelper(xDoc))
{
xDoc.AddFirst(toAdd);
allNodes = contextNode == null ? xDoc.Nodes() : contextNode.NodesBeforeSelf();
Assert.True(toAddList.SequenceEqual(allNodes, XNode.EqualityComparer), "Nodes not added correctly!");
docHelper.Verify(XObjectChange.Add, toAdd);
}
undo.Undo();
Assert.True(XNode.DeepEquals(xDoc, xDocOriginal), "Undo did not work!");
}
}
public static object[][] ExecuteXElementVariationParams = new object[][] {
new object[] { new XNode[] { new XElement("element") }, null },
new object[] { new XNode[] { new XElement("parent", new XElement("child", "child text")) }, null },
new object[] { new XNode[] { new XCData("x+y >= z-m") }, null },
new object[] { new XNode[] { new XProcessingInstruction("PI", "Data") }, null },
new object[] { new XNode[] { new XComment("Comment") }, null },
new object[] { new XNode[] { new XText(""), new XText(" "), new XText("\t") }, null },
new object[] { InputSpace.GetElement(100, 10).DescendantNodes().ToArray(), null },
new object[] { new XNode[] { new XElement("element") }, new XText("some text") },
new object[] { new XNode[] { new XElement("parent", new XElement("child", "child text")) }, new XProcessingInstruction("PI", "Data") },
new object[] { new XNode[] { new XCData("x+y >= z-m") }, new XElement("child") },
new object[] { new XNode[] { new XProcessingInstruction("PI", "Data") }, new XText(" ") },
new object[] { new XNode[] { new XComment("Comment") }, new XCData("x+y >= z-m") },
new object[] { new XNode[] { new XText(""), new XText(" "), new XText("\t") }, new XText(" ") },
new object[] { InputSpace.GetElement(100, 10).DescendantNodes().ToArray(), new XText("..") }
};
[Theory, MemberData("ExecuteXElementVariationParams")]
public void ExecuteXElementVariation(XNode[] toAdd, XNode contextNode)
{
IEnumerable<XNode> allNodes, toAddList = toAdd.OfType<XNode>();
XElement xElem = contextNode == null ? new XElement("root") : new XElement("root", contextNode);
XElement xElemOriginal = new XElement(xElem);
using (UndoManager undo = new UndoManager(xElem))
{
undo.Group();
using (EventsHelper elemHelper = new EventsHelper(xElem))
{
xElem.AddFirst(toAdd);
allNodes = contextNode == null ? xElem.Nodes() : contextNode.NodesBeforeSelf();
Assert.True(toAddList.SequenceEqual(allNodes, XNode.EqualityComparer), "Nodes not added correctly!");
elemHelper.Verify(XObjectChange.Add, toAdd);
}
undo.Undo();
Assert.True(xElem.Nodes().SequenceEqual(xElemOriginal.Nodes(), XNode.EqualityComparer), "Undo did not work!");
Assert.True(xElem.Attributes().EqualsAllAttributes(xElemOriginal.Attributes(), Helpers.MyAttributeComparer), "Undo did not work!");
}
}
[Fact]
public void XElementAddNull()
{
XElement xElem = new XElement("root", "text");
EventsHelper elemHelper = new EventsHelper(xElem);
xElem.AddFirst(null);
elemHelper.Verify(0);
}
[Fact]
public void XElementWorkOnTextNodes1()
{
XElement elem = new XElement("A", "text2");
using (UndoManager undo = new UndoManager(elem))
{
undo.Group();
using (EventsHelper eHelper = new EventsHelper(elem))
{
elem.AddFirst("text0");
Assert.Equal("text0text2", elem.Value);
elem.AddFirst("text1");
Assert.Equal("text1text0text2", elem.Value);
eHelper.Verify(new XObjectChange[] { XObjectChange.Add, XObjectChange.Add });
}
undo.Undo();
Assert.Equal("text2", elem.Value);
}
}
[Fact]
public void XElementWorkOnTextNodes2()
{
XElement elem = new XElement("A", "text2");
using (UndoManager undo = new UndoManager(elem))
{
undo.Group();
using (EventsHelper eHelper = new EventsHelper(elem))
{
elem.AddFirst("text0", "text1");
Assert.Equal("text0text1text2", elem.Value);
eHelper.Verify(XObjectChange.Add);
}
undo.Undo();
Assert.Equal("text2", elem.Value);
}
}
[Fact]
public void XElementStringContent()
{
bool firstTime = true;
XElement element = XElement.Parse("<root/>");
element.Changing += new EventHandler<XObjectChangeEventArgs>(
delegate (object sender, XObjectChangeEventArgs e)
{
if (firstTime)
{
firstTime = false;
element.AddFirst("Value");
}
});
Assert.Throws<InvalidOperationException>(() => { element.AddFirst(""); });
element.Verify();
}
[Fact]
public void XElementParentedXNode()
{
bool firstTime = true;
XElement element = XElement.Parse("<root></root>");
XElement child = new XElement("Add", "Me");
XElement newElement = new XElement("new", "element");
element.Changing += new EventHandler<XObjectChangeEventArgs>(
delegate (object sender, XObjectChangeEventArgs e)
{
if (firstTime)
{
firstTime = false;
newElement.Add(child);
}
});
Assert.Throws<InvalidOperationException>(() => { element.AddFirst(child); });
element.Verify();
Assert.Null(element.Element("Add"));
}
}
public class EventsAdd
{
public static object[][] ExecuteXDocumentVariationParams = new object[][] {
new object[] { new XNode[] { new XElement("element") }, null },
new object[] { new XNode[] { new XElement("parent", new XElement("child", "child text")) }, null },
new object[] { new XNode[] { new XDocumentType("root", "", "", "") }, null },
new object[] { new XNode[] { new XProcessingInstruction("PI", "Data") }, null },
new object[] { new XNode[] { new XComment("Comment") }, null },
new object[] { new XNode[] { new XText(""), new XText(" "), new XText("\t") }, null },
new object[] { new XNode[] { new XElement("element") }, new XComment("Comment") },
new object[] { new XNode[] { new XElement("parent", new XElement("child", "child text")) }, new XProcessingInstruction("PI", "Data") },
new object[] { new XNode[] { new XDocumentType("root", "", "", "") }, new XText(" ") },
new object[] { new XNode[] { new XProcessingInstruction("PI", "Data") }, new XText(" ") },
new object[] { new XNode[] { new XComment("Comment") }, new XElement("root") },
new object[] { new XNode[] { new XText(""), new XText(" "), new XText("\t") }, new XText(" ") }
};
[Theory, MemberData("ExecuteXDocumentVariationParams")]
public void ExecuteXDocumentVariation(XNode[] toAdd, XNode contextNode)
{
IEnumerable<XNode> allNodes, toAddList = toAdd.OfType<XNode>();
XDocument xDoc = contextNode == null ? new XDocument() : new XDocument(contextNode);
XDocument xDocOriginal = new XDocument(xDoc);
using (UndoManager undo = new UndoManager(xDoc))
{
undo.Group();
using (EventsHelper docHelper = new EventsHelper(xDoc))
{
xDoc.Add(toAdd);
allNodes = contextNode == null ? xDoc.Nodes() : contextNode.NodesAfterSelf();
Assert.True(toAddList.SequenceEqual(allNodes, XNode.EqualityComparer), "Nodes not added correctly!");
docHelper.Verify(XObjectChange.Add, toAdd);
}
undo.Undo();
Assert.True(XNode.DeepEquals(xDoc, xDocOriginal), "Undo did not work!");
}
}
public static object[][] VariationsForXElementParams = new object[][] {
new object[] { new XNode[] { new XElement("element") }, null },
new object[] { new XNode[] { new XElement("parent", new XElement("child", "child text")) }, null },
new object[] { new XNode[] { new XCData("x+y >= z-m") }, null },
new object[] { new XNode[] { new XProcessingInstruction("PI", "Data") }, null },
new object[] { new XNode[] { new XComment("Comment") }, null },
new object[] { new XNode[] { new XText(""), new XText(" "), new XText("\t") }, null },
new object[] { InputSpace.GetElement(100, 10).DescendantNodes().ToArray(), null },
new object[] { new XNode[] { new XElement("element") }, new XText("some text") },
new object[] { new XNode[] { new XElement("parent", new XElement("child", "child text")) }, new XProcessingInstruction("PI", "Data") },
new object[] { new XNode[] { new XCData("x+y >= z-m") }, new XElement("child") },
new object[] { new XNode[] { new XProcessingInstruction("PI", "Data") }, new XText(" ") },
new object[] { new XNode[] { new XComment("Comment") }, new XCData("x+y >= z-m") },
new object[] { new XNode[] { new XText(""), new XText(" "), new XText("\t") }, new XText(" ") },
new object[] { InputSpace.GetElement(100, 10).DescendantNodes().ToArray(), new XText("..") }
};
[Theory, MemberData("VariationsForXElementParams")]
public void ExecuteXElementVariation(XNode[] toAdd, XNode contextNode)
{
IEnumerable<XNode> allNodes, toAddList = toAdd.OfType<XNode>();
XElement xElem = contextNode == null ? new XElement("root") : new XElement("root", contextNode);
XElement xElemOriginal = new XElement(xElem);
using (UndoManager undo = new UndoManager(xElem))
{
undo.Group();
using (EventsHelper elemHelper = new EventsHelper(xElem))
{
xElem.Add(toAdd);
allNodes = contextNode == null ? xElem.Nodes() : contextNode.NodesAfterSelf();
Assert.True(toAddList.SequenceEqual(allNodes, XNode.EqualityComparer), "Nodes not added correctly!");
elemHelper.Verify(XObjectChange.Add, toAdd);
}
undo.Undo();
Assert.True(xElem.Nodes().SequenceEqual(xElemOriginal.Nodes(), XNode.EqualityComparer), "Undo did not work!");
Assert.True(xElem.Attributes().EqualsAllAttributes(xElemOriginal.Attributes(), Helpers.MyAttributeComparer), "Undo did not work!");
}
}
public static object[][] ExecuteXAttributeVariationParams = new object[][] {
new object[] { new XAttribute[] { new XAttribute("xxx", "yyy") }, null },
new object[] { new XAttribute[] { new XAttribute("{a}xxx", "a_yyy") }, null },
new object[] { InputSpace.GetElement(100, 10).Attributes().ToArray(), null },
new object[] { new XAttribute[] { new XAttribute("xxx", "yyy") }, new XAttribute("a", "aa") },
new object[] { new XAttribute[] { new XAttribute("{b}xxx", "b_yyy") }, new XAttribute("a", "aa") },
new object[] { InputSpace.GetElement(100, 10).Attributes().ToArray(), new XAttribute("a", "aa") }
};
[Theory, MemberData("ExecuteXAttributeVariationParams")]
public void ExecuteXAttributeVariation(XAttribute[] toAdd, XAttribute contextNode)
{
IEnumerable<XAttribute> allNodes, toAddList = toAdd.OfType<XAttribute>();
XElement xElem = contextNode == null ? new XElement("root") : new XElement("root", contextNode);
XElement xElemOriginal = new XElement(xElem);
using (UndoManager undo = new UndoManager(xElem))
{
undo.Group();
using (EventsHelper elemHelper = new EventsHelper(xElem))
{
xElem.Add(toAdd);
allNodes = contextNode == null ? xElem.Attributes() : xElem.Attributes().Skip(1);
Assert.True(toAddList.SequenceEqual(allNodes, Helpers.MyAttributeComparer), "Attributes not added correctly!");
elemHelper.Verify(XObjectChange.Add, toAdd);
}
undo.Undo();
Assert.True(xElem.Nodes().SequenceEqual(xElemOriginal.Nodes(), XNode.EqualityComparer), "Undo did not work!");
Assert.True(xElem.Attributes().EqualsAllAttributes(xElemOriginal.Attributes(), Helpers.MyAttributeComparer), "Undo did not work!");
}
}
[Fact]
public void XAttributeXAttributeAddAtDeepLevel()
{
XDocument xDoc = new XDocument(InputSpace.GetAttributeElement(100, 10));
XDocument xDocOriginal = new XDocument(xDoc);
using (UndoManager undo = new UndoManager(xDoc))
{
undo.Group();
using (EventsHelper docHelper = new EventsHelper(xDoc))
{
using (EventsHelper eHelper = new EventsHelper(xDoc.Root))
{
foreach (XElement x in xDoc.Root.Descendants())
{
x.Add(new XAttribute("at", "value"));
eHelper.Verify(XObjectChange.Add);
}
docHelper.Verify(XObjectChange.Add, xDoc.Root.Descendants().Count());
}
}
undo.Undo();
Assert.True(XNode.DeepEquals(xDoc, xDocOriginal), "Undo did not work!");
}
}
[Fact]
public void XElementXElementAddAtDeepLevel()
{
XDocument xDoc = new XDocument(InputSpace.GetElement(100, 10));
XDocument xDocOriginal = new XDocument(xDoc);
using (UndoManager undo = new UndoManager(xDoc))
{
undo.Group();
using (EventsHelper docHelper = new EventsHelper(xDoc))
{
using (EventsHelper eHelper = new EventsHelper(xDoc.Root))
{
foreach (XElement x in xDoc.Root.Descendants())
{
x.Add(new XText("Add Me"));
eHelper.Verify(XObjectChange.Add);
}
docHelper.Verify(XObjectChange.Add, xDoc.Root.Descendants().Count());
}
}
undo.Undo();
Assert.True(XNode.DeepEquals(xDoc, xDocOriginal), "Undo did not work!");
}
}
[Fact]
public void XElementWorkTextNodes()
{
XElement elem = new XElement("A", "text2");
XElement xElemOriginal = new XElement(elem);
using (UndoManager undo = new UndoManager(elem))
{
undo.Group();
using (EventsHelper eHelper = new EventsHelper(elem))
{
XNode x = elem.LastNode;
eHelper.Verify(0);
}
undo.Undo();
Assert.True(XNode.DeepEquals(elem, xElemOriginal), "Undo did not work!");
}
}
[Fact]
public void XElementWorkOnTextNodes1()
{
XElement elem = new XElement("A", "text2");
using (UndoManager undo = new UndoManager(elem))
{
undo.Group();
using (EventsHelper eHelper = new EventsHelper(elem))
{
elem.Add("text0");
Assert.Equal("text2text0", elem.Value);
elem.Add("text1");
Assert.Equal("text2text0text1", elem.Value);
eHelper.Verify(new XObjectChange[] { XObjectChange.Value, XObjectChange.Value });
}
undo.Undo();
Assert.Equal("text2", elem.Value);
}
}
[Fact]
public void XElementWorkOnTextNodes2()
{
XElement elem = new XElement("A", "text2");
XElement xElemOriginal = new XElement(elem);
using (UndoManager undo = new UndoManager(elem))
{
undo.Group();
using (EventsHelper eHelper = new EventsHelper(elem))
{
elem.Add("text0", "text1");
Assert.Equal("text2text0text1", elem.Value);
eHelper.Verify(new XObjectChange[] { XObjectChange.Value, XObjectChange.Value });
}
undo.Undo();
Assert.Equal("text2", elem.Value);
}
}
[Fact]
public void XElementStringContent()
{
bool firstTime = true;
XElement element = XElement.Parse("<root/>");
element.Changing += new EventHandler<XObjectChangeEventArgs>(
delegate (object sender, XObjectChangeEventArgs e)
{
if (firstTime)
{
firstTime = false;
element.Add("Value");
}
});
Assert.Throws<InvalidOperationException>(() => { element.Add(""); });
element.Verify();
}
[Fact]
public void XElementParentedXNode()
{
bool firstTime = true;
XElement element = XElement.Parse("<root></root>");
XElement child = new XElement("Add", "Me");
XElement newElement = new XElement("new", "element");
element.Changing += new EventHandler<XObjectChangeEventArgs>(
delegate (object sender, XObjectChangeEventArgs e)
{
if (firstTime)
{
firstTime = false;
newElement.Add(child);
}
});
Assert.Throws<InvalidOperationException>(() => { element.Add(child); });
element.Verify();
Assert.Null(element.Element("Add"));
}
[Fact]
public void XElementParentedAttribute()
{
bool firstTime = true;
XElement element = XElement.Parse("<root></root>");
XElement newElement = new XElement("new", "element");
XAttribute child = new XAttribute("Add", "Me");
element.Changing += new EventHandler<XObjectChangeEventArgs>(
delegate (object sender, XObjectChangeEventArgs e)
{
if (firstTime)
{
firstTime = false;
newElement.Add(child);
}
});
Assert.Throws<InvalidOperationException>(() => { element.Add(child); });
element.Verify();
Assert.Null(element.Attribute("Add"));
}
}
}
| |
using Lucene.Net.Support;
using System;
using System.Collections.Generic;
using System.Text;
/*
* dk.brics.automaton
*
* Copyright (c) 2001-2009 Anders Moeller
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
 * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
* NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
 * THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
namespace Lucene.Net.Util.Automaton
{
/// <summary>
/// Regular Expression extension to <code>Automaton</code>.
/// <p>
/// Regular expressions are built from the following abstract syntax:
/// <p>
/// <table border=0>
/// <tr>
/// <td><i>regexp</i></td>
/// <td>::=</td>
/// <td><i>unionexp</i></td>
/// <td></td>
/// <td></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td></td>
/// <td></td>
/// <td></td>
/// </tr>
///
/// <tr>
/// <td><i>unionexp</i></td>
/// <td>::=</td>
/// <td><i>interexp</i> <tt><b>|</b></tt> <i>unionexp</i></td>
/// <td>(union)</td>
/// <td></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><i>interexp</i></td>
/// <td></td>
/// <td></td>
/// </tr>
///
/// <tr>
/// <td><i>interexp</i></td>
/// <td>::=</td>
/// <td><i>concatexp</i> <tt><b>&</b></tt> <i>interexp</i></td>
/// <td>(intersection)</td>
/// <td><small>[OPTIONAL]</small></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><i>concatexp</i></td>
/// <td></td>
/// <td></td>
/// </tr>
///
/// <tr>
/// <td><i>concatexp</i></td>
/// <td>::=</td>
/// <td><i>repeatexp</i> <i>concatexp</i></td>
/// <td>(concatenation)</td>
/// <td></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><i>repeatexp</i></td>
/// <td></td>
/// <td></td>
/// </tr>
///
/// <tr>
/// <td><i>repeatexp</i></td>
/// <td>::=</td>
/// <td><i>repeatexp</i> <tt><b>?</b></tt></td>
/// <td>(zero or one occurrence)</td>
/// <td></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><i>repeatexp</i> <tt><b>*</b></tt></td>
/// <td>(zero or more occurrences)</td>
/// <td></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><i>repeatexp</i> <tt><b>+</b></tt></td>
/// <td>(one or more occurrences)</td>
/// <td></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><i>repeatexp</i> <tt><b>{</b><i>n</i><b>}</b></tt></td>
/// <td>(<tt><i>n</i></tt> occurrences)</td>
/// <td></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><i>repeatexp</i> <tt><b>{</b><i>n</i><b>,}</b></tt></td>
/// <td>(<tt><i>n</i></tt> or more occurrences)</td>
/// <td></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><i>repeatexp</i> <tt><b>{</b><i>n</i><b>,</b><i>m</i><b>}</b></tt></td>
/// <td>(<tt><i>n</i></tt> to <tt><i>m</i></tt> occurrences, including both)</td>
/// <td></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><i>complexp</i></td>
/// <td></td>
/// <td></td>
/// </tr>
///
/// <tr>
/// <td><i>complexp</i></td>
/// <td>::=</td>
/// <td><tt><b>~</b></tt> <i>complexp</i></td>
/// <td>(complement)</td>
/// <td><small>[OPTIONAL]</small></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><i>charclassexp</i></td>
/// <td></td>
/// <td></td>
/// </tr>
///
/// <tr>
/// <td><i>charclassexp</i></td>
/// <td>::=</td>
/// <td><tt><b>[</b></tt> <i>charclasses</i> <tt><b>]</b></tt></td>
/// <td>(character class)</td>
/// <td></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><tt><b>[^</b></tt> <i>charclasses</i> <tt><b>]</b></tt></td>
/// <td>(negated character class)</td>
/// <td></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><i>simpleexp</i></td>
/// <td></td>
/// <td></td>
/// </tr>
///
/// <tr>
/// <td><i>charclasses</i></td>
/// <td>::=</td>
/// <td><i>charclass</i> <i>charclasses</i></td>
/// <td></td>
/// <td></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><i>charclass</i></td>
/// <td></td>
/// <td></td>
/// </tr>
///
/// <tr>
/// <td><i>charclass</i></td>
/// <td>::=</td>
/// <td><i>charexp</i> <tt><b>-</b></tt> <i>charexp</i></td>
/// <td>(character range, including end-points)</td>
/// <td></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><i>charexp</i></td>
/// <td></td>
/// <td></td>
/// </tr>
///
/// <tr>
/// <td><i>simpleexp</i></td>
/// <td>::=</td>
/// <td><i>charexp</i></td>
/// <td></td>
/// <td></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><tt><b>.</b></tt></td>
/// <td>(any single character)</td>
/// <td></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><tt><b>#</b></tt></td>
/// <td>(the empty language)</td>
/// <td><small>[OPTIONAL]</small></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><tt><b>@</b></tt></td>
/// <td>(any string)</td>
/// <td><small>[OPTIONAL]</small></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><tt><b>"</b></tt> <Unicode string without double-quotes> <tt><b>"</b></tt></td>
/// <td>(a string)</td>
/// <td></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><tt><b>(</b></tt> <tt><b>)</b></tt></td>
/// <td>(the empty string)</td>
/// <td></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><tt><b>(</b></tt> <i>unionexp</i> <tt><b>)</b></tt></td>
/// <td>(precedence override)</td>
/// <td></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><tt><b><</b></tt> <identifier> <tt><b>></b></tt></td>
/// <td>(named automaton)</td>
/// <td><small>[OPTIONAL]</small></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><tt><b><</b><i>n</i>-<i>m</i><b>></b></tt></td>
/// <td>(numerical interval)</td>
/// <td><small>[OPTIONAL]</small></td>
/// </tr>
///
/// <tr>
/// <td><i>charexp</i></td>
/// <td>::=</td>
/// <td><Unicode character></td>
/// <td>(a single non-reserved character)</td>
/// <td></td>
/// </tr>
/// <tr>
/// <td></td>
/// <td>|</td>
/// <td><tt><b>\</b></tt> <Unicode character> </td>
/// <td>(a single character)</td>
/// <td></td>
/// </tr>
/// </table>
/// <p>
/// The productions marked <small>[OPTIONAL]</small> are only allowed if
/// specified by the syntax flags passed to the <code>RegExp</code> constructor.
/// The reserved characters used in the (enabled) syntax must be escaped with
/// backslash (<tt><b>\</b></tt>) or double-quotes (<tt><b>"..."</b></tt>). (In
/// contrast to other regexp syntaxes, this is required also in character
/// classes.) Be aware that dash (<tt><b>-</b></tt>) has a special meaning in
/// <i>charclass</i> expressions. An identifier is a string not containing right
/// angle bracket (<tt><b>></b></tt>) or dash (<tt><b>-</b></tt>). Numerical
/// intervals are specified by non-negative decimal integers and include both end
/// points, and if <tt><i>n</i></tt> and <tt><i>m</i></tt> have the same number
/// of digits, then the conforming strings must have that length (i.e. prefixed
/// by 0's).
///
/// @lucene.experimental
/// </summary>
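/// <example>
/// A minimal usage sketch based only on members defined in this class; the pattern string is illustrative:
/// <code>
/// RegExp re = new RegExp("(ab|cd)*[e-g]{1,3}");   // same as new RegExp(s, RegExp.ALL)
/// Automaton a = re.ToAutomaton();                 // minimal, deterministic automaton
/// string printable = re.ToString();               // round-trips the parsed expression
/// </code>
/// </example>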
public class RegExp
{
internal enum Kind
{
REGEXP_UNION,
REGEXP_CONCATENATION,
REGEXP_INTERSECTION,
REGEXP_OPTIONAL,
REGEXP_REPEAT,
REGEXP_REPEAT_MIN,
REGEXP_REPEAT_MINMAX,
REGEXP_COMPLEMENT,
REGEXP_CHAR,
REGEXP_CHAR_RANGE,
REGEXP_ANYCHAR,
REGEXP_EMPTY,
REGEXP_STRING,
REGEXP_ANYSTRING,
REGEXP_AUTOMATON,
REGEXP_INTERVAL
}
/// <summary>
/// Syntax flag, enables intersection (<tt>&</tt>).
/// </summary>
public const int INTERSECTION = 0x0001;
/// <summary>
/// Syntax flag, enables complement (<tt>~</tt>).
/// </summary>
public const int COMPLEMENT = 0x0002;
/// <summary>
/// Syntax flag, enables empty language (<tt>#</tt>).
/// </summary>
public const int EMPTY = 0x0004;
/// <summary>
/// Syntax flag, enables anystring (<tt>@</tt>).
/// </summary>
public const int ANYSTRING = 0x0008;
/// <summary>
/// Syntax flag, enables named automata (<tt><</tt>identifier<tt>></tt>).
/// </summary>
public const int AUTOMATON = 0x0010;
/// <summary>
/// Syntax flag, enables numerical intervals (
/// <tt><<i>n</i>-<i>m</i>></tt>).
/// </summary>
public const int INTERVAL = 0x0020;
/// <summary>
/// Syntax flag, enables all optional regexp syntax.
/// </summary>
public const int ALL = 0xffff;
/// <summary>
/// Syntax flag, enables no optional regexp syntax.
/// </summary>
public const int NONE = 0x0000;
private static bool Allow_mutation = false;
internal Kind kind;
internal RegExp Exp1, Exp2;
internal string s;
internal int c;
internal int Min, Max, Digits;
internal int From, To;
internal string b;
internal int Flags;
internal int Pos;
internal RegExp()
{
}
/// <summary>
/// Constructs new <code>RegExp</code> from a string. Same as
/// <code>RegExp(s, ALL)</code>.
/// </summary>
/// <param name="s"> regexp string </param>
/// <exception cref="ArgumentException"> if an error occurred while parsing the
/// regular expression </exception>
public RegExp(string s)
: this(s, ALL)
{
}
/// <summary>
/// Constructs new <code>RegExp</code> from a string.
/// </summary>
/// <param name="s"> regexp string </param>
/// <param name="syntax_flags"> boolean 'or' of optional syntax constructs to be
/// enabled </param>
/// <exception cref="ArgumentException"> if an error occurred while parsing the
/// regular expression </exception>
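/// <example>
/// A sketch of enabling a subset of the optional syntax; the pattern matches strings containing "bar" that do not match foo.* :
/// <code>
/// RegExp re = new RegExp("~(foo.*)&amp;.*bar.*", RegExp.COMPLEMENT | RegExp.INTERSECTION);
/// </code>
/// </example>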
public RegExp(string s, int syntax_flags)
{
b = s;
Flags = syntax_flags;
RegExp e;
if (s.Length == 0)
{
e = MakeString("");
}
else
{
e = ParseUnionExp();
if (Pos < b.Length)
{
throw new System.ArgumentException("end-of-string expected at position " + Pos);
}
}
kind = e.kind;
Exp1 = e.Exp1;
Exp2 = e.Exp2;
this.s = e.s;
c = e.c;
Min = e.Min;
Max = e.Max;
Digits = e.Digits;
From = e.From;
To = e.To;
b = null;
}
/// <summary>
/// Constructs new <code>Automaton</code> from this <code>RegExp</code>. Same
/// as <code>toAutomaton(null)</code> (empty automaton map).
/// </summary>
public virtual Automaton ToAutomaton()
{
return ToAutomatonAllowMutate(null, null);
}
/// <summary>
/// Constructs new <code>Automaton</code> from this <code>RegExp</code>. The
/// constructed automaton is minimal and deterministic and has no transitions
/// to dead states.
/// </summary>
/// <param name="automaton_provider"> provider of automata for named identifiers </param>
/// <exception cref="ArgumentException"> if this regular expression uses a named
/// identifier that is not available from the automaton provider </exception>
public virtual Automaton ToAutomaton(AutomatonProvider automaton_provider)
{
return ToAutomatonAllowMutate(null, automaton_provider);
}
/// <summary>
/// Constructs new <code>Automaton</code> from this <code>RegExp</code>. The
/// constructed automaton is minimal and deterministic and has no transitions
/// to dead states.
/// </summary>
/// <param name="automata"> a map from automaton identifiers to automata (of type
/// <code>Automaton</code>). </param>
/// <exception cref="ArgumentException"> if this regular expression uses a named
/// identifier that does not occur in the automaton map </exception>
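/// <example>
/// A sketch of resolving a named automaton through the map; the "digits" identifier is illustrative:
/// <code>
/// var automata = new Dictionary&lt;string, Automaton&gt;
/// {
///     { "digits", BasicAutomata.MakeCharRange('0', '9') }
/// };
/// Automaton a = new RegExp("&lt;digits&gt;+", RegExp.AUTOMATON).ToAutomaton(automata);
/// </code>
/// </example>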
public virtual Automaton ToAutomaton(IDictionary<string, Automaton> automata)
{
return ToAutomatonAllowMutate(automata, null);
}
/// <summary>
/// Sets or resets allow mutate flag. If this flag is set, then automata
/// construction uses mutable automata, which is slightly faster but not thread
/// safe. By default, the flag is not set.
/// </summary>
/// <param name="flag"> if true, the flag is set </param>
/// <returns> previous value of the flag </returns>
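/// <example>
/// A hedged sketch; as noted above, this trades thread safety for slightly faster construction:
/// <code>
/// RegExp re = new RegExp("a+b*");
/// bool previous = re.SetAllowMutate(true);   // use mutable automata while building
/// Automaton a = re.ToAutomaton();
/// re.SetAllowMutate(previous);               // restore the earlier setting
/// </code>
/// </example>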
public virtual bool SetAllowMutate(bool flag)
{
bool b = Allow_mutation;
Allow_mutation = flag;
return b;
}
private Automaton ToAutomatonAllowMutate(IDictionary<string, Automaton> automata, AutomatonProvider automaton_provider)
{
bool b = false;
if (Allow_mutation) // thread unsafe
{
b = Automaton.SetAllowMutate(true);
}
Automaton a = ToAutomaton(automata, automaton_provider);
if (Allow_mutation)
{
Automaton.AllowMutate = b;
}
return a;
}
private Automaton ToAutomaton(IDictionary<string, Automaton> automata, AutomatonProvider automaton_provider)
{
IList<Automaton> list;
Automaton a = null;
switch (kind)
{
case Kind.REGEXP_UNION:
list = new List<Automaton>();
FindLeaves(Exp1, Kind.REGEXP_UNION, list, automata, automaton_provider);
FindLeaves(Exp2, Kind.REGEXP_UNION, list, automata, automaton_provider);
a = BasicOperations.Union(list);
MinimizationOperations.Minimize(a);
break;
case Kind.REGEXP_CONCATENATION:
list = new List<Automaton>();
FindLeaves(Exp1, Kind.REGEXP_CONCATENATION, list, automata, automaton_provider);
FindLeaves(Exp2, Kind.REGEXP_CONCATENATION, list, automata, automaton_provider);
a = BasicOperations.Concatenate(list);
MinimizationOperations.Minimize(a);
break;
case Kind.REGEXP_INTERSECTION:
a = Exp1.ToAutomaton(automata, automaton_provider).Intersection(Exp2.ToAutomaton(automata, automaton_provider));
MinimizationOperations.Minimize(a);
break;
case Kind.REGEXP_OPTIONAL:
a = Exp1.ToAutomaton(automata, automaton_provider).Optional();
MinimizationOperations.Minimize(a);
break;
case Kind.REGEXP_REPEAT:
a = Exp1.ToAutomaton(automata, automaton_provider).Repeat();
MinimizationOperations.Minimize(a);
break;
case Kind.REGEXP_REPEAT_MIN:
a = Exp1.ToAutomaton(automata, automaton_provider).Repeat(Min);
MinimizationOperations.Minimize(a);
break;
case Kind.REGEXP_REPEAT_MINMAX:
a = Exp1.ToAutomaton(automata, automaton_provider).Repeat(Min, Max);
MinimizationOperations.Minimize(a);
break;
case Kind.REGEXP_COMPLEMENT:
a = Exp1.ToAutomaton(automata, automaton_provider).Complement();
MinimizationOperations.Minimize(a);
break;
case Kind.REGEXP_CHAR:
a = BasicAutomata.MakeChar(c);
break;
case Kind.REGEXP_CHAR_RANGE:
a = BasicAutomata.MakeCharRange(From, To);
break;
case Kind.REGEXP_ANYCHAR:
a = BasicAutomata.MakeAnyChar();
break;
case Kind.REGEXP_EMPTY:
a = BasicAutomata.MakeEmpty();
break;
case Kind.REGEXP_STRING:
a = BasicAutomata.MakeString(s);
break;
case Kind.REGEXP_ANYSTRING:
a = BasicAutomata.MakeAnyString();
break;
case Kind.REGEXP_AUTOMATON:
Automaton aa = null;
if (automata != null)
{
aa = automata[s];
}
if (aa == null && automaton_provider != null)
{
try
{
aa = automaton_provider.GetAutomaton(s);
}
catch (System.IO.IOException e)
{
throw new System.ArgumentException(e.Message, e);
}
}
if (aa == null)
{
throw new System.ArgumentException("'" + s + "' not found");
}
a = (Automaton)aa.Clone(); // always clone here (ignore allow_mutate)
break;
case Kind.REGEXP_INTERVAL:
a = BasicAutomata.MakeInterval(Min, Max, Digits);
break;
}
return a;
}
private void FindLeaves(RegExp exp, Kind kind, IList<Automaton> list, IDictionary<string, Automaton> automata, AutomatonProvider automaton_provider)
{
if (exp.kind == kind)
{
FindLeaves(exp.Exp1, kind, list, automata, automaton_provider);
FindLeaves(exp.Exp2, kind, list, automata, automaton_provider);
}
else
{
list.Add(exp.ToAutomaton(automata, automaton_provider));
}
}
/// <summary>
/// Constructs string from parsed regular expression.
/// </summary>
public override string ToString()
{
return ToStringBuilder(new StringBuilder()).ToString();
}
internal virtual StringBuilder ToStringBuilder(StringBuilder b)
{
switch (kind)
{
case Kind.REGEXP_UNION:
b.Append("(");
Exp1.ToStringBuilder(b);
b.Append("|");
Exp2.ToStringBuilder(b);
b.Append(")");
break;
case Kind.REGEXP_CONCATENATION:
Exp1.ToStringBuilder(b);
Exp2.ToStringBuilder(b);
break;
case Kind.REGEXP_INTERSECTION:
b.Append("(");
Exp1.ToStringBuilder(b);
b.Append("&");
Exp2.ToStringBuilder(b);
b.Append(")");
break;
case Kind.REGEXP_OPTIONAL:
b.Append("(");
Exp1.ToStringBuilder(b);
b.Append(")?");
break;
case Kind.REGEXP_REPEAT:
b.Append("(");
Exp1.ToStringBuilder(b);
b.Append(")*");
break;
case Kind.REGEXP_REPEAT_MIN:
b.Append("(");
Exp1.ToStringBuilder(b);
b.Append("){").Append(Min).Append(",}");
break;
case Kind.REGEXP_REPEAT_MINMAX:
b.Append("(");
Exp1.ToStringBuilder(b);
b.Append("){").Append(Min).Append(",").Append(Max).Append("}");
break;
case Kind.REGEXP_COMPLEMENT:
b.Append("~(");
Exp1.ToStringBuilder(b);
b.Append(")");
break;
case Kind.REGEXP_CHAR:
b.Append("\\").Append(Character.ToChars(c));
break;
case Kind.REGEXP_CHAR_RANGE:
b.Append("[\\").Append(Character.ToChars(From)).Append("-\\").Append(Character.ToChars(To)).Append("]");
break;
case Kind.REGEXP_ANYCHAR:
b.Append(".");
break;
case Kind.REGEXP_EMPTY:
b.Append("#");
break;
case Kind.REGEXP_STRING:
b.Append("\"").Append(s).Append("\"");
break;
case Kind.REGEXP_ANYSTRING:
b.Append("@");
break;
case Kind.REGEXP_AUTOMATON:
b.Append("<").Append(s).Append(">");
break;
case Kind.REGEXP_INTERVAL:
string s1 = Convert.ToString(Min);
string s2 = Convert.ToString(Max);
b.Append("<");
if (Digits > 0)
{
for (int i = s1.Length; i < Digits; i++)
{
b.Append('0');
}
}
b.Append(s1).Append("-");
if (Digits > 0)
{
for (int i = s2.Length; i < Digits; i++)
{
b.Append('0');
}
}
b.Append(s2).Append(">");
break;
}
return b;
}
/// <summary>
/// Returns set of automaton identifiers that occur in this regular expression.
/// </summary>
public virtual ISet<string> Identifiers
{
get
{
HashSet<string> set = new HashSet<string>();
GetIdentifiers(set);
return set;
}
}
internal virtual void GetIdentifiers(ISet<string> set)
{
switch (kind)
{
case Kind.REGEXP_UNION:
case Kind.REGEXP_CONCATENATION:
case Kind.REGEXP_INTERSECTION:
Exp1.GetIdentifiers(set);
Exp2.GetIdentifiers(set);
break;
case Kind.REGEXP_OPTIONAL:
case Kind.REGEXP_REPEAT:
case Kind.REGEXP_REPEAT_MIN:
case Kind.REGEXP_REPEAT_MINMAX:
case Kind.REGEXP_COMPLEMENT:
Exp1.GetIdentifiers(set);
break;
case Kind.REGEXP_AUTOMATON:
set.Add(s);
break;
default:
break;
}
}
internal static RegExp MakeUnion(RegExp exp1, RegExp exp2)
{
RegExp r = new RegExp();
r.kind = Kind.REGEXP_UNION;
r.Exp1 = exp1;
r.Exp2 = exp2;
return r;
}
internal static RegExp MakeConcatenation(RegExp exp1, RegExp exp2)
{
if ((exp1.kind == Kind.REGEXP_CHAR || exp1.kind == Kind.REGEXP_STRING) && (exp2.kind == Kind.REGEXP_CHAR || exp2.kind == Kind.REGEXP_STRING))
{
return MakeString(exp1, exp2);
}
RegExp r = new RegExp();
r.kind = Kind.REGEXP_CONCATENATION;
if (exp1.kind == Kind.REGEXP_CONCATENATION && (exp1.Exp2.kind == Kind.REGEXP_CHAR || exp1.Exp2.kind == Kind.REGEXP_STRING) && (exp2.kind == Kind.REGEXP_CHAR || exp2.kind == Kind.REGEXP_STRING))
{
r.Exp1 = exp1.Exp1;
r.Exp2 = MakeString(exp1.Exp2, exp2);
}
else if ((exp1.kind == Kind.REGEXP_CHAR || exp1.kind == Kind.REGEXP_STRING) && exp2.kind == Kind.REGEXP_CONCATENATION && (exp2.Exp1.kind == Kind.REGEXP_CHAR || exp2.Exp1.kind == Kind.REGEXP_STRING))
{
r.Exp1 = MakeString(exp1, exp2.Exp1);
r.Exp2 = exp2.Exp2;
}
else
{
r.Exp1 = exp1;
r.Exp2 = exp2;
}
return r;
}
private static RegExp MakeString(RegExp exp1, RegExp exp2)
{
StringBuilder b = new StringBuilder();
if (exp1.kind == Kind.REGEXP_STRING)
{
b.Append(exp1.s);
}
else
{
b.Append(Character.ToChars(exp1.c));
}
if (exp2.kind == Kind.REGEXP_STRING)
{
b.Append(exp2.s);
}
else
{
b.Append(Character.ToChars(exp2.c));
}
return MakeString(b.ToString());
}
internal static RegExp MakeIntersection(RegExp exp1, RegExp exp2)
{
RegExp r = new RegExp();
r.kind = Kind.REGEXP_INTERSECTION;
r.Exp1 = exp1;
r.Exp2 = exp2;
return r;
}
internal static RegExp MakeOptional(RegExp exp)
{
RegExp r = new RegExp();
r.kind = Kind.REGEXP_OPTIONAL;
r.Exp1 = exp;
return r;
}
internal static RegExp MakeRepeat(RegExp exp)
{
RegExp r = new RegExp();
r.kind = Kind.REGEXP_REPEAT;
r.Exp1 = exp;
return r;
}
internal static RegExp MakeRepeat(RegExp exp, int min)
{
RegExp r = new RegExp();
r.kind = Kind.REGEXP_REPEAT_MIN;
r.Exp1 = exp;
r.Min = min;
return r;
}
internal static RegExp MakeRepeat(RegExp exp, int min, int max)
{
RegExp r = new RegExp();
r.kind = Kind.REGEXP_REPEAT_MINMAX;
r.Exp1 = exp;
r.Min = min;
r.Max = max;
return r;
}
internal static RegExp MakeComplement(RegExp exp)
{
RegExp r = new RegExp();
r.kind = Kind.REGEXP_COMPLEMENT;
r.Exp1 = exp;
return r;
}
internal static RegExp MakeChar(int c)
{
RegExp r = new RegExp();
r.kind = Kind.REGEXP_CHAR;
r.c = c;
return r;
}
internal static RegExp MakeCharRange(int from, int to)
{
if (from > to)
{
throw new System.ArgumentException("invalid range: from (" + from + ") cannot be > to (" + to + ")");
}
RegExp r = new RegExp();
r.kind = Kind.REGEXP_CHAR_RANGE;
r.From = from;
r.To = to;
return r;
}
internal static RegExp MakeAnyChar()
{
RegExp r = new RegExp();
r.kind = Kind.REGEXP_ANYCHAR;
return r;
}
internal static RegExp MakeEmpty()
{
RegExp r = new RegExp();
r.kind = Kind.REGEXP_EMPTY;
return r;
}
internal static RegExp MakeString(string s)
{
RegExp r = new RegExp();
r.kind = Kind.REGEXP_STRING;
r.s = s;
return r;
}
internal static RegExp MakeAnyString()
{
RegExp r = new RegExp();
r.kind = Kind.REGEXP_ANYSTRING;
return r;
}
internal static RegExp MakeAutomaton(string s)
{
RegExp r = new RegExp();
r.kind = Kind.REGEXP_AUTOMATON;
r.s = s;
return r;
}
internal static RegExp MakeInterval(int min, int max, int digits)
{
RegExp r = new RegExp();
r.kind = Kind.REGEXP_INTERVAL;
r.Min = min;
r.Max = max;
r.Digits = digits;
return r;
}
private bool Peek(string s)
{
return More() && s.IndexOf((char)Character.CodePointAt(b, Pos)) != -1;
}
private bool Match(int c)
{
if (Pos >= b.Length)
{
return false;
}
if (Character.CodePointAt(b, Pos) == c)
{
Pos += Character.CharCount(c);
return true;
}
return false;
}
private bool More()
{
return Pos < b.Length;
}
private int Next()
{
if (!More())
{
throw new System.ArgumentException("unexpected end-of-string");
}
int ch = Character.CodePointAt(b, Pos);
Pos += Character.CharCount(ch);
return ch;
}
private bool Check(int flag)
{
return (Flags & flag) != 0;
}
internal RegExp ParseUnionExp()
{
RegExp e = ParseInterExp();
if (Match('|'))
{
e = MakeUnion(e, ParseUnionExp());
}
return e;
}
internal RegExp ParseInterExp()
{
RegExp e = ParseConcatExp();
if (Check(INTERSECTION) && Match('&'))
{
e = MakeIntersection(e, ParseInterExp());
}
return e;
}
internal RegExp ParseConcatExp()
{
RegExp e = ParseRepeatExp();
if (More() && !Peek(")|") && (!Check(INTERSECTION) || !Peek("&")))
{
e = MakeConcatenation(e, ParseConcatExp());
}
return e;
}
internal RegExp ParseRepeatExp()
{
RegExp e = ParseComplExp();
while (Peek("?*+{"))
{
if (Match('?'))
{
e = MakeOptional(e);
}
else if (Match('*'))
{
e = MakeRepeat(e);
}
else if (Match('+'))
{
e = MakeRepeat(e, 1);
}
else if (Match('{'))
{
int start = Pos;
while (Peek("0123456789"))
{
Next();
}
if (start == Pos)
{
throw new System.ArgumentException("integer expected at position " + Pos);
}
int n = Convert.ToInt32(b.Substring(start, Pos - start));
int m = -1;
if (Match(','))
{
start = Pos;
while (Peek("0123456789"))
{
Next();
}
if (start != Pos)
{
m = Convert.ToInt32(b.Substring(start, Pos - start));
}
}
else
{
m = n;
}
if (!Match('}'))
{
throw new System.ArgumentException("expected '}' at position " + Pos);
}
if (m == -1)
{
e = MakeRepeat(e, n);
}
else
{
e = MakeRepeat(e, n, m);
}
}
}
return e;
}
internal RegExp ParseComplExp()
{
if (Check(COMPLEMENT) && Match('~'))
{
return MakeComplement(ParseComplExp());
}
else
{
return ParseCharClassExp();
}
}
internal RegExp ParseCharClassExp()
{
if (Match('['))
{
bool negate = false;
if (Match('^'))
{
negate = true;
}
RegExp e = ParseCharClasses();
if (negate)
{
e = MakeIntersection(MakeAnyChar(), MakeComplement(e));
}
if (!Match(']'))
{
throw new System.ArgumentException("expected ']' at position " + Pos);
}
return e;
}
else
{
return ParseSimpleExp();
}
}
internal RegExp ParseCharClasses()
{
RegExp e = ParseCharClass();
while (More() && !Peek("]"))
{
e = MakeUnion(e, ParseCharClass());
}
return e;
}
internal RegExp ParseCharClass()
{
int c = ParseCharExp();
if (Match('-'))
{
return MakeCharRange(c, ParseCharExp());
}
else
{
return MakeChar(c);
}
}
internal RegExp ParseSimpleExp()
{
if (Match('.'))
{
return MakeAnyChar();
}
else if (Check(EMPTY) && Match('#'))
{
return MakeEmpty();
}
else if (Check(ANYSTRING) && Match('@'))
{
return MakeAnyString();
}
else if (Match('"'))
{
int start = Pos;
while (More() && !Peek("\""))
{
Next();
}
if (!Match('"'))
{
throw new System.ArgumentException("expected '\"' at position " + Pos);
}
return MakeString(b.Substring(start, Pos - 1 - start));
}
else if (Match('('))
{
if (Match(')'))
{
return MakeString("");
}
RegExp e = ParseUnionExp();
if (!Match(')'))
{
throw new System.ArgumentException("expected ')' at position " + Pos);
}
return e;
}
else if ((Check(AUTOMATON) || Check(INTERVAL)) && Match('<'))
{
int start = Pos;
while (More() && !Peek(">"))
{
Next();
}
if (!Match('>'))
{
throw new System.ArgumentException("expected '>' at position " + Pos);
}
string s = b.Substring(start, Pos - 1 - start);
int i = s.IndexOf('-');
if (i == -1)
{
if (!Check(AUTOMATON))
{
throw new System.ArgumentException("interval syntax error at position " + (Pos - 1));
}
return MakeAutomaton(s);
}
else
{
if (!Check(INTERVAL))
{
throw new System.ArgumentException("illegal identifier at position " + (Pos - 1));
}
try
{
if (i == 0 || i == s.Length - 1 || i != s.LastIndexOf('-'))
{
throw new System.FormatException();
}
string smin = s.Substring(0, i);
string smax = s.Substring(i + 1, s.Length - (i + 1));
int imin = Convert.ToInt32(smin);
int imax = Convert.ToInt32(smax);
int digits;
if (smin.Length == smax.Length)
{
digits = smin.Length;
}
else
{
digits = 0;
}
if (imin > imax)
{
int t = imin;
imin = imax;
imax = t;
}
return MakeInterval(imin, imax, digits);
}
catch (System.FormatException e)
{
    throw new System.ArgumentException("interval syntax error at position " + (Pos - 1), e);
}
}
}
else
{
return MakeChar(ParseCharExp());
}
}
internal int ParseCharExp()
{
Match('\\');
return Next();
}
}
}
| |
/*
Copyright 2012 Michael Edwards
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
//-CRE-
using System;
using System.IO;
using System.Linq.Expressions;
using System.Web.UI;
using Sitecore.Data.Items;
using Sitecore.Web.UI;
namespace Glass.Mapper.Sc.Web.Ui
{
/// <summary>
/// Class AbstractGlassWebControl
/// </summary>
public abstract class AbstractGlassWebControl : WebControl
{
private TextWriter _writer;
protected TextWriter Output
{
get { return _writer ?? System.Web.HttpContext.Current.Response.Output; }
}
private ISitecoreContext _sitecoreContext;
private IGlassHtml _glassHtml;
/// <summary>
/// Initializes a new instance of the <see cref="AbstractGlassWebControl" /> class.
/// </summary>
/// <param name="context">The context.</param>
/// <param name="glassHtml"></param>
public AbstractGlassWebControl(ISitecoreContext context, IGlassHtml glassHtml)
{
_glassHtml = glassHtml;
_sitecoreContext = context;
}
/// <summary>
/// Initializes a new instance of the <see cref="AbstractGlassWebControl" /> class.
/// </summary>
public AbstractGlassWebControl()
: this(null, null)
{
}
protected override void OnInit(EventArgs e)
{
//we have to activate it here because of
//some weird lifecycle stuff in the page editor
if (_sitecoreContext == null)
{
_sitecoreContext = Sc.SitecoreContext.GetFromHttpContext();
_glassHtml = new GlassHtml(_sitecoreContext);
}
base.OnInit(e);
}
/// <summary>
/// Gets a value indicating whether this instance is in editing mode.
/// </summary>
/// <value>
/// <c>true</c> if this instance is in editing mode; otherwise, <c>false</c>.
/// </value>
public bool IsInEditingMode
{
get { return Sc.GlassHtml.IsInEditingMode; }
}
/// <summary>
/// Represents the current Sitecore context
/// </summary>
/// <value>The sitecore context.</value>
public ISitecoreContext SitecoreContext
{
get { return _sitecoreContext; }
}
/// <summary>
/// Access to rendering helpers
/// </summary>
/// <value>The glass HTML.</value>
protected virtual IGlassHtml GlassHtml
{
get { return _glassHtml; }
set { _glassHtml = value; }
}
private string _dataSource = null;
/// <summary>
/// The custom data source for the sublayout
/// </summary>
/// <value>The data source.</value>
public new string DataSource
{
get
{
if (_dataSource == null)
{
var parent = Parent as WebControl;
_dataSource = parent == null ? string.Empty : parent.DataSource;
}
return _dataSource;
}
set
{
_dataSource = value;
}
}
/// <summary>
/// Returns either the item specified by the DataSource or the current context item
/// </summary>
/// <value>The layout item.</value>
public Item LayoutItem
{
get { return DataSourceItem ?? Sitecore.Context.Item; }
}
/// <summary>
/// The Sitecore Item pulled from either the DataSource or Context.
/// </summary>
public Item DataSourceItem
{
get
{
return DataSource.IsNullOrEmpty()
? null
: Sitecore.Context.Database.GetItem(DataSource);
}
}
/// <summary>
/// Makes a field editable via the Page Editor. Use the Model property as the target item, e.g. model => model.Title where Title is field name.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="model">The model.</param>
/// <param name="field">The field.</param>
/// <param name="parameters">The parameters.</param>
/// <returns>System.String.</returns>
public string Editable<T>(T model, Expression<Func<T, object>> field, object parameters = null)
{
return GlassHtml.Editable(model, field, parameters);
}
/// <summary>
/// Makes a field editable via the Page Editor. Use the Model property as the target item, e.g. model => model.Title where Title is field name.
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="model">The model.</param>
/// <param name="field">The field.</param>
/// <param name="standardOutput">The standard output.</param>
/// <param name="parameters">The parameters.</param>
/// <returns>System.String.</returns>
public string Editable<T>(T model, Expression<Func<T, object>> field, Expression<Func<T, string>> standardOutput,
object parameters = null)
{
return GlassHtml.Editable(model, field, standardOutput, parameters);
}
/// <summary>
/// Renders an image allowing simple page editor support
/// </summary>
/// <typeparam name="T">The model type</typeparam>
/// <param name="model">The model that contains the image field</param>
/// <param name="field">A lambda expression to the image field, should be of type Glass.Mapper.Sc.Fields.Image</param>
/// <param name="parameters">Image parameters, e.g. width, height</param>
/// <param name="isEditable">Indicates if the field should be editable</param>
/// <param name="outputHeightWidth">Indicates if the height and width attributes should be outputted when rendering the image</param>
/// <returns></returns>
public virtual string RenderImage<T>(T model,
Expression<Func<T, object>> field,
object parameters = null,
bool isEditable = false,
bool outputHeightWidth = true)
{
return GlassHtml.RenderImage(model, field, parameters, isEditable, outputHeightWidth);
}
/// <summary>
/// Render HTML for a link with contents
/// </summary>
/// <typeparam name="T">The model type</typeparam>
/// <param name="model">The model</param>
/// <param name="field">The link field to use</param>
/// <param name="attributes">Any additional link attributes</param>
/// <param name="isEditable">Make the link editable</param>
/// <returns></returns>
public virtual RenderingResult BeginRenderLink<T>(T model, Expression<Func<T, object>> field,
object attributes = null, bool isEditable = false)
{
return GlassHtml.BeginRenderLink(model, field, this.Output, attributes, isEditable);
}
/// <summary>
/// Render HTML for a link
/// </summary>
/// <typeparam name="T">The model type</typeparam>
/// <param name="model">The model</param>
/// <param name="field">The link field to use</param>
/// <param name="attributes">Any additional link attributes</param>
/// <param name="isEditable">Make the link editable</param>
/// <param name="contents">Content to override the default description or item name</param>
/// <returns></returns>
public virtual string RenderLink<T>(T model, Expression<Func<T, object>> field, object attributes = null,
bool isEditable = false, string contents = null)
{
return GlassHtml.RenderLink(model, field, attributes, isEditable, contents);
}
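// Minimal usage sketch (an assumption, not part of the original source): a derived control would
// typically resolve a typed model via SitecoreContext and call the helpers above when rendering.
// INewsItem, Title and Thumbnail below are hypothetical names used only for illustration.
//
//   public class NewsTeaser : AbstractGlassWebControl
//   {
//       protected override void DoRender(HtmlTextWriter output)
//       {
//           var model = SitecoreContext.GetCurrentItem<INewsItem>();
//           output.Write(Editable(model, x => x.Title));
//           output.Write(RenderImage(model, x => x.Thumbnail, new { width = 200 }, isEditable: true));
//       }
//   }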
public override void RenderControl(HtmlTextWriter writer)
{
this._writer = writer;
base.RenderControl(writer);
}
}
}
| |
using System;
using System.Xml.Xsl;
using System.Xml;
using System.Xml.XPath;
using System.Reflection;
namespace Hydra.Framework.XmlSerialization.Exslt
{
//
//**********************************************************************
/// <summary>
/// Custom <see cref="XsltContext"/> implementation providing support for EXSLT
/// functions in XPath-only environment.
/// </summary>
//**********************************************************************
//
public class ExsltContext : XsltContext
{
#region Private Fields and Properties
private XmlNameTable _nt;
//
//**********************************************************************
/// <summary>
/// Bitwise enumeration used to specify which EXSLT functions should be accessible
/// in the ExsltContext object. The default value is ExsltFunctionNamespace.All.
/// </summary>
//**********************************************************************
//
private ExsltFunctionNamespace _supportedFunctions = ExsltFunctionNamespace.All;
//
//**********************************************************************
/// <summary>
/// Extension object which implements the functions in the http://exslt.org/math namespace
/// </summary>
//**********************************************************************
//
private ExsltMath exsltMath = new ExsltMath();
//
//**********************************************************************
/// <summary>
/// Extension object which implements the functions in the http://exslt.org/dates-and-times namespace
/// </summary>
//**********************************************************************
//
private ExsltDatesAndTimes exsltDatesAndTimes = new ExsltDatesAndTimes();
//
//**********************************************************************
/// <summary>
/// Extension object which implements the functions in the http://exslt.org/regular-expressions namespace
/// </summary>
//**********************************************************************
//
private ExsltRegularExpressions exsltRegularExpressions = new ExsltRegularExpressions();
//
//**********************************************************************
/// <summary>
/// Extension object which implements the functions in the http://exslt.org/strings namespace
/// </summary>
//**********************************************************************
//
private ExsltStrings exsltStrings = new ExsltStrings();
//
//**********************************************************************
/// <summary>
/// Extension object which implements the functions in the http://exslt.org/sets namespace
/// </summary>
//**********************************************************************
//
private ExsltSets exsltSets = new ExsltSets();
//
//**********************************************************************
/// <summary>
/// Extension object which implements the functions in the http://exslt.org/random namespace
/// </summary>
//**********************************************************************
//
private ExsltRandom exsltRandom = new ExsltRandom();
//
//**********************************************************************
/// <summary>
/// Extension object which implements the functions in the http://gotdotnet.com/exslt/dates-and-times namespace
/// </summary>
//**********************************************************************
//
private GDNDatesAndTimes gdnDatesAndTimes = new GDNDatesAndTimes();
//
//**********************************************************************
/// <summary>
/// Extension object which implements the functions in the http://gotdotnet.com/exslt/regular-expressions namespace
/// </summary>
//**********************************************************************
//
private GDNRegularExpressions gdnRegularExpressions = new GDNRegularExpressions();
//
//**********************************************************************
/// <summary>
/// Extension object which implements the functions in the http://gotdotnet.com/exslt/math namespace
/// </summary>
//**********************************************************************
//
private GDNMath gdnMath = new GDNMath();
//
//**********************************************************************
/// <summary>
/// Extension object which implements the functions in the http://gotdotnet.com/exslt/sets namespace
/// </summary>
//**********************************************************************
//
private GDNSets gdnSets = new GDNSets();
//
//**********************************************************************
/// <summary>
/// Extension object which implements the functions in the http://gotdotnet.com/exslt/strings namespace
/// </summary>
//**********************************************************************
//
private GDNStrings gdnStrings = new GDNStrings();
//
//**********************************************************************
/// <summary>
/// Extension object which implements the functions in the http://gotdotnet.com/exslt/dynamic namespace
/// </summary>
//**********************************************************************
//
private GDNDynamic gdnDynamic = new GDNDynamic();
#endregion
#region Constructors
//
//**********************************************************************
/// <summary>
/// Creates new ExsltContext instance.
/// </summary>
//**********************************************************************
//
public ExsltContext(XmlNameTable nt)
: base((NameTable)nt)
{
_nt = nt;
AddExtensionNamespaces();
}
//
//**********************************************************************
/// <summary>
/// Creates new ExsltContext instance.
/// </summary>
//**********************************************************************
//
public ExsltContext(NameTable nt, ExsltFunctionNamespace supportedFunctions)
: this(nt)
{
SupportedFunctions = supportedFunctions;
}
#endregion
#region Private methods
private void AddExtensionNamespaces()
{
//remove all our extension objects in case the ExsltContext is being reused
RemoveNamespace("math", ExsltNamespaces.Math);
RemoveNamespace("date", ExsltNamespaces.DatesAndTimes);
RemoveNamespace("regexp", ExsltNamespaces.RegularExpressions);
RemoveNamespace("str", ExsltNamespaces.Strings);
RemoveNamespace("set", ExsltNamespaces.Sets);
RemoveNamespace("random", ExsltNamespaces.Random);
RemoveNamespace("date2", ExsltNamespaces.GDNDatesAndTimes);
RemoveNamespace("math2", ExsltNamespaces.GDNMath);
RemoveNamespace("regexp2", ExsltNamespaces.GDNRegularExpressions);
RemoveNamespace("set2", ExsltNamespaces.GDNSets);
RemoveNamespace("str2", ExsltNamespaces.GDNStrings);
RemoveNamespace("dyn2", ExsltNamespaces.GDNDynamic);
//add extension objects as specified by SupportedFunctions
if ((this.SupportedFunctions & ExsltFunctionNamespace.Math) > 0)
AddNamespace("math", ExsltNamespaces.Math);
if ((this.SupportedFunctions & ExsltFunctionNamespace.DatesAndTimes) > 0)
AddNamespace("date", ExsltNamespaces.DatesAndTimes);
if ((this.SupportedFunctions & ExsltFunctionNamespace.RegularExpressions) > 0)
AddNamespace("regexp", ExsltNamespaces.RegularExpressions);
if ((this.SupportedFunctions & ExsltFunctionNamespace.Strings) > 0)
AddNamespace("str", ExsltNamespaces.Strings);
if ((this.SupportedFunctions & ExsltFunctionNamespace.Sets) > 0)
AddNamespace("set", ExsltNamespaces.Sets);
if ((this.SupportedFunctions & ExsltFunctionNamespace.Random) > 0)
AddNamespace("random", ExsltNamespaces.Random);
if ((this.SupportedFunctions & ExsltFunctionNamespace.GDNDatesAndTimes) > 0)
AddNamespace("date2", ExsltNamespaces.GDNDatesAndTimes);
if ((this.SupportedFunctions & ExsltFunctionNamespace.GDNMath) > 0)
AddNamespace("math2", ExsltNamespaces.GDNMath);
if ((this.SupportedFunctions & ExsltFunctionNamespace.GDNRegularExpressions) > 0)
AddNamespace("regexp2", ExsltNamespaces.GDNRegularExpressions);
if ((this.SupportedFunctions & ExsltFunctionNamespace.GDNSets) > 0)
AddNamespace("set2", ExsltNamespaces.GDNSets);
if ((this.SupportedFunctions & ExsltFunctionNamespace.GDNStrings) > 0)
AddNamespace("str2", ExsltNamespaces.GDNStrings);
if ((this.SupportedFunctions & ExsltFunctionNamespace.GDNDynamic) > 0)
AddNamespace("dyn2", ExsltNamespaces.GDNDynamic);
}
#endregion
#region Public Properties
//
//**********************************************************************
/// <summary>
/// Bitwise enumeration used to specify which EXSLT functions should be accessible
/// in the ExsltContext. The default value is ExsltFunctionNamespace.All.
/// </summary>
//**********************************************************************
//
public ExsltFunctionNamespace SupportedFunctions
{
set
{
if (Enum.IsDefined(typeof(ExsltFunctionNamespace), value))
_supportedFunctions = value;
}
get { return _supportedFunctions; }
}
#endregion
#region XsltContext Overrides
//
//**********************************************************************
/// <summary>
/// See <see cref="XsltContext.CompareDocument"/>
/// </summary>
//**********************************************************************
//
public override int CompareDocument(string baseUri, string nextbaseUri)
{
return 0;
}
//
//**********************************************************************
/// <summary>
/// See <see cref="XsltContext.PreserveWhitespace"/>
/// </summary>
//**********************************************************************
//
public override bool PreserveWhitespace(XPathNavigator node)
{
return true;
}
//
//**********************************************************************
/// <summary>
/// See <see cref="XsltContext.Whitespace"/>
/// </summary>
//**********************************************************************
//
public override bool Whitespace
{
get { return true; }
}
//
//**********************************************************************
/// <summary>
/// Resolves variables.
/// </summary>
/// <param name="prefix">The variable's prefix</param>
/// <param name="name">The variable's name</param>
/// <returns>Always <c>null</c>; variables are not supported by this context.</returns>
//**********************************************************************
//
public override IXsltContextVariable ResolveVariable(string prefix, string name)
{
return null;
}
//
//**********************************************************************
/// <summary>
/// Resolves custom function in XPath expression.
/// </summary>
/// <param name="prefix">The prefix of the function as it appears in the XPath expression.</param>
/// <param name="name">The name of the function.</param>
/// <param name="argTypes">An array of argument types for the function being resolved.
/// This allows you to select between methods with the same name (for example, overloaded
/// methods). </param>
/// <returns>An IXsltContextFunction representing the function.</returns>
//**********************************************************************
//
public override IXsltContextFunction ResolveFunction(string prefix, string name,
XPathResultType[] argTypes)
{
switch (LookupNamespace(_nt.Get(prefix)))
{
case ExsltNamespaces.DatesAndTimes:
return GetExtensionFunctionImplementation(exsltDatesAndTimes, name, argTypes);
case ExsltNamespaces.Math:
return GetExtensionFunctionImplementation(exsltMath, name, argTypes);
case ExsltNamespaces.RegularExpressions:
return GetExtensionFunctionImplementation(exsltRegularExpressions, name, argTypes);
case ExsltNamespaces.Sets:
return GetExtensionFunctionImplementation(exsltSets, name, argTypes);
case ExsltNamespaces.Strings:
return GetExtensionFunctionImplementation(exsltStrings, name, argTypes);
case ExsltNamespaces.Random:
return GetExtensionFunctionImplementation(exsltRandom, name, argTypes);
case ExsltNamespaces.GDNDatesAndTimes:
return GetExtensionFunctionImplementation(gdnDatesAndTimes, name, argTypes);
case ExsltNamespaces.GDNMath:
return GetExtensionFunctionImplementation(gdnMath, name, argTypes);
case ExsltNamespaces.GDNRegularExpressions:
return GetExtensionFunctionImplementation(gdnRegularExpressions, name, argTypes);
case ExsltNamespaces.GDNSets:
return GetExtensionFunctionImplementation(gdnSets, name, argTypes);
case ExsltNamespaces.GDNStrings:
return GetExtensionFunctionImplementation(gdnStrings, name, argTypes);
case ExsltNamespaces.GDNDynamic:
return GetExtensionFunctionImplementation(gdnDynamic, name, argTypes);
default:
throw new XPathException(string.Format("Unrecognized extension function namespace: prefix='{0}', namespace URI='{1}'",
prefix, LookupNamespace(_nt.Get(prefix))), null);
}
}
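//
//**********************************************************************
// Usage sketch (illustrative, not part of the original source): an ExsltContext is attached to a
// compiled XPath expression, and Evaluate then routes any extension-function call through
// ResolveFunction above. "str:split" is used only as an example EXSLT function name.
//
//   XPathNavigator nav = document.CreateNavigator();      // document is a hypothetical XPathDocument
//   XPathExpression expr = nav.Compile("str:split('a b c', ' ')");
//   expr.SetContext(new ExsltContext(nav.NameTable));
//   object result = nav.Evaluate(expr);
//
//**********************************************************************
//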
#endregion
//
//**********************************************************************
/// <summary>
/// Finds appropriate implementation for an extension function - public
/// method with the same number of arguments and compatible argument types.
/// </summary>
/// <param name="obj">Extension object</param>
/// <param name="name">Function name</param>
/// <param name="argTypes">Types of arguments</param>
/// <returns>An <see cref="ExsltContextFunction"/> wrapping the matching method.</returns>
//**********************************************************************
//
private ExsltContextFunction GetExtensionFunctionImplementation(object obj, string name, XPathResultType[] argTypes)
{
//For each method in object's type
foreach (MethodInfo mi in obj.GetType().GetMethods())
{
//We are interested in methods with the given name
if (mi.Name == name)
{
ParameterInfo[] parameters = mi.GetParameters();
//We are interested in methods with the given number of arguments
if (parameters.Length == argTypes.Length)
{
bool mismatch = false;
//Now let's check out if parameter types are compatible with actual ones
for (int i = 0; i < parameters.Length; i++)
{
ParameterInfo pi = parameters[i];
XPathResultType paramType = ConvertToXPathType(pi.ParameterType);
if (paramType == XPathResultType.Any || paramType == argTypes[i])
continue;
else
{
mismatch = true;
break;
}
}
if (!mismatch)
//Create lightweight wrapper around method info
return new ExsltContextFunction(mi, argTypes, obj);
}
}
}
throw new XPathException("Extension function not found: " + name, null);
}
//
//**********************************************************************
/// <summary>
/// Converts CLI type to XPathResultType type.
/// </summary>
/// <param name="type">The CLI type to convert.</param>
/// <returns>The corresponding <see cref="XPathResultType"/>.</returns>
//**********************************************************************
//
public static XPathResultType ConvertToXPathType(Type type)
{
switch (Type.GetTypeCode(type))
{
case TypeCode.Boolean:
return XPathResultType.Boolean;
case TypeCode.String:
return XPathResultType.String;
case TypeCode.Object:
if (typeof(IXPathNavigable).IsAssignableFrom(type) ||
typeof(XPathNavigator).IsAssignableFrom(type))
return XPathResultType.Navigator;
else if (typeof(XPathNodeIterator).IsAssignableFrom(type))
return XPathResultType.NodeSet;
else
return XPathResultType.Any;
case TypeCode.DateTime:
case TypeCode.DBNull:
case TypeCode.Empty:
return XPathResultType.Error;
default:
return XPathResultType.Number;
}
}
//TODO: test it
///// <summary>
///// This is a workaround for some problem, see
///// http://www.tkachenko.com/blog/archives/000042.html for more
///// details.
///// </summary>
///// <param name="prefix">Prefix to be resolved</param>
///// <returns>Resolved namespace</returns>
//public override string LookupNamespace(string prefix)
//{
// if (prefix == String.Empty)
// return prefix;
// string uri = base.LookupNamespace(NameTable.Get(prefix));
// if (uri == null)
// throw new XsltException("Undeclared namespace prefix - " + prefix, null);
//
// return uri;
//}
//
}
} // namespace Hydra.Framework.XmlSerialization.Exslt
| |
namespace LeagueSeason5CountersService.Migrations
{
using System;
using System.Collections.Generic;
using System.Data.Entity.Infrastructure.Annotations;
using System.Data.Entity.Migrations;
public partial class AddCounters : DbMigration
{
public override void Up()
{
CreateTable(
"LeagueSeason5Counters.Counters",
c => new
{
Id = c.String(nullable: false, maxLength: 128,
annotations: new Dictionary<string, AnnotationValues>
{
{
"ServiceTableColumn",
new AnnotationValues(oldValue: null, newValue: "Id")
},
}),
Name = c.String(),
Score = c.Int(nullable: false),
ChampionFeedbackName = c.String(),
ChampionFeedbackId = c.String(maxLength: 128),
CreatedAt = c.DateTimeOffset(precision: 7,
annotations: new Dictionary<string, AnnotationValues>
{
{
"ServiceTableColumn",
new AnnotationValues(oldValue: null, newValue: "CreatedAt")
},
}),
Deleted = c.Boolean(nullable: false,
annotations: new Dictionary<string, AnnotationValues>
{
{
"ServiceTableColumn",
new AnnotationValues(oldValue: null, newValue: "Deleted")
},
}),
UpdatedAt = c.DateTimeOffset(precision: 7,
annotations: new Dictionary<string, AnnotationValues>
{
{
"ServiceTableColumn",
new AnnotationValues(oldValue: null, newValue: "UpdatedAt")
},
}),
Version = c.Binary(nullable: false, fixedLength: true, timestamp: true, storeType: "rowversion",
annotations: new Dictionary<string, AnnotationValues>
{
{
"ServiceTableColumn",
new AnnotationValues(oldValue: null, newValue: "Version")
},
}),
})
.PrimaryKey(t => t.Id)
.ForeignKey("LeagueSeason5Counters.ChampionFeedbacks", t => t.ChampionFeedbackId)
.Index(t => t.ChampionFeedbackId);
CreateTable(
"LeagueSeason5Counters.CounterRatings",
c => new
{
Id = c.String(nullable: false, maxLength: 128,
annotations: new Dictionary<string, AnnotationValues>
{
{
"ServiceTableColumn",
new AnnotationValues(oldValue: null, newValue: "Id")
},
}),
UniqueUser = c.String(),
Score = c.Int(nullable: false),
CounterId = c.String(maxLength: 128),
CreatedAt = c.DateTimeOffset(precision: 7,
annotations: new Dictionary<string, AnnotationValues>
{
{
"ServiceTableColumn",
new AnnotationValues(oldValue: null, newValue: "CreatedAt")
},
}),
Deleted = c.Boolean(nullable: false,
annotations: new Dictionary<string, AnnotationValues>
{
{
"ServiceTableColumn",
new AnnotationValues(oldValue: null, newValue: "Deleted")
},
}),
UpdatedAt = c.DateTimeOffset(precision: 7,
annotations: new Dictionary<string, AnnotationValues>
{
{
"ServiceTableColumn",
new AnnotationValues(oldValue: null, newValue: "UpdatedAt")
},
}),
Version = c.Binary(nullable: false, fixedLength: true, timestamp: true, storeType: "rowversion",
annotations: new Dictionary<string, AnnotationValues>
{
{
"ServiceTableColumn",
new AnnotationValues(oldValue: null, newValue: "Version")
},
}),
})
.PrimaryKey(t => t.Id)
.ForeignKey("LeagueSeason5Counters.Counters", t => t.CounterId)
.Index(t => t.CounterId);
}
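// For reference (an assumption inferred from the column set above, not part of the migration):
// tables shaped like this usually back Azure Mobile Services entities derived from EntityData,
// which supplies the Id, CreatedAt, UpdatedAt, Deleted and Version system columns, e.g.
//
//   public class Counter : EntityData
//   {
//       public string Name { get; set; }
//       public int Score { get; set; }
//       public string ChampionFeedbackId { get; set; }
//   }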
public override void Down()
{
DropForeignKey("LeagueSeason5Counters.CounterRatings", "CounterId", "LeagueSeason5Counters.Counters");
DropForeignKey("LeagueSeason5Counters.Counters", "ChampionFeedbackId", "LeagueSeason5Counters.ChampionFeedbacks");
DropIndex("LeagueSeason5Counters.CounterRatings", new[] { "CounterId" });
DropIndex("LeagueSeason5Counters.Counters", new[] { "ChampionFeedbackId" });
DropTable("LeagueSeason5Counters.CounterRatings",
removedColumnAnnotations: new Dictionary<string, IDictionary<string, object>>
{
{
"CreatedAt",
new Dictionary<string, object>
{
{ "ServiceTableColumn", "CreatedAt" },
}
},
{
"Deleted",
new Dictionary<string, object>
{
{ "ServiceTableColumn", "Deleted" },
}
},
{
"Id",
new Dictionary<string, object>
{
{ "ServiceTableColumn", "Id" },
}
},
{
"UpdatedAt",
new Dictionary<string, object>
{
{ "ServiceTableColumn", "UpdatedAt" },
}
},
{
"Version",
new Dictionary<string, object>
{
{ "ServiceTableColumn", "Version" },
}
},
});
DropTable("LeagueSeason5Counters.Counters",
removedColumnAnnotations: new Dictionary<string, IDictionary<string, object>>
{
{
"CreatedAt",
new Dictionary<string, object>
{
{ "ServiceTableColumn", "CreatedAt" },
}
},
{
"Deleted",
new Dictionary<string, object>
{
{ "ServiceTableColumn", "Deleted" },
}
},
{
"Id",
new Dictionary<string, object>
{
{ "ServiceTableColumn", "Id" },
}
},
{
"UpdatedAt",
new Dictionary<string, object>
{
{ "ServiceTableColumn", "UpdatedAt" },
}
},
{
"Version",
new Dictionary<string, object>
{
{ "ServiceTableColumn", "Version" },
}
},
});
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Xunit;
namespace System.Numerics.Tests
{
public class logTest
{
private static int s_samples = 10;
private static Random s_random = new Random(100);
[Fact]
public static void RunLogTests()
{
byte[] tempByteArray1 = new byte[0];
byte[] tempByteArray2 = new byte[0];
BigInteger bi;
// Log Method - Log(1,+Infinity)
Assert.Equal(0, BigInteger.Log(1, Double.PositiveInfinity));
// Log Method - Log(1,0)
VerifyLogString("0 1 bLog");
// Log Method - Log(0, >1)
for (int i = 0; i < s_samples; i++)
{
tempByteArray1 = GetRandomPosByteArray(s_random, 10);
VerifyLogString(Print(tempByteArray1) + "0 bLog");
}
// Log Method - Log(0, 0>x>1)
for (int i = 0; i < s_samples; i++)
{
Assert.Equal(Double.PositiveInfinity, BigInteger.Log(0, s_random.NextDouble()));
}
// Log Method - base = 0
for (int i = 0; i < s_samples; i++)
{
bi = 1;
while (bi == 1)
{
bi = new BigInteger(GetRandomPosByteArray(s_random, 8));
}
Assert.True((Double.IsNaN(BigInteger.Log(bi, 0))));
}
// Log Method - base = 1
for (int i = 0; i < s_samples; i++)
{
tempByteArray1 = GetRandomByteArray(s_random);
VerifyLogString("1 " + Print(tempByteArray1) + "bLog");
}
// Log Method - base = NaN
for (int i = 0; i < s_samples; i++)
{
Assert.True(Double.IsNaN(BigInteger.Log(new BigInteger(GetRandomByteArray(s_random, 10)), Double.NaN)));
}
// Log Method - base = +Infinity
for (int i = 0; i < s_samples; i++)
{
Assert.True(Double.IsNaN(BigInteger.Log(new BigInteger(GetRandomByteArray(s_random, 10)), Double.PositiveInfinity)));
}
// Log Method - Log(0,1)
VerifyLogString("1 0 bLog");
// Log Method - base < 0
for (int i = 0; i < s_samples; i++)
{
tempByteArray1 = GetRandomByteArray(s_random, 10);
tempByteArray2 = GetRandomNegByteArray(s_random, 1);
VerifyLogString(Print(tempByteArray2) + Print(tempByteArray1) + "bLog");
Assert.True(Double.IsNaN(BigInteger.Log(new BigInteger(GetRandomByteArray(s_random, 10)), -s_random.NextDouble())));
}
// Log Method - value < 0
for (int i = 0; i < s_samples; i++)
{
tempByteArray1 = GetRandomNegByteArray(s_random, 10);
tempByteArray2 = GetRandomPosByteArray(s_random, 1);
VerifyLogString(Print(tempByteArray2) + Print(tempByteArray1) + "bLog");
}
// Log Method - Small BigInteger and 0<base<0.5
for (int i = 0; i < s_samples; i++)
{
BigInteger temp = new BigInteger(GetRandomPosByteArray(s_random, 10));
Double newbase = Math.Min(s_random.NextDouble(), 0.5);
Assert.True(ApproxEqual(BigInteger.Log(temp, newbase), Math.Log((double)temp, newbase)));
}
// Log Method - Large BigInteger and 0<base<0.5
for (int i = 0; i < s_samples; i++)
{
BigInteger temp = new BigInteger(GetRandomPosByteArray(s_random, s_random.Next(1, 100)));
Double newbase = Math.Min(s_random.NextDouble(), 0.5);
Assert.True(ApproxEqual(BigInteger.Log(temp, newbase), Math.Log((double)temp, newbase)));
}
// Log Method - two small BigIntegers
for (int i = 0; i < s_samples; i++)
{
tempByteArray1 = GetRandomPosByteArray(s_random, 2);
tempByteArray2 = GetRandomPosByteArray(s_random, 3);
VerifyLogString(Print(tempByteArray1) + Print(tempByteArray2) + "bLog");
}
// Log Method - one small and one large BigIntegers
for (int i = 0; i < s_samples; i++)
{
tempByteArray1 = GetRandomPosByteArray(s_random, 1);
tempByteArray2 = GetRandomPosByteArray(s_random, s_random.Next(1, 100));
VerifyLogString(Print(tempByteArray1) + Print(tempByteArray2) + "bLog");
}
// Log Method - two large BigIntegers
for (int i = 0; i < s_samples; i++)
{
tempByteArray1 = GetRandomPosByteArray(s_random, s_random.Next(1, 100));
tempByteArray2 = GetRandomPosByteArray(s_random, s_random.Next(1, 100));
VerifyLogString(Print(tempByteArray1) + Print(tempByteArray2) + "bLog");
}
// Log Method - Very Large BigInteger 1 << 128 << Int.MaxValue and 2
LargeValueLogTests(128, 1);
}
[Fact]
[OuterLoop]
public static void RunLargeValueLogTests()
{
LargeValueLogTests(0, 5, 64, 4);
}
/// <summary>
/// Tests the Log method on very large BigIntegers (greater than 1 << Int.MaxValue) with base 2.
/// Tested BigIntegers are: pow(2, startShift + smallShift * [1..smallShiftLoopLimit] + Int32.MaxValue * [1..bigShiftLoopLimit])
/// Note:
/// ToString() cannot operate on such large values,
/// VerifyLogString() cannot operate on such large values,
/// Math.Log() cannot operate on such large values.
/// </summary>
private static void LargeValueLogTests(int startShift, int bigShiftLoopLimit, int smallShift = 0, int smallShiftLoopLimit = 1)
{
BigInteger init = BigInteger.One << startShift;
double logbase = 2D;
for (int i = 0; i < smallShiftLoopLimit; i++)
{
BigInteger temp = init << ((i + 1) * smallShift);
for (int j = 0; j<bigShiftLoopLimit; j++)
{
temp = temp << Int32.MaxValue;
double expected =
(double)startShift +
smallShift * (double)(i + 1) +
Int32.MaxValue * (double)(j + 1);
Assert.True(ApproxEqual(BigInteger.Log(temp, logbase), expected));
}
}
}
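// Why the expected value above is exact (illustrative note): temp is always a power of two,
// temp = 2^(startShift + smallShift*(i+1) + Int32.MaxValue*(j+1)), so with logbase = 2 the call
// BigInteger.Log(temp, 2) should return exactly that exponent and ApproxEqual only has to absorb
// floating-point rounding.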
private static void VerifyLogString(string opstring)
{
StackCalc sc = new StackCalc(opstring);
while (sc.DoNextOperation())
{
Assert.Equal(sc.snCalc.Peek().ToString(), sc.myCalc.Peek().ToString());
}
}
private static void VerifyIdentityString(string opstring1, string opstring2)
{
StackCalc sc1 = new StackCalc(opstring1);
while (sc1.DoNextOperation())
{
//Run the full calculation
sc1.DoNextOperation();
}
StackCalc sc2 = new StackCalc(opstring2);
while (sc2.DoNextOperation())
{
//Run the full calculation
sc2.DoNextOperation();
}
Assert.Equal(sc1.snCalc.Peek().ToString(), sc2.snCalc.Peek().ToString());
}
private static byte[] GetRandomByteArray(Random random)
{
return GetRandomByteArray(random, random.Next(0, 100));
}
private static byte[] GetRandomByteArray(Random random, int size)
{
return MyBigIntImp.GetRandomByteArray(random, size);
}
private static Byte[] GetRandomPosByteArray(Random random, int size)
{
byte[] value = new byte[size];
for (int i = 0; i < value.Length; i++)
{
value[i] = (byte)random.Next(0, 256);
}
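// BigInteger treats the byte array as little-endian two's complement, so clearing the top bit
// of the last (most significant) byte forces a non-negative value.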
value[value.Length - 1] &= 0x7F;
return value;
}
private static Byte[] GetRandomNegByteArray(Random random, int size)
{
byte[] value = new byte[size];
for (int i = 0; i < value.Length; ++i)
{
value[i] = (byte)random.Next(0, 256);
}
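// Setting the sign bit of the most significant byte makes the two's-complement value negative.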
value[value.Length - 1] |= 0x80;
return value;
}
private static String Print(byte[] bytes)
{
return MyBigIntImp.Print(bytes);
}
private static bool ApproxEqual(double value1, double value2)
{
//Special case values;
if (Double.IsNaN(value1))
{
return Double.IsNaN(value2);
}
if (Double.IsNegativeInfinity(value1))
{
return Double.IsNegativeInfinity(value2);
}
if (Double.IsPositiveInfinity(value1))
{
return Double.IsPositiveInfinity(value2);
}
if (value2 == 0)
{
return (value1 == 0);
}
double result = Math.Abs((value1 / value2) - 1);
return (result <= Double.Parse("1e-15"));
}
}
}
| |
using System;
using System.Collections;
using System.IO;
using NBitcoin.BouncyCastle.Utilities;
namespace NBitcoin.BouncyCastle.Crypto.Parameters
{
/// <summary>
/// Parameters for the Skein hash function - a series of byte[] strings identified by integer tags.
/// </summary>
/// <remarks>
/// Parameterised Skein can be used for:
/// <ul>
/// <li>MAC generation, by providing a <see cref="SkeinParameters.Builder.SetKey(byte[])">key</see>.</li>
/// <li>Randomised hashing, by providing a <see cref="SkeinParameters.Builder.SetNonce(byte[])">nonce</see>.</li>
/// <li>A hash function for digital signatures, associating a
/// <see cref="SkeinParameters.Builder.SetPublicKey(byte[])">public key</see> with the message digest.</li>
/// <li>A key derivation function, by providing a
/// <see cref="SkeinParameters.Builder.SetKeyIdentifier(byte[])">key identifier</see>.</li>
/// <li>Personalised hashing, by providing a
/// <see cref="SkeinParameters.Builder.SetPersonalisation(DateTime,string,string)">recommended format</see> or
/// <see cref="SkeinParameters.Builder.SetPersonalisation(byte[])">arbitrary</see> personalisation string.</li>
/// </ul>
/// </remarks>
/// <seealso cref="NBitcoin.BouncyCastle.Crypto.Digests.SkeinEngine"/>
/// <seealso cref="NBitcoin.BouncyCastle.Crypto.Digests.SkeinDigest"/>
/// <seealso cref="NBitcoin.BouncyCastle.Crypto.Macs.SkeinMac"/>
public class SkeinParameters
: ICipherParameters
{
/// <summary>
/// The parameter type for a secret key, supporting MAC or KDF functions: 0
/// </summary>
public const int PARAM_TYPE_KEY = 0;
/// <summary>
/// The parameter type for the Skein configuration block: 4
/// </summary>
public const int PARAM_TYPE_CONFIG = 4;
/// <summary>
/// The parameter type for a personalisation string: 8
/// </summary>
public const int PARAM_TYPE_PERSONALISATION = 8;
/// <summary>
/// The parameter type for a public key: 12
/// </summary>
public const int PARAM_TYPE_PUBLIC_KEY = 12;
/// <summary>
/// The parameter type for a key identifier string: 16
/// </summary>
public const int PARAM_TYPE_KEY_IDENTIFIER = 16;
/// <summary>
/// The parameter type for a nonce: 20
/// </summary>
public const int PARAM_TYPE_NONCE = 20;
/// <summary>
/// The parameter type for the message: 48
/// </summary>
public const int PARAM_TYPE_MESSAGE = 48;
/// <summary>
/// The parameter type for the output transformation: 63
/// </summary>
public const int PARAM_TYPE_OUTPUT = 63;
private IDictionary parameters;
public SkeinParameters()
: this(Platform.CreateHashtable())
{
}
private SkeinParameters(IDictionary parameters)
{
this.parameters = parameters;
}
/// <summary>
/// Obtains a map of type (int) to value (byte[]) for the parameters tracked in this object.
/// </summary>
public IDictionary GetParameters()
{
return parameters;
}
/// <summary>
/// Obtains the value of the <see cref="PARAM_TYPE_KEY">key parameter</see>, or <code>null</code> if not
/// set.
/// </summary>
/// <returns>The key.</returns>
public byte[] GetKey()
{
return (byte[])parameters[PARAM_TYPE_KEY];
}
/// <summary>
/// Obtains the value of the <see cref="PARAM_TYPE_PERSONALISATION">personalisation parameter</see>, or
/// <code>null</code> if not set.
/// </summary>
public byte[] GetPersonalisation()
{
return (byte[])parameters[PARAM_TYPE_PERSONALISATION];
}
/// <summary>
/// Obtains the value of the <see cref="PARAM_TYPE_PUBLIC_KEY">public key parameter</see>, or
/// <code>null</code> if not set.
/// </summary>
public byte[] GetPublicKey()
{
return (byte[])parameters[PARAM_TYPE_PUBLIC_KEY];
}
/// <summary>
/// Obtains the value of the <see cref="PARAM_TYPE_KEY_IDENTIFIER">key identifier parameter</see>, or
/// <code>null</code> if not set.
/// </summary>
public byte[] GetKeyIdentifier()
{
return (byte[])parameters[PARAM_TYPE_KEY_IDENTIFIER];
}
/// <summary>
/// Obtains the value of the <see cref="PARAM_TYPE_NONCE">nonce parameter</see>, or <code>null</code> if
/// not set.
/// </summary>
public byte[] GetNonce()
{
return (byte[])parameters[PARAM_TYPE_NONCE];
}
/// <summary>
/// A builder for <see cref="SkeinParameters"/>.
/// </summary>
public class Builder
{
private IDictionary parameters = Platform.CreateHashtable();
public Builder()
{
}
public Builder(IDictionary paramsMap)
{
IEnumerator keys = paramsMap.Keys.GetEnumerator();
while (keys.MoveNext())
{
int key = (int)keys.Current;
parameters.Add(key, paramsMap[key]);
}
}
public Builder(SkeinParameters parameters)
{
IEnumerator keys = parameters.parameters.Keys.GetEnumerator();
while (keys.MoveNext())
{
int key = (int)keys.Current;
this.parameters.Add(key, parameters.parameters[key]);
}
}
/// <summary>
/// Sets a parameters to apply to the Skein hash function.
/// </summary>
/// <remarks>
/// Parameter types must be in the range 0,5..62, and cannot use the value 48
/// (reserved for message body).
/// <p/>
/// Parameters with type &lt; 48 are processed before
/// the message content, parameters with type &gt; 48
/// are processed after the message and prior to output.
/// </remarks>
/// <param name="type">the type of the parameter, in the range 0,5..47,49..62.</param>
/// <param name="value">the byte sequence of the parameter.</param>
public Builder Set(int type, byte[] value)
{
if (value == null)
{
throw new ArgumentException("Parameter value must not be null.");
}
if ((type != PARAM_TYPE_KEY)
&& (type <= PARAM_TYPE_CONFIG || type >= PARAM_TYPE_OUTPUT || type == PARAM_TYPE_MESSAGE))
{
throw new ArgumentException("Parameter types must be in the range 0,5..47,49..62.");
}
if (type == PARAM_TYPE_CONFIG)
{
throw new ArgumentException("Parameter type " + PARAM_TYPE_CONFIG
+ " is reserved for internal use.");
}
this.parameters.Add(type, value);
return this;
}
/// <summary>
/// Sets the <see cref="SkeinParameters.PARAM_TYPE_KEY"/> parameter.
/// </summary>
public Builder SetKey(byte[] key)
{
return Set(PARAM_TYPE_KEY, key);
}
/// <summary>
/// Sets the <see cref="SkeinParameters.PARAM_TYPE_PERSONALISATION"/> parameter.
/// </summary>
public Builder SetPersonalisation(byte[] personalisation)
{
return Set(PARAM_TYPE_PERSONALISATION, personalisation);
}
/// <summary>
/// Implements the recommended personalisation format for Skein defined in Section 4.11 of
/// the Skein 1.3 specification.
/// </summary>
/// <remarks>
/// The format is <code>YYYYMMDD email@address distinguisher</code>, encoded to a byte
/// sequence using UTF-8 encoding.
/// </remarks>
/// <param name="date">the date the personalised application of the Skein was defined.</param>
/// <param name="emailAddress">the email address of the creation of the personalised application.</param>
/// <param name="distinguisher">an arbitrary personalisation string distinguishing the application.</param>
public Builder SetPersonalisation(DateTime date, string emailAddress, string distinguisher)
{
try
{
MemoryStream bout = new MemoryStream();
StreamWriter outBytes = new StreamWriter(bout, System.Text.Encoding.UTF8);
outBytes.Write(date.ToString("yyyyMMdd")); // "yyyyMMdd" is the .NET custom format matching the YYYYMMDD date digits
outBytes.Write(" ");
outBytes.Write(emailAddress);
outBytes.Write(" ");
outBytes.Write(distinguisher);
outBytes.Dispose();
return Set(PARAM_TYPE_PERSONALISATION, bout.ToArray());
}
catch (IOException e)
{
throw new InvalidOperationException("Byte I/O failed.", e);
}
}
/// <summary>
/// Sets the <see cref="SkeinParameters.PARAM_TYPE_PUBLIC_KEY"/> parameter.
/// </summary>
public Builder SetPublicKey(byte[] publicKey)
{
return Set(PARAM_TYPE_PUBLIC_KEY, publicKey);
}
/// <summary>
/// Sets the <see cref="SkeinParameters.PARAM_TYPE_KEY_IDENTIFIER"/> parameter.
/// </summary>
public Builder SetKeyIdentifier(byte[] keyIdentifier)
{
return Set(PARAM_TYPE_KEY_IDENTIFIER, keyIdentifier);
}
/// <summary>
/// Sets the <see cref="SkeinParameters.PARAM_TYPE_NONCE"/> parameter.
/// </summary>
public Builder SetNonce(byte[] nonce)
{
return Set(PARAM_TYPE_NONCE, nonce);
}
/// <summary>
/// Constructs a new <see cref="SkeinParameters"/> instance with the parameters provided to this
/// builder.
/// </summary>
public SkeinParameters Build()
{
return new SkeinParameters(parameters);
}
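// Minimal usage sketch (illustrative, not part of the original source): parameters for MAC-style
// use of Skein are assembled fluently and handed to an engine such as SkeinMac via Init.
// keyBytes and nonceBytes below are placeholder variables.
//
//   SkeinParameters skeinParams = new SkeinParameters.Builder()
//       .SetKey(keyBytes)
//       .SetNonce(nonceBytes)
//       .Build();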
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Net;
using System.Runtime;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Orleans.Runtime.ConsistentRing;
using Orleans.Runtime.Counters;
using Orleans.Runtime.GrainDirectory;
using Orleans.Runtime.Messaging;
using Orleans.Runtime.ReminderService;
using Orleans.Runtime.Scheduler;
using Orleans.Services;
using Orleans.Configuration;
using Orleans.Serialization;
using Orleans.Internal;
namespace Orleans.Runtime
{
/// <summary>
/// Orleans silo.
/// </summary>
public class Silo
{
/// <summary>Standard name for Primary silo. </summary>
public const string PrimarySiloName = "Primary";
private readonly ILocalSiloDetails siloDetails;
private readonly MessageCenter messageCenter;
private readonly LocalGrainDirectory localGrainDirectory;
private readonly ILogger logger;
private readonly TaskCompletionSource<int> siloTerminatedTask = new TaskCompletionSource<int>(TaskCreationOptions.RunContinuationsAsynchronously);
private readonly SiloStatisticsManager siloStatistics;
private readonly InsideRuntimeClient runtimeClient;
private IReminderService reminderService;
private SystemTarget fallbackScheduler;
private readonly ISiloStatusOracle siloStatusOracle;
private Watchdog platformWatchdog;
private readonly TimeSpan waitForMessageToBeQueuedForOutbound;
private readonly TimeSpan initTimeout;
private readonly TimeSpan stopTimeout = TimeSpan.FromMinutes(1);
private readonly Catalog catalog;
private readonly object lockable = new object();
private readonly GrainFactory grainFactory;
private readonly ISiloLifecycleSubject siloLifecycle;
private readonly IMembershipService membershipService;
internal List<GrainService> grainServices = new List<GrainService>();
private readonly ILoggerFactory loggerFactory;
/// <summary>
/// Gets the name of this silo.
/// </summary>
internal string Name => this.siloDetails.Name;
internal ILocalGrainDirectory LocalGrainDirectory { get { return localGrainDirectory; } }
internal IConsistentRingProvider RingProvider { get; private set; }
internal List<GrainService> GrainServices => grainServices;
internal SystemStatus SystemStatus { get; set; }
internal IServiceProvider Services { get; }
/// <summary>Gets the address of this silo.</summary>
public SiloAddress SiloAddress => this.siloDetails.SiloAddress;
/// <summary>
/// Gets a <see cref="Task"/> which completes once the silo has terminated.
/// </summary>
public Task SiloTerminated { get { return this.siloTerminatedTask.Task; } } // one event for all types of termination (shutdown, stop and fast kill).
private bool isFastKilledNeeded = false; // Set to true if something goes wrong in the shutdown/stop phase
private IGrainContext reminderServiceContext;
private LifecycleSchedulingSystemTarget lifecycleSchedulingSystemTarget;
/// <summary>
/// Initializes a new instance of the <see cref="Silo"/> class.
/// </summary>
/// <param name="siloDetails">The silo initialization parameters</param>
/// <param name="services">Dependency Injection container</param>
[Obsolete("This constructor is obsolete and may be removed in a future release. Use SiloHostBuilder to create an instance of ISiloHost instead.")]
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Reliability", "CA2000:Dispose objects before losing scope",
Justification = "Should not Dispose of messageCenter in this method because it continues to run / exist after this point.")]
public Silo(ILocalSiloDetails siloDetails, IServiceProvider services)
{
string name = siloDetails.Name;
// Temporarily still require this. Hopefully gone when 2.0 is released.
this.siloDetails = siloDetails;
this.SystemStatus = SystemStatus.Creating;
var startTime = DateTime.UtcNow;
IOptions<ClusterMembershipOptions> clusterMembershipOptions = services.GetRequiredService<IOptions<ClusterMembershipOptions>>();
initTimeout = clusterMembershipOptions.Value.MaxJoinAttemptTime;
if (Debugger.IsAttached)
{
initTimeout = StandardExtensions.Max(TimeSpan.FromMinutes(10), clusterMembershipOptions.Value.MaxJoinAttemptTime);
stopTimeout = initTimeout;
}
var localEndpoint = this.siloDetails.SiloAddress.Endpoint;
this.Services = services;
//set PropagateActivityId flag from node config
IOptions<SiloMessagingOptions> messagingOptions = services.GetRequiredService<IOptions<SiloMessagingOptions>>();
RequestContext.PropagateActivityId = messagingOptions.Value.PropagateActivityId;
this.waitForMessageToBeQueuedForOutbound = messagingOptions.Value.WaitForMessageToBeQueuedForOutboundTime;
this.loggerFactory = this.Services.GetRequiredService<ILoggerFactory>();
logger = this.loggerFactory.CreateLogger<Silo>();
logger.Info(ErrorCode.SiloGcSetting, "Silo starting with GC settings: ServerGC={0} GCLatencyMode={1}", GCSettings.IsServerGC, Enum.GetName(typeof(GCLatencyMode), GCSettings.LatencyMode));
if (!GCSettings.IsServerGC)
{
logger.Warn(ErrorCode.SiloGcWarning, "Note: Silo not running with ServerGC turned on - recommend checking app config : <configuration>-<runtime>-<gcServer enabled=\"true\">");
logger.Warn(ErrorCode.SiloGcWarning, "Note: ServerGC only kicks in on multi-core systems (settings enabling ServerGC have no effect on single-core machines).");
}
if (logger.IsEnabled(LogLevel.Debug))
{
var highestLogLevel = logger.IsEnabled(LogLevel.Trace) ? nameof(LogLevel.Trace) : nameof(LogLevel.Debug);
logger.LogWarning(
new EventId((int)ErrorCode.SiloGcWarning),
$"A verbose logging level ({highestLogLevel}) is configured. This will impact performance. The recommended log level is {nameof(LogLevel.Information)}.");
}
logger.Info(ErrorCode.SiloInitializing, "-------------- Initializing silo on host {0} MachineName {1} at {2}, gen {3} --------------",
this.siloDetails.DnsHostName, Environment.MachineName, localEndpoint, this.siloDetails.SiloAddress.Generation);
logger.Info(ErrorCode.SiloInitConfig, "Starting silo {0}", name);
try
{
grainFactory = Services.GetRequiredService<GrainFactory>();
}
catch (InvalidOperationException exc)
{
logger.Error(ErrorCode.SiloStartError, "Exception during Silo.Start, GrainFactory was not registered in Dependency Injection container", exc);
throw;
}
// Performance metrics
siloStatistics = Services.GetRequiredService<SiloStatisticsManager>();
runtimeClient = Services.GetRequiredService<InsideRuntimeClient>();
// Initialize the message center
messageCenter = Services.GetRequiredService<MessageCenter>();
messageCenter.SniffIncomingMessage = runtimeClient.SniffIncomingMessage;
// Now the router/directory service
// This has to come after the message center; note that it then gets injected back into the message center.
localGrainDirectory = Services.GetRequiredService<LocalGrainDirectory>();
// Now the consistent ring provider
RingProvider = Services.GetRequiredService<IConsistentRingProvider>();
catalog = Services.GetRequiredService<Catalog>();
siloStatusOracle = Services.GetRequiredService<ISiloStatusOracle>();
this.membershipService = Services.GetRequiredService<IMembershipService>();
this.SystemStatus = SystemStatus.Created;
StringValueStatistic.FindOrCreate(StatisticNames.SILO_START_TIME,
() => LogFormatter.PrintDate(startTime)); // this will help troubleshoot production deployment when looking at MDS logs.
this.siloLifecycle = this.Services.GetRequiredService<ISiloLifecycleSubject>();
// register all lifecycle participants
IEnumerable<ILifecycleParticipant<ISiloLifecycle>> lifecycleParticipants = this.Services.GetServices<ILifecycleParticipant<ISiloLifecycle>>();
foreach(ILifecycleParticipant<ISiloLifecycle> participant in lifecycleParticipants)
{
participant?.Participate(this.siloLifecycle);
}
// register all named lifecycle participants
IKeyedServiceCollection<string, ILifecycleParticipant<ISiloLifecycle>> namedLifecycleParticipantCollection = this.Services.GetService<IKeyedServiceCollection<string,ILifecycleParticipant<ISiloLifecycle>>>();
foreach (ILifecycleParticipant<ISiloLifecycle> participant in namedLifecycleParticipantCollection
?.GetServices(this.Services)
?.Select(s => s.GetService(this.Services)))
{
participant?.Participate(this.siloLifecycle);
}
// add self to lifecycle
this.Participate(this.siloLifecycle);
logger.Info(ErrorCode.SiloInitializingFinished, "-------------- Started silo {0}, ConsistentHashCode {1:X} --------------", SiloAddress.ToLongString(), SiloAddress.GetConsistentHashCode());
}
/// <summary>
/// Starts the silo.
/// </summary>
/// <param name="cancellationToken">A cancellation token which can be used to cancel the operation.</param>
/// <returns>A <see cref="Task"/> representing the operation.</returns>
public async Task StartAsync(CancellationToken cancellationToken)
{
// SystemTarget for provider init calls
this.lifecycleSchedulingSystemTarget = Services.GetRequiredService<LifecycleSchedulingSystemTarget>();
this.fallbackScheduler = Services.GetRequiredService<FallbackSystemTarget>();
RegisterSystemTarget(lifecycleSchedulingSystemTarget);
try
{
await this.lifecycleSchedulingSystemTarget.WorkItemGroup.QueueTask(() => this.siloLifecycle.OnStart(cancellationToken), lifecycleSchedulingSystemTarget);
}
catch (Exception exc)
{
logger.Error(ErrorCode.SiloStartError, "Exception during Silo.Start", exc);
throw;
}
}
private void CreateSystemTargets()
{
var siloControl = ActivatorUtilities.CreateInstance<SiloControl>(Services);
RegisterSystemTarget(siloControl);
RegisterSystemTarget(Services.GetRequiredService<DeploymentLoadPublisher>());
RegisterSystemTarget(LocalGrainDirectory.RemoteGrainDirectory);
RegisterSystemTarget(LocalGrainDirectory.CacheValidator);
this.RegisterSystemTarget(this.Services.GetRequiredService<ClientDirectory>());
if (this.membershipService is SystemTarget)
{
RegisterSystemTarget((SystemTarget)this.membershipService);
}
}
private void InjectDependencies()
{
catalog.SiloStatusOracle = this.siloStatusOracle;
this.siloStatusOracle.SubscribeToSiloStatusEvents(localGrainDirectory);
// consistentRingProvider is not a system target per se, but it behaves like the localGrainDirectory, so it is here
this.siloStatusOracle.SubscribeToSiloStatusEvents((ISiloStatusListener)RingProvider);
this.siloStatusOracle.SubscribeToSiloStatusEvents(Services.GetRequiredService<DeploymentLoadPublisher>());
var reminderTable = Services.GetService<IReminderTable>();
if (reminderTable != null)
{
logger.Info($"Creating reminder grain service for type={reminderTable.GetType()}");
// Start the reminder service system target
var timerFactory = this.Services.GetRequiredService<IAsyncTimerFactory>();
reminderService = new LocalReminderService(this, reminderTable, this.initTimeout, this.loggerFactory, timerFactory);
RegisterSystemTarget((SystemTarget)reminderService);
}
// SystemTarget for provider init calls
this.fallbackScheduler = Services.GetRequiredService<FallbackSystemTarget>();
RegisterSystemTarget(fallbackScheduler);
}
private Task OnRuntimeInitializeStart(CancellationToken ct)
{
lock (lockable)
{
if (!this.SystemStatus.Equals(SystemStatus.Created))
throw new InvalidOperationException(String.Format("Calling Silo.Start() on a silo which is not in the Created state. This silo is in the {0} state.", this.SystemStatus));
this.SystemStatus = SystemStatus.Starting;
}
logger.Info(ErrorCode.SiloStarting, "Silo Start()");
return Task.CompletedTask;
}
private void StartTaskWithPerfAnalysis(string taskName, Action task, Stopwatch stopWatch)
{
stopWatch.Restart();
task.Invoke();
stopWatch.Stop();
this.logger.Info(ErrorCode.SiloStartPerfMeasure, $"{taskName} took {stopWatch.ElapsedMilliseconds} Milliseconds to finish");
}
private async Task StartAsyncTaskWithPerfAnalysis(string taskName, Func<Task> task, Stopwatch stopWatch)
{
stopWatch.Restart();
await task.Invoke();
stopWatch.Stop();
this.logger.Info(ErrorCode.SiloStartPerfMeasure, $"{taskName} took {stopWatch.ElapsedMilliseconds} Milliseconds to finish");
}
private Task OnRuntimeServicesStart(CancellationToken ct)
{
//TODO: Set up all (or as many as possible) of the classes started in this call to work directly with the lifecycle
var stopWatch = Stopwatch.StartNew();
StartTaskWithPerfAnalysis("Start local grain directory", LocalGrainDirectory.Start, stopWatch);
// This has to follow the above steps that start the runtime components
CreateSystemTargets();
InjectDependencies();
return Task.CompletedTask;
}
private async Task OnRuntimeGrainServicesStart(CancellationToken ct)
{
var stopWatch = Stopwatch.StartNew();
// Load and init grain services before silo becomes active.
await StartAsyncTaskWithPerfAnalysis("Init grain services",
() => CreateGrainServices(), stopWatch);
try
{
StatisticsOptions statisticsOptions = Services.GetRequiredService<IOptions<StatisticsOptions>>().Value;
StartTaskWithPerfAnalysis("Start silo statistics", () => this.siloStatistics.Start(statisticsOptions), stopWatch);
logger.Debug("Silo statistics manager started successfully.");
// Finally, initialize the deployment load collector, for grains with load-based placement
await StartAsyncTaskWithPerfAnalysis("Start deployment load collector", StartDeploymentLoadCollector, stopWatch);
async Task StartDeploymentLoadCollector()
{
var deploymentLoadPublisher = Services.GetRequiredService<DeploymentLoadPublisher>();
await deploymentLoadPublisher.WorkItemGroup.QueueTask(deploymentLoadPublisher.Start, deploymentLoadPublisher)
.WithTimeout(this.initTimeout, $"Starting DeploymentLoadPublisher failed due to timeout {initTimeout}");
logger.Debug("Silo deployment load publisher started successfully.");
}
// Start background timer tick to watch for platform execution stalls, such as when GC kicks in
var healthCheckParticipants = this.Services.GetService<IEnumerable<IHealthCheckParticipant>>().ToList();
this.platformWatchdog = new Watchdog(statisticsOptions.LogWriteInterval, healthCheckParticipants, this.loggerFactory.CreateLogger<Watchdog>());
this.platformWatchdog.Start();
if (this.logger.IsEnabled(LogLevel.Debug)) { logger.Debug("Silo platform watchdog started successfully."); }
}
catch (Exception exc)
{
this.SafeExecute(() => this.logger.Error(ErrorCode.Runtime_Error_100330, String.Format("Error starting silo {0}. Going to FastKill().", this.SiloAddress), exc));
throw;
}
if (logger.IsEnabled(LogLevel.Debug)) { logger.Debug("Silo.Start complete: System status = {0}", this.SystemStatus); }
}
private Task OnBecomeActiveStart(CancellationToken ct)
{
this.SystemStatus = SystemStatus.Running;
return Task.CompletedTask;
}
private async Task OnActiveStart(CancellationToken ct)
{
var stopWatch = Stopwatch.StartNew();
if (this.reminderService != null)
{
await StartAsyncTaskWithPerfAnalysis("Start reminder service", StartReminderService, stopWatch);
async Task StartReminderService()
{
// At this point we have the membership view in the consistentRingProvider, so we can start the reminder service
this.reminderServiceContext = (this.reminderService as IGrainContext) ?? this.fallbackScheduler;
await this.reminderServiceContext.QueueTask(this.reminderService.Start)
.WithTimeout(this.initTimeout, $"Starting ReminderService failed due to timeout {initTimeout}");
this.logger.Debug("Reminder service started successfully.");
}
}
foreach (var grainService in grainServices)
{
await StartGrainService(grainService);
}
}
private async Task CreateGrainServices()
{
var grainServices = this.Services.GetServices<IGrainService>();
foreach (var grainService in grainServices)
{
await RegisterGrainService(grainService);
}
}
private async Task RegisterGrainService(IGrainService service)
{
var grainService = (GrainService)service;
RegisterSystemTarget(grainService);
grainServices.Add(grainService);
await grainService.QueueTask(() => grainService.Init(Services)).WithTimeout(this.initTimeout, $"GrainService Initializing failed due to timeout {initTimeout}");
logger.Info($"Grain Service {service.GetType().FullName} registered successfully.");
}
private async Task StartGrainService(IGrainService service)
{
var grainService = (GrainService)service;
await grainService.QueueTask(grainService.Start).WithTimeout(this.initTimeout, $"Starting GrainService failed due to timeout {initTimeout}");
logger.Info($"Grain Service {service.GetType().FullName} started successfully.");
}
/// <summary>
/// Gracefully stop the run time system only, but not the application.
/// Applications requests would be abruptly terminated, while the internal system state gracefully stopped and saved as much as possible.
/// Grains are not deactivated.
/// </summary>
public void Stop()
{
var cancellationSource = new CancellationTokenSource();
cancellationSource.Cancel();
StopAsync(cancellationSource.Token).GetAwaiter().GetResult();
}
/// <summary>
/// Gracefully stop the run time system and the application.
/// All grains will be properly deactivated.
/// All in-flight applications requests would be awaited and finished gracefully.
/// </summary>
public void Shutdown()
{
var cancellationSource = new CancellationTokenSource(this.stopTimeout);
StopAsync(cancellationSource.Token).GetAwaiter().GetResult();
}
/// <summary>
/// Gracefully stop the runtime system only, but not the application.
/// Application requests will be abruptly terminated, while the internal system state is gracefully stopped and saved as much as possible.
/// </summary>
/// <param name="cancellationToken">
/// A cancellation token which can be used to promptly terminate the silo.
/// </param>
/// <returns>A <see cref="Task"/> representing the operation.</returns>
public async Task StopAsync(CancellationToken cancellationToken)
{
logger.LogInformation((int)ErrorCode.SiloShuttingDown, "Silo shutting down");
bool gracefully = !cancellationToken.IsCancellationRequested;
bool stopAlreadyInProgress = false;
lock (lockable)
{
if (this.SystemStatus.Equals(SystemStatus.Stopping) ||
this.SystemStatus.Equals(SystemStatus.ShuttingDown) ||
this.SystemStatus.Equals(SystemStatus.Terminated))
{
stopAlreadyInProgress = true;
// Drop through to wait below
}
else if (!this.SystemStatus.Equals(SystemStatus.Running))
{
throw new InvalidOperationException($"Attempted to stop a silo which is not in the {nameof(SystemStatus.Running)} state. This silo is in the {this.SystemStatus} state.");
}
else
{
if (gracefully)
this.SystemStatus = SystemStatus.ShuttingDown;
else
this.SystemStatus = SystemStatus.Stopping;
}
}
if (stopAlreadyInProgress)
{
logger.Info(ErrorCode.SiloStopInProgress, "Silo termination is in progress - Will wait for it to finish");
var pause = TimeSpan.FromSeconds(1);
while (!this.SystemStatus.Equals(SystemStatus.Terminated))
{
logger.Info(ErrorCode.WaitingForSiloStop, "Waiting {0} for termination to complete", pause);
await Task.Delay(pause).ConfigureAwait(false);
}
await this.SiloTerminated.ConfigureAwait(false);
return;
}
try
{
await this.lifecycleSchedulingSystemTarget.QueueTask(() => this.siloLifecycle.OnStop(cancellationToken)).ConfigureAwait(false);
}
finally
{
// Signal to all awaiters that the silo has terminated.
logger.LogInformation((int)ErrorCode.SiloShutDown, "Silo shutdown completed");
await Task.Run(() => this.siloTerminatedTask.TrySetResult(0)).ConfigureAwait(false);
}
}
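// Usage sketch (hypothetical host code; "silo" stands for an already started Silo instance):
// the three public entry points above differ only in how the CancellationToken handed to
// StopAsync is constructed.
//
//   silo.Stop();        // token is cancelled up front => ungraceful stop
//   silo.Shutdown();    // token cancels after stopTimeout => graceful stop with a default deadline
//
//   using (var cts = new CancellationTokenSource(TimeSpan.FromSeconds(30)))
//   {
//       await silo.StopAsync(cts.Token);   // graceful stop with a caller-chosen deadline
//   }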
private Task OnRuntimeServicesStop(CancellationToken ct)
{
if (this.isFastKilledNeeded || ct.IsCancellationRequested) // No time for this
return Task.CompletedTask;
// Start rejecting all silo-to-silo application messages
SafeExecute(messageCenter.BlockApplicationMessages);
return Task.CompletedTask;
}
private Task OnRuntimeInitializeStop(CancellationToken ct)
{
if (platformWatchdog != null)
{
SafeExecute(platformWatchdog.Stop); // Silo may be dying before platformWatchdog was set up
}
SafeExecute(messageCenter.Stop);
SafeExecute(siloStatistics.Stop);
SafeExecute(() => this.SystemStatus = SystemStatus.Terminated);
return Task.CompletedTask;
}
private async Task OnBecomeActiveStop(CancellationToken ct)
{
if (this.isFastKilledNeeded)
return;
bool gracefully = !ct.IsCancellationRequested;
try
{
if (gracefully)
{
// Stop LocalGrainDirectory
var resolver = new TaskCompletionSource<bool>(TaskCreationOptions.RunContinuationsAsynchronously);
localGrainDirectory.CacheValidator.WorkItemGroup.QueueAction(() =>
{
try
{
localGrainDirectory.Stop();
resolver.TrySetResult(true);
}
catch (Exception exc)
{
resolver.TrySetException(exc);
}
});
await resolver.Task;
try
{
await catalog.DeactivateAllActivations().WithCancellation(ct);
}
catch (Exception exception)
{
logger.LogError(exception, "Error deactivating activations");
}
// Wait for all queued messages to be sent to the OutboundMessageQueue before the MessageCenter and OutboundMessageQueue stop.
await Task.WhenAny(Task.Delay(waitForMessageToBeQueuedForOutbound), ct.WhenCancelled());
}
}
catch (Exception exc)
{
logger.LogError(
(int)ErrorCode.SiloFailedToStopMembership,
exc,
"Failed to shutdown gracefully. About to terminate ungracefully");
this.isFastKilledNeeded = true;
}
// Stop the gateway
SafeExecute(messageCenter.StopAcceptingClientMessages);
}
private async Task OnActiveStop(CancellationToken ct)
{
if (this.isFastKilledNeeded || ct.IsCancellationRequested)
return;
if (this.messageCenter.Gateway != null)
{
await lifecycleSchedulingSystemTarget
.QueueTask(() => this.messageCenter.Gateway.SendStopSendMessages(this.grainFactory))
.WithCancellation(ct, "Sending gateway disconnection requests failed because the task was cancelled");
}
if (reminderService != null)
{
await reminderServiceContext
.QueueTask(reminderService.Stop)
.WithCancellation(ct, "Stopping ReminderService failed because the task was cancelled");
}
foreach (var grainService in grainServices)
{
await grainService
.QueueTask(grainService.Stop)
.WithCancellation(ct, "Stopping GrainService failed because the task was cancelled");
if (this.logger.IsEnabled(LogLevel.Debug))
{
logger.Debug(
"{GrainServiceType} Grain Service with Id {GrainServiceId} stopped successfully.",
grainService.GetType().FullName,
grainService.GetPrimaryKeyLong(out string ignored));
}
}
}
private void SafeExecute(Action action)
{
Utils.SafeExecute(action, logger, "Silo.Stop");
}
internal void RegisterSystemTarget(SystemTarget target) => this.catalog.RegisterSystemTarget(target);
/// <inheritdoc/>
public override string ToString()
{
return localGrainDirectory.ToString();
}
private void Participate(ISiloLifecycle lifecycle)
{
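// Note: each Subscribe call below registers a start and (where provided) a stop observer for one
// lifecycle stage; the silo lifecycle runs start observers from the lowest stage to the highest
// and stop observers in the reverse order (standard ISiloLifecycle ordering).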
lifecycle.Subscribe<Silo>(ServiceLifecycleStage.RuntimeInitialize, (ct) => Task.Run(() => OnRuntimeInitializeStart(ct)), (ct) => Task.Run(() => OnRuntimeInitializeStop(ct)));
lifecycle.Subscribe<Silo>(ServiceLifecycleStage.RuntimeServices, (ct) => Task.Run(() => OnRuntimeServicesStart(ct)), (ct) => Task.Run(() => OnRuntimeServicesStop(ct)));
lifecycle.Subscribe<Silo>(ServiceLifecycleStage.RuntimeGrainServices, (ct) => Task.Run(() => OnRuntimeGrainServicesStart(ct)));
lifecycle.Subscribe<Silo>(ServiceLifecycleStage.BecomeActive, (ct) => Task.Run(() => OnBecomeActiveStart(ct)), (ct) => Task.Run(() => OnBecomeActiveStop(ct)));
lifecycle.Subscribe<Silo>(ServiceLifecycleStage.Active, (ct) => Task.Run(() => OnActiveStart(ct)), (ct) => Task.Run(() => OnActiveStop(ct)));
}
}
// A dummy system target for fallback scheduler
internal class FallbackSystemTarget : SystemTarget
{
public FallbackSystemTarget(ILocalSiloDetails localSiloDetails, ILoggerFactory loggerFactory)
: base(Constants.FallbackSystemTargetType, localSiloDetails.SiloAddress, loggerFactory)
{
}
}
// A dummy system target used to schedule silo lifecycle operations
internal class LifecycleSchedulingSystemTarget : SystemTarget
{
public LifecycleSchedulingSystemTarget(ILocalSiloDetails localSiloDetails, ILoggerFactory loggerFactory)
: base(Constants.LifecycleSchedulingSystemTargetType, localSiloDetails.SiloAddress, loggerFactory)
{
}
}
}
| |
/*
* Copyright (c) Citrix Systems, Inc.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1) Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2) Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
* FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
* COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
* SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
* STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
* OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections;
using System.Collections.Generic;
using System.ComponentModel;
using System.Globalization;
using Newtonsoft.Json;
namespace XenAPI
{
/// <summary>
/// Represents a patch stored on a server
/// First published in XenServer 4.0.
/// </summary>
public partial class Host_patch : XenObject<Host_patch>
{
#region Constructors
public Host_patch()
{
}
public Host_patch(string uuid,
string name_label,
string name_description,
string version,
XenRef<Host> host,
bool applied,
DateTime timestamp_applied,
long size,
XenRef<Pool_patch> pool_patch,
Dictionary<string, string> other_config)
{
this.uuid = uuid;
this.name_label = name_label;
this.name_description = name_description;
this.version = version;
this.host = host;
this.applied = applied;
this.timestamp_applied = timestamp_applied;
this.size = size;
this.pool_patch = pool_patch;
this.other_config = other_config;
}
/// <summary>
/// Creates a new Host_patch from a Hashtable.
/// Note that the fields not contained in the Hashtable
/// will be created with their default values.
/// </summary>
/// <param name="table"></param>
public Host_patch(Hashtable table)
: this()
{
UpdateFrom(table);
}
/// <summary>
/// Creates a new Host_patch from a Proxy_Host_patch.
/// </summary>
/// <param name="proxy"></param>
public Host_patch(Proxy_Host_patch proxy)
{
UpdateFrom(proxy);
}
#endregion
/// <summary>
/// Updates each field of this instance with the value of
/// the corresponding field of a given Host_patch.
/// </summary>
public override void UpdateFrom(Host_patch update)
{
uuid = update.uuid;
name_label = update.name_label;
name_description = update.name_description;
version = update.version;
host = update.host;
applied = update.applied;
timestamp_applied = update.timestamp_applied;
size = update.size;
pool_patch = update.pool_patch;
other_config = update.other_config;
}
internal void UpdateFrom(Proxy_Host_patch proxy)
{
uuid = proxy.uuid == null ? null : proxy.uuid;
name_label = proxy.name_label == null ? null : proxy.name_label;
name_description = proxy.name_description == null ? null : proxy.name_description;
version = proxy.version == null ? null : proxy.version;
host = proxy.host == null ? null : XenRef<Host>.Create(proxy.host);
applied = (bool)proxy.applied;
timestamp_applied = proxy.timestamp_applied;
size = proxy.size == null ? 0 : long.Parse(proxy.size);
pool_patch = proxy.pool_patch == null ? null : XenRef<Pool_patch>.Create(proxy.pool_patch);
other_config = proxy.other_config == null ? null : Maps.convert_from_proxy_string_string(proxy.other_config);
}
public Proxy_Host_patch ToProxy()
{
Proxy_Host_patch result_ = new Proxy_Host_patch();
result_.uuid = uuid ?? "";
result_.name_label = name_label ?? "";
result_.name_description = name_description ?? "";
result_.version = version ?? "";
result_.host = host ?? "";
result_.applied = applied;
result_.timestamp_applied = timestamp_applied;
result_.size = size.ToString();
result_.pool_patch = pool_patch ?? "";
result_.other_config = Maps.convert_to_proxy_string_string(other_config);
return result_;
}
/// <summary>
/// Given a Hashtable with field-value pairs, it updates the fields of this Host_patch
/// with the values listed in the Hashtable. Note that only the fields contained
/// in the Hashtable will be updated and the rest will remain the same.
/// </summary>
/// <param name="table"></param>
public void UpdateFrom(Hashtable table)
{
if (table.ContainsKey("uuid"))
uuid = Marshalling.ParseString(table, "uuid");
if (table.ContainsKey("name_label"))
name_label = Marshalling.ParseString(table, "name_label");
if (table.ContainsKey("name_description"))
name_description = Marshalling.ParseString(table, "name_description");
if (table.ContainsKey("version"))
version = Marshalling.ParseString(table, "version");
if (table.ContainsKey("host"))
host = Marshalling.ParseRef<Host>(table, "host");
if (table.ContainsKey("applied"))
applied = Marshalling.ParseBool(table, "applied");
if (table.ContainsKey("timestamp_applied"))
timestamp_applied = Marshalling.ParseDateTime(table, "timestamp_applied");
if (table.ContainsKey("size"))
size = Marshalling.ParseLong(table, "size");
if (table.ContainsKey("pool_patch"))
pool_patch = Marshalling.ParseRef<Pool_patch>(table, "pool_patch");
if (table.ContainsKey("other_config"))
other_config = Maps.convert_from_proxy_string_string(Marshalling.ParseHashTable(table, "other_config"));
}
public bool DeepEquals(Host_patch other)
{
if (ReferenceEquals(null, other))
return false;
if (ReferenceEquals(this, other))
return true;
return Helper.AreEqual2(this._uuid, other._uuid) &&
Helper.AreEqual2(this._name_label, other._name_label) &&
Helper.AreEqual2(this._name_description, other._name_description) &&
Helper.AreEqual2(this._version, other._version) &&
Helper.AreEqual2(this._host, other._host) &&
Helper.AreEqual2(this._applied, other._applied) &&
Helper.AreEqual2(this._timestamp_applied, other._timestamp_applied) &&
Helper.AreEqual2(this._size, other._size) &&
Helper.AreEqual2(this._pool_patch, other._pool_patch) &&
Helper.AreEqual2(this._other_config, other._other_config);
}
internal static List<Host_patch> ProxyArrayToObjectList(Proxy_Host_patch[] input)
{
var result = new List<Host_patch>();
foreach (var item in input)
result.Add(new Host_patch(item));
return result;
}
public override string SaveChanges(Session session, string opaqueRef, Host_patch server)
{
if (opaqueRef == null)
{
System.Diagnostics.Debug.Assert(false, "Cannot create instances of this type on the server");
return "";
}
else
{
if (!Helper.AreEqual2(_other_config, server._other_config))
{
Host_patch.set_other_config(session, opaqueRef, _other_config);
}
return null;
}
}
/// <summary>
/// Get a record containing the current state of the given host_patch.
/// First published in XenServer 4.0.
/// Deprecated since XenServer 7.1.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_host_patch">The opaque_ref of the given host_patch</param>
[Deprecated("XenServer 7.1")]
public static Host_patch get_record(Session session, string _host_patch)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.host_patch_get_record(session.opaque_ref, _host_patch);
else
return new Host_patch(session.proxy.host_patch_get_record(session.opaque_ref, _host_patch ?? "").parse());
}
/// <summary>
/// Get a reference to the host_patch instance with the specified UUID.
/// First published in XenServer 4.0.
/// Deprecated since XenServer 7.1.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_uuid">UUID of object to return</param>
[Deprecated("XenServer 7.1")]
public static XenRef<Host_patch> get_by_uuid(Session session, string _uuid)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.host_patch_get_by_uuid(session.opaque_ref, _uuid);
else
return XenRef<Host_patch>.Create(session.proxy.host_patch_get_by_uuid(session.opaque_ref, _uuid ?? "").parse());
}
/// <summary>
/// Get all the host_patch instances with the given label.
/// First published in XenServer 4.0.
/// Deprecated since XenServer 7.1.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_label">label of object to return</param>
[Deprecated("XenServer 7.1")]
public static List<XenRef<Host_patch>> get_by_name_label(Session session, string _label)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.host_patch_get_by_name_label(session.opaque_ref, _label);
else
return XenRef<Host_patch>.Create(session.proxy.host_patch_get_by_name_label(session.opaque_ref, _label ?? "").parse());
}
/// <summary>
/// Get the uuid field of the given host_patch.
/// First published in XenServer 4.0.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_host_patch">The opaque_ref of the given host_patch</param>
public static string get_uuid(Session session, string _host_patch)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.host_patch_get_uuid(session.opaque_ref, _host_patch);
else
return session.proxy.host_patch_get_uuid(session.opaque_ref, _host_patch ?? "").parse();
}
/// <summary>
/// Get the name/label field of the given host_patch.
/// First published in XenServer 4.0.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_host_patch">The opaque_ref of the given host_patch</param>
public static string get_name_label(Session session, string _host_patch)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.host_patch_get_name_label(session.opaque_ref, _host_patch);
else
return session.proxy.host_patch_get_name_label(session.opaque_ref, _host_patch ?? "").parse();
}
/// <summary>
/// Get the name/description field of the given host_patch.
/// First published in XenServer 4.0.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_host_patch">The opaque_ref of the given host_patch</param>
public static string get_name_description(Session session, string _host_patch)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.host_patch_get_name_description(session.opaque_ref, _host_patch);
else
return session.proxy.host_patch_get_name_description(session.opaque_ref, _host_patch ?? "").parse();
}
/// <summary>
/// Get the version field of the given host_patch.
/// First published in XenServer 4.0.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_host_patch">The opaque_ref of the given host_patch</param>
public static string get_version(Session session, string _host_patch)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.host_patch_get_version(session.opaque_ref, _host_patch);
else
return session.proxy.host_patch_get_version(session.opaque_ref, _host_patch ?? "").parse();
}
/// <summary>
/// Get the host field of the given host_patch.
/// First published in XenServer 4.0.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_host_patch">The opaque_ref of the given host_patch</param>
public static XenRef<Host> get_host(Session session, string _host_patch)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.host_patch_get_host(session.opaque_ref, _host_patch);
else
return XenRef<Host>.Create(session.proxy.host_patch_get_host(session.opaque_ref, _host_patch ?? "").parse());
}
/// <summary>
/// Get the applied field of the given host_patch.
/// First published in XenServer 4.0.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_host_patch">The opaque_ref of the given host_patch</param>
public static bool get_applied(Session session, string _host_patch)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.host_patch_get_applied(session.opaque_ref, _host_patch);
else
return (bool)session.proxy.host_patch_get_applied(session.opaque_ref, _host_patch ?? "").parse();
}
/// <summary>
/// Get the timestamp_applied field of the given host_patch.
/// First published in XenServer 4.0.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_host_patch">The opaque_ref of the given host_patch</param>
public static DateTime get_timestamp_applied(Session session, string _host_patch)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.host_patch_get_timestamp_applied(session.opaque_ref, _host_patch);
else
return session.proxy.host_patch_get_timestamp_applied(session.opaque_ref, _host_patch ?? "").parse();
}
/// <summary>
/// Get the size field of the given host_patch.
/// First published in XenServer 4.0.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_host_patch">The opaque_ref of the given host_patch</param>
public static long get_size(Session session, string _host_patch)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.host_patch_get_size(session.opaque_ref, _host_patch);
else
return long.Parse(session.proxy.host_patch_get_size(session.opaque_ref, _host_patch ?? "").parse());
}
/// <summary>
/// Get the pool_patch field of the given host_patch.
/// First published in XenServer 4.1.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_host_patch">The opaque_ref of the given host_patch</param>
public static XenRef<Pool_patch> get_pool_patch(Session session, string _host_patch)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.host_patch_get_pool_patch(session.opaque_ref, _host_patch);
else
return XenRef<Pool_patch>.Create(session.proxy.host_patch_get_pool_patch(session.opaque_ref, _host_patch ?? "").parse());
}
/// <summary>
/// Get the other_config field of the given host_patch.
/// First published in XenServer 4.1.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_host_patch">The opaque_ref of the given host_patch</param>
public static Dictionary<string, string> get_other_config(Session session, string _host_patch)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.host_patch_get_other_config(session.opaque_ref, _host_patch);
else
return Maps.convert_from_proxy_string_string(session.proxy.host_patch_get_other_config(session.opaque_ref, _host_patch ?? "").parse());
}
/// <summary>
/// Set the other_config field of the given host_patch.
/// First published in XenServer 4.1.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_host_patch">The opaque_ref of the given host_patch</param>
/// <param name="_other_config">New value to set</param>
public static void set_other_config(Session session, string _host_patch, Dictionary<string, string> _other_config)
{
if (session.JsonRpcClient != null)
session.JsonRpcClient.host_patch_set_other_config(session.opaque_ref, _host_patch, _other_config);
else
session.proxy.host_patch_set_other_config(session.opaque_ref, _host_patch ?? "", Maps.convert_to_proxy_string_string(_other_config)).parse();
}
/// <summary>
/// Add the given key-value pair to the other_config field of the given host_patch.
/// First published in XenServer 4.1.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_host_patch">The opaque_ref of the given host_patch</param>
/// <param name="_key">Key to add</param>
/// <param name="_value">Value to add</param>
public static void add_to_other_config(Session session, string _host_patch, string _key, string _value)
{
if (session.JsonRpcClient != null)
session.JsonRpcClient.host_patch_add_to_other_config(session.opaque_ref, _host_patch, _key, _value);
else
session.proxy.host_patch_add_to_other_config(session.opaque_ref, _host_patch ?? "", _key ?? "", _value ?? "").parse();
}
/// <summary>
/// Remove the given key and its corresponding value from the other_config field of the given host_patch. If the key is not in that Map, then do nothing.
/// First published in XenServer 4.1.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_host_patch">The opaque_ref of the given host_patch</param>
/// <param name="_key">Key to remove</param>
public static void remove_from_other_config(Session session, string _host_patch, string _key)
{
if (session.JsonRpcClient != null)
session.JsonRpcClient.host_patch_remove_from_other_config(session.opaque_ref, _host_patch, _key);
else
session.proxy.host_patch_remove_from_other_config(session.opaque_ref, _host_patch ?? "", _key ?? "").parse();
}
/// <summary>
/// Destroy the specified host patch, removing it from the disk. This does NOT reverse the patch
/// First published in XenServer 4.0.
/// Deprecated since XenServer 4.1.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_host_patch">The opaque_ref of the given host_patch</param>
[Deprecated("XenServer 4.1")]
public static void destroy(Session session, string _host_patch)
{
if (session.JsonRpcClient != null)
session.JsonRpcClient.host_patch_destroy(session.opaque_ref, _host_patch);
else
session.proxy.host_patch_destroy(session.opaque_ref, _host_patch ?? "").parse();
}
/// <summary>
/// Destroy the specified host patch, removing it from the disk. This does NOT reverse the patch
/// First published in XenServer 4.0.
/// Deprecated since XenServer 4.1.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_host_patch">The opaque_ref of the given host_patch</param>
[Deprecated("XenServer 4.1")]
public static XenRef<Task> async_destroy(Session session, string _host_patch)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.async_host_patch_destroy(session.opaque_ref, _host_patch);
else
return XenRef<Task>.Create(session.proxy.async_host_patch_destroy(session.opaque_ref, _host_patch ?? "").parse());
}
/// <summary>
/// Apply the selected patch and return its output
/// First published in XenServer 4.0.
/// Deprecated since XenServer 4.1.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_host_patch">The opaque_ref of the given host_patch</param>
[Deprecated("XenServer 4.1")]
public static string apply(Session session, string _host_patch)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.host_patch_apply(session.opaque_ref, _host_patch);
else
return session.proxy.host_patch_apply(session.opaque_ref, _host_patch ?? "").parse();
}
/// <summary>
/// Apply the selected patch and return its output
/// First published in XenServer 4.0.
/// Deprecated since XenServer 4.1.
/// </summary>
/// <param name="session">The session</param>
/// <param name="_host_patch">The opaque_ref of the given host_patch</param>
[Deprecated("XenServer 4.1")]
public static XenRef<Task> async_apply(Session session, string _host_patch)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.async_host_patch_apply(session.opaque_ref, _host_patch);
else
return XenRef<Task>.Create(session.proxy.async_host_patch_apply(session.opaque_ref, _host_patch ?? "").parse());
}
/// <summary>
/// Return a list of all the host_patches known to the system.
/// First published in XenServer 4.0.
/// Deprecated since XenServer 7.1.
/// </summary>
/// <param name="session">The session</param>
[Deprecated("XenServer 7.1")]
public static List<XenRef<Host_patch>> get_all(Session session)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.host_patch_get_all(session.opaque_ref);
else
return XenRef<Host_patch>.Create(session.proxy.host_patch_get_all(session.opaque_ref).parse());
}
/// <summary>
/// Get all the host_patch Records at once, in a single XML RPC call
/// First published in XenServer 4.0.
/// </summary>
/// <param name="session">The session</param>
public static Dictionary<XenRef<Host_patch>, Host_patch> get_all_records(Session session)
{
if (session.JsonRpcClient != null)
return session.JsonRpcClient.host_patch_get_all_records(session.opaque_ref);
else
return XenRef<Host_patch>.Create<Proxy_Host_patch>(session.proxy.host_patch_get_all_records(session.opaque_ref).parse());
}
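// Usage sketch (hypothetical caller code; assumes an open XenAPI Session named "session"):
//   Dictionary<XenRef<Host_patch>, Host_patch> records = Host_patch.get_all_records(session);
//   foreach (KeyValuePair<XenRef<Host_patch>, Host_patch> kvp in records)
//       Console.WriteLine(kvp.Value.name_label + ": applied=" + kvp.Value.applied);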
/// <summary>
/// Unique identifier/object reference
/// </summary>
public virtual string uuid
{
get { return _uuid; }
set
{
if (!Helper.AreEqual(value, _uuid))
{
_uuid = value;
Changed = true;
NotifyPropertyChanged("uuid");
}
}
}
private string _uuid = "";
/// <summary>
/// a human-readable name
/// </summary>
public virtual string name_label
{
get { return _name_label; }
set
{
if (!Helper.AreEqual(value, _name_label))
{
_name_label = value;
Changed = true;
NotifyPropertyChanged("name_label");
}
}
}
private string _name_label = "";
/// <summary>
/// a notes field containing human-readable description
/// </summary>
public virtual string name_description
{
get { return _name_description; }
set
{
if (!Helper.AreEqual(value, _name_description))
{
_name_description = value;
Changed = true;
NotifyPropertyChanged("name_description");
}
}
}
private string _name_description = "";
/// <summary>
/// Patch version number
/// </summary>
public virtual string version
{
get { return _version; }
set
{
if (!Helper.AreEqual(value, _version))
{
_version = value;
Changed = true;
NotifyPropertyChanged("version");
}
}
}
private string _version = "";
/// <summary>
/// Host the patch relates to
/// </summary>
[JsonConverter(typeof(XenRefConverter<Host>))]
public virtual XenRef<Host> host
{
get { return _host; }
set
{
if (!Helper.AreEqual(value, _host))
{
_host = value;
Changed = true;
NotifyPropertyChanged("host");
}
}
}
private XenRef<Host> _host = new XenRef<Host>(Helper.NullOpaqueRef);
/// <summary>
/// True if the patch has been applied
/// </summary>
public virtual bool applied
{
get { return _applied; }
set
{
if (!Helper.AreEqual(value, _applied))
{
_applied = value;
Changed = true;
NotifyPropertyChanged("applied");
}
}
}
private bool _applied;
/// <summary>
/// Time the patch was applied
/// </summary>
[JsonConverter(typeof(XenDateTimeConverter))]
public virtual DateTime timestamp_applied
{
get { return _timestamp_applied; }
set
{
if (!Helper.AreEqual(value, _timestamp_applied))
{
_timestamp_applied = value;
Changed = true;
NotifyPropertyChanged("timestamp_applied");
}
}
}
private DateTime _timestamp_applied;
/// <summary>
/// Size of the patch
/// </summary>
public virtual long size
{
get { return _size; }
set
{
if (!Helper.AreEqual(value, _size))
{
_size = value;
Changed = true;
NotifyPropertyChanged("size");
}
}
}
private long _size;
/// <summary>
/// The patch applied
/// First published in XenServer 4.1.
/// </summary>
[JsonConverter(typeof(XenRefConverter<Pool_patch>))]
public virtual XenRef<Pool_patch> pool_patch
{
get { return _pool_patch; }
set
{
if (!Helper.AreEqual(value, _pool_patch))
{
_pool_patch = value;
Changed = true;
NotifyPropertyChanged("pool_patch");
}
}
}
private XenRef<Pool_patch> _pool_patch = new XenRef<Pool_patch>(Helper.NullOpaqueRef);
/// <summary>
/// additional configuration
/// First published in XenServer 4.1.
/// </summary>
[JsonConverter(typeof(StringStringMapConverter))]
public virtual Dictionary<string, string> other_config
{
get { return _other_config; }
set
{
if (!Helper.AreEqual(value, _other_config))
{
_other_config = value;
Changed = true;
NotifyPropertyChanged("other_config");
}
}
}
private Dictionary<string, string> _other_config = new Dictionary<string, string>() {};
}
}
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gax = Google.Api.Gax;
using gcvtv = Google.Cloud.Video.Transcoder.V1Beta1;
using sys = System;
namespace Google.Cloud.Video.Transcoder.V1Beta1
{
/// <summary>Resource name for the <c>Job</c> resource.</summary>
public sealed partial class JobName : gax::IResourceName, sys::IEquatable<JobName>
{
/// <summary>The possible contents of <see cref="JobName"/>.</summary>
public enum ResourceNameType
{
/// <summary>An unparsed resource name.</summary>
Unparsed = 0,
/// <summary>
/// A resource name with pattern <c>projects/{project}/locations/{location}/jobs/{job}</c>.
/// </summary>
ProjectLocationJob = 1,
}
private static gax::PathTemplate s_projectLocationJob = new gax::PathTemplate("projects/{project}/locations/{location}/jobs/{job}");
/// <summary>Creates a <see cref="JobName"/> containing an unparsed resource name.</summary>
/// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param>
/// <returns>
/// A new instance of <see cref="JobName"/> containing the provided <paramref name="unparsedResourceName"/>.
/// </returns>
public static JobName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) =>
new JobName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName)));
/// <summary>
/// Creates a <see cref="JobName"/> with the pattern <c>projects/{project}/locations/{location}/jobs/{job}</c>.
/// </summary>
/// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="jobId">The <c>Job</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>A new instance of <see cref="JobName"/> constructed from the provided ids.</returns>
public static JobName FromProjectLocationJob(string projectId, string locationId, string jobId) =>
new JobName(ResourceNameType.ProjectLocationJob, projectId: gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), locationId: gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), jobId: gax::GaxPreconditions.CheckNotNullOrEmpty(jobId, nameof(jobId)));
/// <summary>
/// Formats the IDs into the string representation of this <see cref="JobName"/> with pattern
/// <c>projects/{project}/locations/{location}/jobs/{job}</c>.
/// </summary>
/// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="jobId">The <c>Job</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="JobName"/> with pattern
/// <c>projects/{project}/locations/{location}/jobs/{job}</c>.
/// </returns>
public static string Format(string projectId, string locationId, string jobId) =>
FormatProjectLocationJob(projectId, locationId, jobId);
/// <summary>
/// Formats the IDs into the string representation of this <see cref="JobName"/> with pattern
/// <c>projects/{project}/locations/{location}/jobs/{job}</c>.
/// </summary>
/// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="jobId">The <c>Job</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="JobName"/> with pattern
/// <c>projects/{project}/locations/{location}/jobs/{job}</c>.
/// </returns>
public static string FormatProjectLocationJob(string projectId, string locationId, string jobId) =>
s_projectLocationJob.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), gax::GaxPreconditions.CheckNotNullOrEmpty(jobId, nameof(jobId)));
/// <summary>Parses the given resource name string into a new <see cref="JobName"/> instance.</summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>projects/{project}/locations/{location}/jobs/{job}</c></description></item>
/// </list>
/// </remarks>
/// <param name="jobName">The resource name in string form. Must not be <c>null</c>.</param>
/// <returns>The parsed <see cref="JobName"/> if successful.</returns>
public static JobName Parse(string jobName) => Parse(jobName, false);
/// <summary>
/// Parses the given resource name string into a new <see cref="JobName"/> instance; optionally allowing an
/// unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>projects/{project}/locations/{location}/jobs/{job}</c></description></item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="jobName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <returns>The parsed <see cref="JobName"/> if successful.</returns>
public static JobName Parse(string jobName, bool allowUnparsed) =>
TryParse(jobName, allowUnparsed, out JobName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern.");
/// <summary>Tries to parse the given resource name string into a new <see cref="JobName"/> instance.</summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>projects/{project}/locations/{location}/jobs/{job}</c></description></item>
/// </list>
/// </remarks>
/// <param name="jobName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="result">
/// When this method returns, the parsed <see cref="JobName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string jobName, out JobName result) => TryParse(jobName, false, out result);
/// <summary>
/// Tries to parse the given resource name string into a new <see cref="JobName"/> instance; optionally allowing
/// an unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item><description><c>projects/{project}/locations/{location}/jobs/{job}</c></description></item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="jobName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <param name="result">
/// When this method returns, the parsed <see cref="JobName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string jobName, bool allowUnparsed, out JobName result)
{
gax::GaxPreconditions.CheckNotNull(jobName, nameof(jobName));
gax::TemplatedResourceName resourceName;
if (s_projectLocationJob.TryParseName(jobName, out resourceName))
{
result = FromProjectLocationJob(resourceName[0], resourceName[1], resourceName[2]);
return true;
}
if (allowUnparsed)
{
if (gax::UnparsedResourceName.TryParse(jobName, out gax::UnparsedResourceName unparsedResourceName))
{
result = FromUnparsed(unparsedResourceName);
return true;
}
}
result = null;
return false;
}
private JobName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string jobId = null, string locationId = null, string projectId = null)
{
Type = type;
UnparsedResource = unparsedResourceName;
JobId = jobId;
LocationId = locationId;
ProjectId = projectId;
}
/// <summary>
/// Constructs a new instance of a <see cref="JobName"/> class from the component parts of pattern
/// <c>projects/{project}/locations/{location}/jobs/{job}</c>
/// </summary>
/// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="jobId">The <c>Job</c> ID. Must not be <c>null</c> or empty.</param>
public JobName(string projectId, string locationId, string jobId) : this(ResourceNameType.ProjectLocationJob, projectId: gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), locationId: gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), jobId: gax::GaxPreconditions.CheckNotNullOrEmpty(jobId, nameof(jobId)))
{
}
/// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary>
public ResourceNameType Type { get; }
/// <summary>
/// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an
/// unparsed resource name.
/// </summary>
public gax::UnparsedResourceName UnparsedResource { get; }
/// <summary>
/// The <c>Job</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string JobId { get; }
/// <summary>
/// The <c>Location</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string LocationId { get; }
/// <summary>
/// The <c>Project</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string ProjectId { get; }
/// <summary>Whether this instance contains a resource name with a known pattern.</summary>
public bool IsKnownPattern => Type != ResourceNameType.Unparsed;
/// <summary>The string representation of the resource name.</summary>
/// <returns>The string representation of the resource name.</returns>
public override string ToString()
{
switch (Type)
{
case ResourceNameType.Unparsed: return UnparsedResource.ToString();
case ResourceNameType.ProjectLocationJob: return s_projectLocationJob.Expand(ProjectId, LocationId, JobId);
default: throw new sys::InvalidOperationException("Unrecognized resource-type.");
}
}
/// <summary>Returns a hash code for this resource name.</summary>
public override int GetHashCode() => ToString().GetHashCode();
/// <inheritdoc/>
public override bool Equals(object obj) => Equals(obj as JobName);
/// <inheritdoc/>
public bool Equals(JobName other) => ToString() == other?.ToString();
/// <inheritdoc/>
public static bool operator ==(JobName a, JobName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false);
/// <inheritdoc/>
public static bool operator !=(JobName a, JobName b) => !(a == b);
}
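// Usage sketch (hypothetical IDs): constructing, formatting and parsing a JobName.
//   JobName name = JobName.FromProjectLocationJob("my-project", "us-central1", "my-job");
//   string formatted = name.ToString();  // "projects/my-project/locations/us-central1/jobs/my-job"
//   JobName parsed = JobName.Parse(formatted);
//   bool same = parsed == name;          // true: equality compares the string representations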
/// <summary>Resource name for the <c>JobTemplate</c> resource.</summary>
public sealed partial class JobTemplateName : gax::IResourceName, sys::IEquatable<JobTemplateName>
{
/// <summary>The possible contents of <see cref="JobTemplateName"/>.</summary>
public enum ResourceNameType
{
/// <summary>An unparsed resource name.</summary>
Unparsed = 0,
/// <summary>
/// A resource name with pattern <c>projects/{project}/locations/{location}/jobTemplates/{job_template}</c>.
/// </summary>
ProjectLocationJobTemplate = 1,
}
private static gax::PathTemplate s_projectLocationJobTemplate = new gax::PathTemplate("projects/{project}/locations/{location}/jobTemplates/{job_template}");
/// <summary>Creates a <see cref="JobTemplateName"/> containing an unparsed resource name.</summary>
/// <param name="unparsedResourceName">The unparsed resource name. Must not be <c>null</c>.</param>
/// <returns>
/// A new instance of <see cref="JobTemplateName"/> containing the provided
/// <paramref name="unparsedResourceName"/>.
/// </returns>
public static JobTemplateName FromUnparsed(gax::UnparsedResourceName unparsedResourceName) =>
new JobTemplateName(ResourceNameType.Unparsed, gax::GaxPreconditions.CheckNotNull(unparsedResourceName, nameof(unparsedResourceName)));
/// <summary>
/// Creates a <see cref="JobTemplateName"/> with the pattern
/// <c>projects/{project}/locations/{location}/jobTemplates/{job_template}</c>.
/// </summary>
/// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="jobTemplateId">The <c>JobTemplate</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>A new instance of <see cref="JobTemplateName"/> constructed from the provided ids.</returns>
public static JobTemplateName FromProjectLocationJobTemplate(string projectId, string locationId, string jobTemplateId) =>
new JobTemplateName(ResourceNameType.ProjectLocationJobTemplate, projectId: gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), locationId: gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), jobTemplateId: gax::GaxPreconditions.CheckNotNullOrEmpty(jobTemplateId, nameof(jobTemplateId)));
/// <summary>
/// Formats the IDs into the string representation of this <see cref="JobTemplateName"/> with pattern
/// <c>projects/{project}/locations/{location}/jobTemplates/{job_template}</c>.
/// </summary>
/// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="jobTemplateId">The <c>JobTemplate</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="JobTemplateName"/> with pattern
/// <c>projects/{project}/locations/{location}/jobTemplates/{job_template}</c>.
/// </returns>
public static string Format(string projectId, string locationId, string jobTemplateId) =>
FormatProjectLocationJobTemplate(projectId, locationId, jobTemplateId);
/// <summary>
/// Formats the IDs into the string representation of this <see cref="JobTemplateName"/> with pattern
/// <c>projects/{project}/locations/{location}/jobTemplates/{job_template}</c>.
/// </summary>
/// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="jobTemplateId">The <c>JobTemplate</c> ID. Must not be <c>null</c> or empty.</param>
/// <returns>
/// The string representation of this <see cref="JobTemplateName"/> with pattern
/// <c>projects/{project}/locations/{location}/jobTemplates/{job_template}</c>.
/// </returns>
public static string FormatProjectLocationJobTemplate(string projectId, string locationId, string jobTemplateId) =>
s_projectLocationJobTemplate.Expand(gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), gax::GaxPreconditions.CheckNotNullOrEmpty(jobTemplateId, nameof(jobTemplateId)));
/// <summary>Parses the given resource name string into a new <see cref="JobTemplateName"/> instance.</summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item>
/// <description><c>projects/{project}/locations/{location}/jobTemplates/{job_template}</c></description>
/// </item>
/// </list>
/// </remarks>
/// <param name="jobTemplateName">The resource name in string form. Must not be <c>null</c>.</param>
/// <returns>The parsed <see cref="JobTemplateName"/> if successful.</returns>
public static JobTemplateName Parse(string jobTemplateName) => Parse(jobTemplateName, false);
/// <summary>
/// Parses the given resource name string into a new <see cref="JobTemplateName"/> instance; optionally allowing
/// an unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item>
/// <description><c>projects/{project}/locations/{location}/jobTemplates/{job_template}</c></description>
/// </item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="jobTemplateName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <returns>The parsed <see cref="JobTemplateName"/> if successful.</returns>
public static JobTemplateName Parse(string jobTemplateName, bool allowUnparsed) =>
TryParse(jobTemplateName, allowUnparsed, out JobTemplateName result) ? result : throw new sys::ArgumentException("The given resource-name matches no pattern.");
/// <summary>
/// Tries to parse the given resource name string into a new <see cref="JobTemplateName"/> instance.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item>
/// <description><c>projects/{project}/locations/{location}/jobTemplates/{job_template}</c></description>
/// </item>
/// </list>
/// </remarks>
/// <param name="jobTemplateName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="result">
/// When this method returns, the parsed <see cref="JobTemplateName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string jobTemplateName, out JobTemplateName result) =>
TryParse(jobTemplateName, false, out result);
/// <summary>
/// Tries to parse the given resource name string into a new <see cref="JobTemplateName"/> instance; optionally
/// allowing an unparseable resource name.
/// </summary>
/// <remarks>
/// To parse successfully, the resource name must be formatted as one of the following:
/// <list type="bullet">
/// <item>
/// <description><c>projects/{project}/locations/{location}/jobTemplates/{job_template}</c></description>
/// </item>
/// </list>
/// Or may be in any format if <paramref name="allowUnparsed"/> is <c>true</c>.
/// </remarks>
/// <param name="jobTemplateName">The resource name in string form. Must not be <c>null</c>.</param>
/// <param name="allowUnparsed">
/// If <c>true</c> will successfully store an unparseable resource name into the <see cref="UnparsedResource"/>
/// property; otherwise will throw an <see cref="sys::ArgumentException"/> if an unparseable resource name is
/// specified.
/// </param>
/// <param name="result">
/// When this method returns, the parsed <see cref="JobTemplateName"/>, or <c>null</c> if parsing failed.
/// </param>
/// <returns><c>true</c> if the name was parsed successfully; <c>false</c> otherwise.</returns>
public static bool TryParse(string jobTemplateName, bool allowUnparsed, out JobTemplateName result)
{
gax::GaxPreconditions.CheckNotNull(jobTemplateName, nameof(jobTemplateName));
gax::TemplatedResourceName resourceName;
if (s_projectLocationJobTemplate.TryParseName(jobTemplateName, out resourceName))
{
result = FromProjectLocationJobTemplate(resourceName[0], resourceName[1], resourceName[2]);
return true;
}
if (allowUnparsed)
{
if (gax::UnparsedResourceName.TryParse(jobTemplateName, out gax::UnparsedResourceName unparsedResourceName))
{
result = FromUnparsed(unparsedResourceName);
return true;
}
}
result = null;
return false;
}
private JobTemplateName(ResourceNameType type, gax::UnparsedResourceName unparsedResourceName = null, string jobTemplateId = null, string locationId = null, string projectId = null)
{
Type = type;
UnparsedResource = unparsedResourceName;
JobTemplateId = jobTemplateId;
LocationId = locationId;
ProjectId = projectId;
}
/// <summary>
/// Constructs a new instance of a <see cref="JobTemplateName"/> class from the component parts of pattern
/// <c>projects/{project}/locations/{location}/jobTemplates/{job_template}</c>
/// </summary>
/// <param name="projectId">The <c>Project</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="locationId">The <c>Location</c> ID. Must not be <c>null</c> or empty.</param>
/// <param name="jobTemplateId">The <c>JobTemplate</c> ID. Must not be <c>null</c> or empty.</param>
public JobTemplateName(string projectId, string locationId, string jobTemplateId) : this(ResourceNameType.ProjectLocationJobTemplate, projectId: gax::GaxPreconditions.CheckNotNullOrEmpty(projectId, nameof(projectId)), locationId: gax::GaxPreconditions.CheckNotNullOrEmpty(locationId, nameof(locationId)), jobTemplateId: gax::GaxPreconditions.CheckNotNullOrEmpty(jobTemplateId, nameof(jobTemplateId)))
{
}
/// <summary>The <see cref="ResourceNameType"/> of the contained resource name.</summary>
public ResourceNameType Type { get; }
/// <summary>
/// The contained <see cref="gax::UnparsedResourceName"/>. Only non-<c>null</c> if this instance contains an
/// unparsed resource name.
/// </summary>
public gax::UnparsedResourceName UnparsedResource { get; }
/// <summary>
/// The <c>JobTemplate</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string JobTemplateId { get; }
/// <summary>
/// The <c>Location</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string LocationId { get; }
/// <summary>
/// The <c>Project</c> ID. Will not be <c>null</c>, unless this instance contains an unparsed resource name.
/// </summary>
public string ProjectId { get; }
/// <summary>Whether this instance contains a resource name with a known pattern.</summary>
public bool IsKnownPattern => Type != ResourceNameType.Unparsed;
/// <summary>The string representation of the resource name.</summary>
/// <returns>The string representation of the resource name.</returns>
public override string ToString()
{
switch (Type)
{
case ResourceNameType.Unparsed: return UnparsedResource.ToString();
case ResourceNameType.ProjectLocationJobTemplate: return s_projectLocationJobTemplate.Expand(ProjectId, LocationId, JobTemplateId);
default: throw new sys::InvalidOperationException("Unrecognized resource-type.");
}
}
/// <summary>Returns a hash code for this resource name.</summary>
public override int GetHashCode() => ToString().GetHashCode();
/// <inheritdoc/>
public override bool Equals(object obj) => Equals(obj as JobTemplateName);
/// <inheritdoc/>
public bool Equals(JobTemplateName other) => ToString() == other?.ToString();
/// <inheritdoc/>
public static bool operator ==(JobTemplateName a, JobTemplateName b) => ReferenceEquals(a, b) || (a?.Equals(b) ?? false);
/// <inheritdoc/>
public static bool operator !=(JobTemplateName a, JobTemplateName b) => !(a == b);
}
public partial class Job
{
/// <summary>
/// <see cref="gcvtv::JobName"/>-typed view over the <see cref="Name"/> resource name property.
/// </summary>
public gcvtv::JobName JobName
{
get => string.IsNullOrEmpty(Name) ? null : gcvtv::JobName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
public partial class JobTemplate
{
/// <summary>
/// <see cref="gcvtv::JobTemplateName"/>-typed view over the <see cref="Name"/> resource name property.
/// </summary>
public gcvtv::JobTemplateName JobTemplateName
{
get => string.IsNullOrEmpty(Name) ? null : gcvtv::JobTemplateName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace System.Xml.Xsl.XsltOld
{
using System.Diagnostics;
using System.IO;
using System.Globalization;
using System.Collections;
using System.Xml.XPath;
using System.Xml.Xsl.Runtime;
using MS.Internal.Xml.XPath;
using System.Reflection;
using System.Security;
using System.Runtime.Versioning;
internal class XsltCompileContext : XsltContext
{
private InputScopeManager _manager;
private Processor _processor;
// storage for the functions
private static Hashtable s_FunctionTable = CreateFunctionTable();
private static IXsltContextFunction s_FuncNodeSet = new FuncNodeSet();
private const string f_NodeSet = "node-set";
internal XsltCompileContext(InputScopeManager manager, Processor processor) : base(/*dummy*/false)
{
_manager = manager;
_processor = processor;
}
internal XsltCompileContext() : base(/*dummy*/ false) { }
internal void Recycle()
{
_manager = null;
_processor = null;
}
internal void Reinitialize(InputScopeManager manager, Processor processor)
{
_manager = manager;
_processor = processor;
}
public override int CompareDocument(string baseUri, string nextbaseUri)
{
return string.Compare(baseUri, nextbaseUri, StringComparison.Ordinal);
}
// Namespace support
public override string DefaultNamespace
{
get { return string.Empty; }
}
public override string LookupNamespace(string prefix)
{
return _manager.ResolveXPathNamespace(prefix);
}
// --------------------------- XsltContext -------------------
// Resolving variables and functions
public override IXsltContextVariable ResolveVariable(string prefix, string name)
{
string namespaceURI = this.LookupNamespace(prefix);
XmlQualifiedName qname = new XmlQualifiedName(name, namespaceURI);
IXsltContextVariable variable = _manager.VariableScope.ResolveVariable(qname);
if (variable == null)
{
throw XsltException.Create(SR.Xslt_InvalidVariable, qname.ToString());
}
return variable;
}
internal object EvaluateVariable(VariableAction variable)
{
object result = _processor.GetVariableValue(variable);
if (result == null && !variable.IsGlobal)
{
// This was an uninitialized local variable. Maybe we have a suitable global variable too?
VariableAction global = _manager.VariableScope.ResolveGlobalVariable(variable.Name);
if (global != null)
{
result = _processor.GetVariableValue(global);
}
}
if (result == null)
{
throw XsltException.Create(SR.Xslt_InvalidVariable, variable.Name.ToString());
}
return result;
}
// Whitespace stripping support
public override bool Whitespace
{
get { return _processor.Stylesheet.Whitespace; }
}
public override bool PreserveWhitespace(XPathNavigator node)
{
node = node.Clone();
node.MoveToParent();
return _processor.Stylesheet.PreserveWhiteSpace(_processor, node);
}
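// FindBestMethod narrows the candidate methods in three passes: first by name (and, when publicOnly
// is requested, by public visibility), then by parameter count, and finally by XPath-convertible
// parameter types. If more than one candidate survives a pass, the first remaining method is returned.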
private MethodInfo FindBestMethod(MethodInfo[] methods, bool ignoreCase, bool publicOnly, string name, XPathResultType[] argTypes)
{
int length = methods.Length;
int free = 0;
// restrict the search to methods with the same name and the requested protection attribute
for (int i = 0; i < length; i++)
{
if (string.Equals(name, methods[i].Name, ignoreCase ? StringComparison.OrdinalIgnoreCase : StringComparison.Ordinal))
{
if (!publicOnly || methods[i].GetBaseDefinition().IsPublic)
{
methods[free++] = methods[i];
}
}
}
length = free;
if (length == 0)
{
// this is the only place we return null in this function
return null;
}
if (argTypes == null)
{
// without argument types we can't do a more detailed search
return methods[0];
}
// restrict search by number of parameters
free = 0;
for (int i = 0; i < length; i++)
{
if (methods[i].GetParameters().Length == argTypes.Length)
{
methods[free++] = methods[i];
}
}
length = free;
if (length <= 1)
{
// 0 -- no method found. We have to return non-null and let it fail with the correct exception on call.
// 1 -- no reason to continue the search anyway.
return methods[0];
}
// restrict search by parameters type
free = 0;
for (int i = 0; i < length; i++)
{
bool match = true;
ParameterInfo[] parameters = methods[i].GetParameters();
for (int par = 0; par < parameters.Length; par++)
{
XPathResultType required = argTypes[par];
if (required == XPathResultType.Any)
{
continue; // Any means we don't know the type and can't discriminate by it
}
XPathResultType actual = GetXPathType(parameters[par].ParameterType);
if (
actual != required &&
actual != XPathResultType.Any // the actual arg is object and we can pass everything here.
)
{
match = false;
break;
}
}
if (match)
{
methods[free++] = methods[i];
}
}
length = free;
return methods[0];
}
private const BindingFlags bindingFlags = BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Instance | BindingFlags.Static;
private IXsltContextFunction GetExtentionMethod(string ns, string name, XPathResultType[] argTypes, out object extension)
{
FuncExtension result = null;
extension = _processor.GetScriptObject(ns);
if (extension != null)
{
MethodInfo method = FindBestMethod(extension.GetType().GetMethods(bindingFlags), /*ignoreCase:*/true, /*publicOnly:*/false, name, argTypes);
if (method != null)
{
result = new FuncExtension(extension, method);
}
return result;
}
extension = _processor.GetExtensionObject(ns);
if (extension != null)
{
MethodInfo method = FindBestMethod(extension.GetType().GetMethods(bindingFlags), /*ignoreCase:*/false, /*publicOnly:*/true, name, argTypes);
if (method != null)
{
result = new FuncExtension(extension, method);
}
return result;
}
return null;
}
public override IXsltContextFunction ResolveFunction(string prefix, string name, XPathResultType[] argTypes)
{
IXsltContextFunction func = null;
if (prefix.Length == 0)
{
func = s_FunctionTable[name] as IXsltContextFunction;
}
else
{
string ns = this.LookupNamespace(prefix);
if (ns == XmlReservedNs.NsMsxsl && name == f_NodeSet)
{
func = s_FuncNodeSet;
}
else
{
object extension;
func = GetExtentionMethod(ns, name, argTypes, out extension);
if (extension == null)
{
throw XsltException.Create(SR.Xslt_ScriptInvalidPrefix, prefix); // BugBug: it would be better to report that method 'name' was not found
}
}
}
if (func == null)
{
throw XsltException.Create(SR.Xslt_UnknownXsltFunction, name);
}
if (argTypes.Length < func.Minargs || func.Maxargs < argTypes.Length)
{
throw XsltException.Create(SR.Xslt_WrongNumberArgs, name, argTypes.Length.ToString(CultureInfo.InvariantCulture));
}
return func;
}
//
// Xslt Function Extensions to XPath
//
private Uri ComposeUri(string thisUri, string baseUri)
{
Debug.Assert(thisUri != null && baseUri != null);
XmlResolver resolver = _processor.Resolver;
Uri uriBase = null;
if (baseUri.Length != 0)
{
uriBase = resolver.ResolveUri(null, baseUri);
}
return resolver.ResolveUri(uriBase, thisUri);
}
private XPathNodeIterator Document(object arg0, string baseUri)
{
XPathNodeIterator it = arg0 as XPathNodeIterator;
if (it != null)
{
ArrayList list = new ArrayList();
Hashtable documents = new Hashtable();
while (it.MoveNext())
{
Uri uri = ComposeUri(it.Current.Value, baseUri ?? it.Current.BaseURI);
if (!documents.ContainsKey(uri))
{
documents.Add(uri, null);
list.Add(_processor.GetNavigator(uri));
}
}
return new XPathArrayIterator(list);
}
else
{
return new XPathSingletonIterator(
_processor.GetNavigator(
ComposeUri(XmlConvert.ToXPathString(arg0), baseUri ?? _manager.Navigator.BaseURI)
)
);
}
}
private Hashtable BuildKeyTable(Key key, XPathNavigator root)
{
Hashtable keyTable = new Hashtable();
string matchStr = _processor.GetQueryExpression(key.MatchKey);
Query matchExpr = _processor.GetCompiledQuery(key.MatchKey);
Query useExpr = _processor.GetCompiledQuery(key.UseKey);
XPathNodeIterator sel = root.SelectDescendants(XPathNodeType.All, /*matchSelf:*/ false);
while (sel.MoveNext())
{
XPathNavigator node = sel.Current;
EvaluateKey(node, matchExpr, matchStr, useExpr, keyTable);
if (node.MoveToFirstAttribute())
{
do
{
EvaluateKey(node, matchExpr, matchStr, useExpr, keyTable);
} while (node.MoveToNextAttribute());
node.MoveToParent();
}
}
return keyTable;
}
private static void AddKeyValue(Hashtable keyTable, string key, XPathNavigator value, bool checkDuplicates)
{
ArrayList list = (ArrayList)keyTable[key];
if (list == null)
{
list = new ArrayList();
keyTable.Add(key, list);
}
else
{
Debug.Assert(
value.ComparePosition((XPathNavigator)list[list.Count - 1]) != XmlNodeOrder.Before,
"The way we traversing nodes should garantees node-order"
);
if (checkDuplicates)
{
// it's possible that this value was already associated with the current node,
// but if that happened the node is last in the list of values.
if (value.ComparePosition((XPathNavigator)list[list.Count - 1]) == XmlNodeOrder.Same)
{
return;
}
}
else
{
Debug.Assert(
value.ComparePosition((XPathNavigator)list[list.Count - 1]) != XmlNodeOrder.Same,
"checkDuplicates == false : We can't have duplicates"
);
}
}
list.Add(value.Clone());
}
private static void EvaluateKey(XPathNavigator node, Query matchExpr, string matchStr, Query useExpr, Hashtable keyTable)
{
try
{
if (matchExpr.MatchNode(node) == null)
{
return;
}
}
catch (XPathException)
{
throw XsltException.Create(SR.Xslt_InvalidPattern, matchStr);
}
object result = useExpr.Evaluate(new XPathSingletonIterator(node, /*moved:*/true));
XPathNodeIterator it = result as XPathNodeIterator;
if (it != null)
{
bool checkDuplicates = false;
while (it.MoveNext())
{
AddKeyValue(keyTable, /*key:*/it.Current.Value, /*value:*/node, checkDuplicates);
checkDuplicates = true;
}
}
else
{
string key = XmlConvert.ToXPathString(result);
AddKeyValue(keyTable, key, /*value:*/node, /*checkDuplicates:*/ false);
}
}
private DecimalFormat ResolveFormatName(string formatName)
{
string ns = string.Empty, local = string.Empty;
if (formatName != null)
{
string prefix;
PrefixQName.ParseQualifiedName(formatName, out prefix, out local);
ns = LookupNamespace(prefix);
}
DecimalFormat formatInfo = _processor.RootAction.GetDecimalFormat(new XmlQualifiedName(local, ns));
if (formatInfo == null)
{
if (formatName != null)
{
throw XsltException.Create(SR.Xslt_NoDecimalFormat, formatName);
}
formatInfo = new DecimalFormat(new NumberFormatInfo(), '#', '0', ';');
}
return formatInfo;
}
// see http://www.w3.org/TR/xslt#function-element-available
private bool ElementAvailable(string qname)
{
string name, prefix;
PrefixQName.ParseQualifiedName(qname, out prefix, out name);
string ns = _manager.ResolveXmlNamespace(prefix);
// msxsl:script is not an "instruction", so we return false for it.
if (ns == XmlReservedNs.NsXslt)
{
return (
name == "apply-imports" ||
name == "apply-templates" ||
name == "attribute" ||
name == "call-template" ||
name == "choose" ||
name == "comment" ||
name == "copy" ||
name == "copy-of" ||
name == "element" ||
name == "fallback" ||
name == "for-each" ||
name == "if" ||
name == "message" ||
name == "number" ||
name == "processing-instruction" ||
name == "text" ||
name == "value-of" ||
name == "variable"
);
}
return false;
}
// see: http://www.w3.org/TR/xslt#function-function-available
private bool FunctionAvailable(string qname)
{
string name, prefix;
PrefixQName.ParseQualifiedName(qname, out prefix, out name);
string ns = LookupNamespace(prefix);
if (ns == XmlReservedNs.NsMsxsl)
{
return name == f_NodeSet;
}
else if (ns.Length == 0)
{
return (
// It would be better to get this information from XPath
name == "last" ||
name == "position" ||
name == "name" ||
name == "namespace-uri" ||
name == "local-name" ||
name == "count" ||
name == "id" ||
name == "string" ||
name == "concat" ||
name == "starts-with" ||
name == "contains" ||
name == "substring-before" ||
name == "substring-after" ||
name == "substring" ||
name == "string-length" ||
name == "normalize-space" ||
name == "translate" ||
name == "boolean" ||
name == "not" ||
name == "true" ||
name == "false" ||
name == "lang" ||
name == "number" ||
name == "sum" ||
name == "floor" ||
name == "ceiling" ||
name == "round" ||
// XSLT functions:
(s_FunctionTable[name] != null && name != "unparsed-entity-uri")
);
}
else
{
// Is this a script or extension function?
object extension;
return GetExtentionMethod(ns, name, /*argTypes*/null, out extension) != null;
}
}
private XPathNodeIterator Current()
{
XPathNavigator nav = _processor.Current;
if (nav != null)
{
return new XPathSingletonIterator(nav.Clone());
}
return XPathEmptyIterator.Instance;
}
private string SystemProperty(string qname)
{
string result = string.Empty;
string prefix;
string local;
PrefixQName.ParseQualifiedName(qname, out prefix, out local);
// verify the prefix corresponds to the Xslt namespace
string urn = LookupNamespace(prefix);
if (urn == XmlReservedNs.NsXslt)
{
if (local == "version")
{
result = "1";
}
else if (local == "vendor")
{
result = "Microsoft";
}
else if (local == "vendor-url")
{
result = "http://www.microsoft.com";
}
}
else
{
if (urn == null && prefix != null)
{
// if the prefix exists, it has to be mapped to a namespace.
// Can it be "" here?
throw XsltException.Create(SR.Xslt_InvalidPrefix, prefix);
}
return string.Empty;
}
return result;
}
public static XPathResultType GetXPathType(Type type)
{
switch (Type.GetTypeCode(type))
{
case TypeCode.String:
return XPathResultType.String;
case TypeCode.Boolean:
return XPathResultType.Boolean;
case TypeCode.Object:
if (typeof(XPathNavigator).IsAssignableFrom(type) || typeof(IXPathNavigable).IsAssignableFrom(type))
{
return XPathResultType.Navigator;
}
if (typeof(XPathNodeIterator).IsAssignableFrom(type))
{
return XPathResultType.NodeSet;
}
// sdub: it would be better to check that the type is really object and otherwise return XPathResultType.Error
return XPathResultType.Any;
case TypeCode.DateTime:
return XPathResultType.Error;
default: /* all numeric types */
return XPathResultType.Number;
}
}
// ---------------- Xslt Function Implementations -------------------
//
private static Hashtable CreateFunctionTable()
{
Hashtable ft = new Hashtable(10);
{
ft["current"] = new FuncCurrent();
ft["unparsed-entity-uri"] = new FuncUnEntityUri();
ft["generate-id"] = new FuncGenerateId();
ft["system-property"] = new FuncSystemProp();
ft["element-available"] = new FuncElementAvailable();
ft["function-available"] = new FuncFunctionAvailable();
ft["document"] = new FuncDocument();
ft["key"] = new FuncKey();
ft["format-number"] = new FuncFormatNumber();
}
return ft;
}
// + IXsltContextFunction
// + XsltFunctionImpl func. name, min/max args, return type args types
// FuncCurrent "current" 0 0 XPathResultType.NodeSet { }
// FuncUnEntityUri "unparsed-entity-uri" 1 1 XPathResultType.String { XPathResultType.String }
// FuncGenerateId "generate-id" 0 1 XPathResultType.String { XPathResultType.NodeSet }
// FuncSystemProp "system-property" 1 1 XPathResultType.String { XPathResultType.String }
// FuncElementAvailable "element-available" 1 1 XPathResultType.Boolean { XPathResultType.String }
// FuncFunctionAvailable "function-available" 1 1 XPathResultType.Boolean { XPathResultType.String }
// FuncDocument "document" 1 2 XPathResultType.NodeSet { XPathResultType.Any , XPathResultType.NodeSet }
// FuncKey "key" 2 2 XPathResultType.NodeSet { XPathResultType.String , XPathResultType.Any }
// FuncFormatNumber "format-number" 2 3 XPathResultType.String { XPathResultType.Number , XPathResultType.String, XPathResultType.String }
// FuncNodeSet "msxsl:node-set" 1 1 XPathResultType.NodeSet { XPathResultType.Navigator }
// FuncExtension
//
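// Dispatch sketch (the "user"/"Foo" extension names below are hypothetical):
//   ResolveFunction("",      "document", argTypes) -> s_FunctionTable["document"]  (FuncDocument)
//   ResolveFunction("msxsl", "node-set", argTypes) -> s_FuncNodeSet                (FuncNodeSet)
//   ResolveFunction("user",  "Foo",      argTypes) -> GetExtentionMethod(...)      (FuncExtension wrapper)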
private abstract class XsltFunctionImpl : IXsltContextFunction
{
private int _minargs;
private int _maxargs;
private XPathResultType _returnType;
private XPathResultType[] _argTypes;
public XsltFunctionImpl() { }
public XsltFunctionImpl(int minArgs, int maxArgs, XPathResultType returnType, XPathResultType[] argTypes)
{
this.Init(minArgs, maxArgs, returnType, argTypes);
}
protected void Init(int minArgs, int maxArgs, XPathResultType returnType, XPathResultType[] argTypes)
{
_minargs = minArgs;
_maxargs = maxArgs;
_returnType = returnType;
_argTypes = argTypes;
}
public int Minargs { get { return _minargs; } }
public int Maxargs { get { return _maxargs; } }
public XPathResultType ReturnType { get { return _returnType; } }
public XPathResultType[] ArgTypes { get { return _argTypes; } }
public abstract object Invoke(XsltContext xsltContext, object[] args, XPathNavigator docContext);
// static helper methods:
public static XPathNodeIterator ToIterator(object argument)
{
XPathNodeIterator it = argument as XPathNodeIterator;
if (it == null)
{
throw XsltException.Create(SR.Xslt_NoNodeSetConversion);
}
return it;
}
public static XPathNavigator ToNavigator(object argument)
{
XPathNavigator nav = argument as XPathNavigator;
if (nav == null)
{
throw XsltException.Create(SR.Xslt_NoNavigatorConversion);
}
return nav;
}
private static string IteratorToString(XPathNodeIterator it)
{
Debug.Assert(it != null);
if (it.MoveNext())
{
return it.Current.Value;
}
return string.Empty;
}
public static string ToString(object argument)
{
XPathNodeIterator it = argument as XPathNodeIterator;
if (it != null)
{
return IteratorToString(it);
}
else
{
return XmlConvert.ToXPathString(argument);
}
}
public static bool ToBoolean(object argument)
{
XPathNodeIterator it = argument as XPathNodeIterator;
if (it != null)
{
return Convert.ToBoolean(IteratorToString(it), CultureInfo.InvariantCulture);
}
XPathNavigator nav = argument as XPathNavigator;
if (nav != null)
{
return Convert.ToBoolean(nav.ToString(), CultureInfo.InvariantCulture);
}
return Convert.ToBoolean(argument, CultureInfo.InvariantCulture);
}
public static double ToNumber(object argument)
{
XPathNodeIterator it = argument as XPathNodeIterator;
if (it != null)
{
return XmlConvert.ToXPathDouble(IteratorToString(it));
}
XPathNavigator nav = argument as XPathNavigator;
if (nav != null)
{
return XmlConvert.ToXPathDouble(nav.ToString());
}
return XmlConvert.ToXPathDouble(argument);
}
private static object ToNumeric(object argument, Type type)
{
return Convert.ChangeType(ToNumber(argument), type, CultureInfo.InvariantCulture);
}
public static object ConvertToXPathType(object val, XPathResultType xt, Type type)
{
switch (xt)
{
case XPathResultType.String:
// Unfortunately XPathResultType.String == XPathResultType.Navigator (this is wrong, but it can't be changed in Everett).
// Fortunately we have the CLR Type here, so let's discriminate by it.
if (type == typeof(string))
{
return ToString(val);
}
else
{
return ToNavigator(val);
}
case XPathResultType.Number: return ToNumeric(val, type);
case XPathResultType.Boolean: return ToBoolean(val);
case XPathResultType.NodeSet: return ToIterator(val);
// case XPathResultType.Navigator : return ToNavigator(val);
case XPathResultType.Any:
case XPathResultType.Error:
return val;
default:
Debug.Fail("unexpected XPath type");
return val;
}
}
}
private class FuncCurrent : XsltFunctionImpl
{
public FuncCurrent() : base(0, 0, XPathResultType.NodeSet, Array.Empty<XPathResultType>()) { }
public override object Invoke(XsltContext xsltContext, object[] args, XPathNavigator docContext)
{
return ((XsltCompileContext)xsltContext).Current();
}
}
private class FuncUnEntityUri : XsltFunctionImpl
{
public FuncUnEntityUri() : base(1, 1, XPathResultType.String, new XPathResultType[] { XPathResultType.String }) { }
public override object Invoke(XsltContext xsltContext, object[] args, XPathNavigator docContext)
{
throw XsltException.Create(SR.Xslt_UnsuppFunction, "unparsed-entity-uri");
}
}
private class FuncGenerateId : XsltFunctionImpl
{
public FuncGenerateId() : base(0, 1, XPathResultType.String, new XPathResultType[] { XPathResultType.NodeSet }) { }
public override object Invoke(XsltContext xsltContext, object[] args, XPathNavigator docContext)
{
if (args.Length > 0)
{
XPathNodeIterator it = ToIterator(args[0]);
if (it.MoveNext())
{
return it.Current.UniqueId;
}
else
{
// if the node-set is empty, return an empty string; otherwise return the generated id
return string.Empty;
}
}
else
{
return docContext.UniqueId;
}
}
}
private class FuncSystemProp : XsltFunctionImpl
{
public FuncSystemProp() : base(1, 1, XPathResultType.String, new XPathResultType[] { XPathResultType.String }) { }
public override object Invoke(XsltContext xsltContext, object[] args, XPathNavigator docContext)
{
return ((XsltCompileContext)xsltContext).SystemProperty(ToString(args[0]));
}
}
// see http://www.w3.org/TR/xslt#function-element-available
private class FuncElementAvailable : XsltFunctionImpl
{
public FuncElementAvailable() : base(1, 1, XPathResultType.Boolean, new XPathResultType[] { XPathResultType.String }) { }
public override object Invoke(XsltContext xsltContext, object[] args, XPathNavigator docContext)
{
return ((XsltCompileContext)xsltContext).ElementAvailable(ToString(args[0]));
}
}
// see: http://www.w3.org/TR/xslt#function-function-available
private class FuncFunctionAvailable : XsltFunctionImpl
{
public FuncFunctionAvailable() : base(1, 1, XPathResultType.Boolean, new XPathResultType[] { XPathResultType.String }) { }
public override object Invoke(XsltContext xsltContext, object[] args, XPathNavigator docContext)
{
return ((XsltCompileContext)xsltContext).FunctionAvailable(ToString(args[0]));
}
}
private class FuncDocument : XsltFunctionImpl
{
public FuncDocument() : base(1, 2, XPathResultType.NodeSet, new XPathResultType[] { XPathResultType.Any, XPathResultType.NodeSet }) { }
// SxS: This method uses resource names read from source document and does not expose any resources to the caller.
// It's OK to suppress the SxS warning.
public override object Invoke(XsltContext xsltContext, object[] args, XPathNavigator docContext)
{
string baseUri = null;
if (args.Length == 2)
{
XPathNodeIterator it = ToIterator(args[1]);
if (it.MoveNext())
{
baseUri = it.Current.BaseURI;
}
else
{
// http://www.w3.org/1999/11/REC-xslt-19991116-errata (E14):
// It is an error if the second argument node-set is empty and the URI reference is relative; the XSLT processor may signal the error;
// if it does not signal an error, it must recover by returning an empty node-set.
baseUri = string.Empty; // the call to Document will fail if args[0] is relative.
}
}
try
{
return ((XsltCompileContext)xsltContext).Document(args[0], baseUri);
}
catch (Exception e)
{
if (!XmlException.IsCatchableException(e))
{
throw;
}
return XPathEmptyIterator.Instance;
}
}
}
private class FuncKey : XsltFunctionImpl
{
public FuncKey() : base(2, 2, XPathResultType.NodeSet, new XPathResultType[] { XPathResultType.String, XPathResultType.Any }) { }
public override object Invoke(XsltContext xsltContext, object[] args, XPathNavigator docContext)
{
XsltCompileContext xsltCompileContext = (XsltCompileContext)xsltContext;
string local, prefix;
PrefixQName.ParseQualifiedName(ToString(args[0]), out prefix, out local);
string ns = xsltContext.LookupNamespace(prefix);
XmlQualifiedName keyName = new XmlQualifiedName(local, ns);
XPathNavigator root = docContext.Clone();
root.MoveToRoot();
ArrayList resultCollection = null;
foreach (Key key in xsltCompileContext._processor.KeyList)
{
if (key.Name == keyName)
{
Hashtable keyTable = key.GetKeys(root);
if (keyTable == null)
{
keyTable = xsltCompileContext.BuildKeyTable(key, root);
key.AddKey(root, keyTable);
}
XPathNodeIterator it = args[1] as XPathNodeIterator;
if (it != null)
{
it = it.Clone();
while (it.MoveNext())
{
resultCollection = AddToList(resultCollection, (ArrayList)keyTable[it.Current.Value]);
}
}
else
{
resultCollection = AddToList(resultCollection, (ArrayList)keyTable[ToString(args[1])]);
}
}
}
if (resultCollection == null)
{
return XPathEmptyIterator.Instance;
}
else if (resultCollection[0] is XPathNavigator)
{
return new XPathArrayIterator(resultCollection);
}
else
{
return new XPathMultyIterator(resultCollection);
}
}
private static ArrayList AddToList(ArrayList resultCollection, ArrayList newList)
{
if (newList == null)
{
return resultCollection;
}
if (resultCollection == null)
{
return newList;
}
Debug.Assert(resultCollection.Count != 0);
Debug.Assert(newList.Count != 0);
if (!(resultCollection[0] is ArrayList))
{
// Transform resultCollection from ArrayList(XPathNavigator) to ArrayList(ArrayList(XPathNavigator))
Debug.Assert(resultCollection[0] is XPathNavigator);
ArrayList firstList = resultCollection;
resultCollection = new ArrayList();
resultCollection.Add(firstList);
}
resultCollection.Add(newList);
return resultCollection;
}
}
private class FuncFormatNumber : XsltFunctionImpl
{
public FuncFormatNumber() : base(2, 3, XPathResultType.String, new XPathResultType[] { XPathResultType.Number, XPathResultType.String, XPathResultType.String }) { }
public override object Invoke(XsltContext xsltContext, object[] args, XPathNavigator docContext)
{
DecimalFormat formatInfo = ((XsltCompileContext)xsltContext).ResolveFormatName(args.Length == 3 ? ToString(args[2]) : null);
return DecimalFormatter.Format(ToNumber(args[0]), ToString(args[1]), formatInfo);
}
}
private class FuncNodeSet : XsltFunctionImpl
{
public FuncNodeSet() : base(1, 1, XPathResultType.NodeSet, new XPathResultType[] { XPathResultType.Navigator }) { }
public override object Invoke(XsltContext xsltContext, object[] args, XPathNavigator docContext)
{
return new XPathSingletonIterator(ToNavigator(args[0]));
}
}
private class FuncExtension : XsltFunctionImpl
{
private object _extension;
private MethodInfo _method;
private Type[] _types;
public FuncExtension(object extension, MethodInfo method)
{
Debug.Assert(extension != null);
Debug.Assert(method != null);
_extension = extension;
_method = method;
XPathResultType returnType = GetXPathType(method.ReturnType);
ParameterInfo[] parameters = method.GetParameters();
int minArgs = parameters.Length;
int maxArgs = parameters.Length;
_types = new Type[parameters.Length];
XPathResultType[] argTypes = new XPathResultType[parameters.Length];
bool optionalParams = true; // only trailing parameters may be optional; set to false on the first non-optional one.
for (int i = parameters.Length - 1; 0 <= i; i--)
{ // Reverse order is essential for counting optional parameters
_types[i] = parameters[i].ParameterType;
argTypes[i] = GetXPathType(parameters[i].ParameterType);
if (optionalParams)
{
if (parameters[i].IsOptional)
{
minArgs--;
}
else
{
optionalParams = false;
}
}
}
base.Init(minArgs, maxArgs, returnType, argTypes);
}
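// Example (illustrative): an extension method 'double Foo(string a, int b = 0)' would yield
// Minargs == 1, Maxargs == 2, and ArgTypes == { String, Number }.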
public override object Invoke(XsltContext xsltContext, object[] args, XPathNavigator docContext)
{
Debug.Assert(args.Length <= this.Maxargs, "We checked this at resolve time");
for (int i = args.Length - 1; 0 <= i; i--)
{
args[i] = ConvertToXPathType(args[i], this.ArgTypes[i], _types[i]);
}
return _method.Invoke(_extension, args);
}
}
}
}
| |
using System;
using Unity.Collections;
#if UNITY_EDITOR
using UnityEditor;
using UnityEditor.Rendering.LWRP;
#endif
using UnityEngine.Rendering.PostProcessing;
using UnityEngine.Experimental.GlobalIllumination;
using Lightmapping = UnityEngine.Experimental.GlobalIllumination.Lightmapping;
namespace UnityEngine.Rendering.LWRP
{
public sealed partial class LightweightRenderPipeline : RenderPipeline
{
static class PerFrameBuffer
{
public static int _GlossyEnvironmentColor;
public static int _SubtractiveShadowColor;
}
static class PerCameraBuffer
{
// TODO: This needs to account for stereo rendering
public static int _InvCameraViewProj;
public static int _ScaledScreenParams;
}
public const string k_ShaderTagName = "LightweightPipeline";
const string k_RenderCameraTag = "Render Camera";
public static float maxShadowBias
{
get => 10.0f;
}
public static float minRenderScale
{
get => 0.1f;
}
public static float maxRenderScale
{
get => 4.0f;
}
// Number of lights that can be shaded per object (in the for loop in the shader).
// This uses unity_4LightIndices to store an array of 4 light indices.
public static int maxPerObjectLights
{
get => 4;
}
// Light data is stored in a constant buffer (uniform array)
// This value has to match MAX_VISIBLE_LIGHTS in Input.hlsl
public static int maxVisibleAdditionalLights
{
get => 16;
}
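// Sketch (illustrative; the exact uniform names live in Input.hlsl): the shader side declares matching
// fixed-size arrays, e.g.
//   #define MAX_VISIBLE_LIGHTS 16
//   float4 _AdditionalLightsPosition[MAX_VISIBLE_LIGHTS];
// so changing the value returned here requires updating MAX_VISIBLE_LIGHTS in lockstep.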
public static LightweightRenderPipelineAsset asset
{
get
{
return GraphicsSettings.renderPipelineAsset as LightweightRenderPipelineAsset;
}
}
public LightweightRenderPipeline(LightweightRenderPipelineAsset asset)
{
SetSupportedRenderingFeatures();
PerFrameBuffer._GlossyEnvironmentColor = Shader.PropertyToID("_GlossyEnvironmentColor");
PerFrameBuffer._SubtractiveShadowColor = Shader.PropertyToID("_SubtractiveShadowColor");
PerCameraBuffer._InvCameraViewProj = Shader.PropertyToID("_InvCameraViewProj");
PerCameraBuffer._ScaledScreenParams = Shader.PropertyToID("_ScaledScreenParams");
// Let the engine know we have MSAA on for cases where we support an MSAA backbuffer
if (QualitySettings.antiAliasing != asset.msaaSampleCount)
QualitySettings.antiAliasing = asset.msaaSampleCount;
Shader.globalRenderPipeline = "LightweightPipeline";
Lightmapping.SetDelegate(lightsDelegate);
CameraCaptureBridge.enabled = true;
}
protected override void Dispose(bool disposing)
{
base.Dispose(disposing);
Shader.globalRenderPipeline = "";
SupportedRenderingFeatures.active = new SupportedRenderingFeatures();
#if UNITY_EDITOR
SceneViewDrawMode.ResetDrawMode();
#endif
Lightmapping.ResetDelegate();
CameraCaptureBridge.enabled = false;
}
protected override void Render(ScriptableRenderContext renderContext, Camera[] cameras)
{
BeginFrameRendering(renderContext, cameras);
GraphicsSettings.lightsUseLinearIntensity = (QualitySettings.activeColorSpace == ColorSpace.Linear);
GraphicsSettings.useScriptableRenderPipelineBatching = asset.useSRPBatcher;
SetupPerFrameShaderConstants();
SortCameras(cameras);
foreach (Camera camera in cameras)
{
BeginCameraRendering(renderContext, camera);
UnityEngine.Experimental.VFX.VFXManager.ProcessCamera(camera); // Visual Effect Graph is not yet a required package, but calling this method when there isn't any VisualEffect component has no effect (it is still needed for camera sorting in a Visual Effect Graph context)
RenderSingleCamera(renderContext, camera);
EndCameraRendering(renderContext, camera);
}
EndFrameRendering(renderContext, cameras);
}
public static void RenderSingleCamera(ScriptableRenderContext context, Camera camera)
{
if (!camera.TryGetCullingParameters(IsStereoEnabled(camera), out var cullingParameters))
return;
var settings = asset;
LWRPAdditionalCameraData additionalCameraData = null;
if (camera.cameraType == CameraType.Game || camera.cameraType == CameraType.VR)
additionalCameraData = camera.gameObject.GetComponent<LWRPAdditionalCameraData>();
InitializeCameraData(settings, camera, additionalCameraData, out var cameraData);
SetupPerCameraShaderConstants(cameraData);
ScriptableRenderer renderer = (additionalCameraData != null) ? additionalCameraData.scriptableRenderer : settings.scriptableRenderer;
if (renderer == null)
{
Debug.LogWarning(string.Format("Trying to render {0} with an invalid renderer. Camera rendering will be skipped.", camera.name));
return;
}
CommandBuffer cmd = CommandBufferPool.Get(camera.name);
using (new ProfilingSample(cmd, camera.name))
{
renderer.Clear();
renderer.SetupCullingParameters(ref cullingParameters, ref cameraData);
context.ExecuteCommandBuffer(cmd);
cmd.Clear();
#if UNITY_EDITOR
// Emit scene view UI
if (cameraData.isSceneViewCamera)
ScriptableRenderContext.EmitWorldGeometryForSceneView(camera);
#endif
var cullResults = context.Cull(ref cullingParameters);
InitializeRenderingData(settings, ref cameraData, ref cullResults, out var renderingData);
renderer.Setup(context, ref renderingData);
renderer.Execute(context, ref renderingData);
}
context.ExecuteCommandBuffer(cmd);
CommandBufferPool.Release(cmd);
context.Submit();
}
static void SetSupportedRenderingFeatures()
{
#if UNITY_EDITOR
SupportedRenderingFeatures.active = new SupportedRenderingFeatures()
{
reflectionProbeModes = SupportedRenderingFeatures.ReflectionProbeModes.None,
defaultMixedLightingModes = SupportedRenderingFeatures.LightmapMixedBakeModes.Subtractive,
mixedLightingModes = SupportedRenderingFeatures.LightmapMixedBakeModes.Subtractive | SupportedRenderingFeatures.LightmapMixedBakeModes.IndirectOnly,
lightmapBakeTypes = LightmapBakeType.Baked | LightmapBakeType.Mixed,
lightmapsModes = LightmapsMode.CombinedDirectional | LightmapsMode.NonDirectional,
lightProbeProxyVolumes = false,
motionVectors = false,
receiveShadows = false,
reflectionProbes = true
};
SceneViewDrawMode.SetupDrawMode();
#endif
}
static void InitializeCameraData(LightweightRenderPipelineAsset settings, Camera camera, LWRPAdditionalCameraData additionalCameraData, out CameraData cameraData)
{
const float kRenderScaleThreshold = 0.05f;
cameraData.camera = camera;
cameraData.isStereoEnabled = IsStereoEnabled(camera);
int msaaSamples = 1;
if (camera.allowMSAA && settings.msaaSampleCount > 1)
msaaSamples = (camera.targetTexture != null) ? camera.targetTexture.antiAliasing : settings.msaaSampleCount;
if (Camera.main == camera && camera.cameraType == CameraType.Game && camera.targetTexture == null)
{
bool msaaSampleCountHasChanged = false;
int currentQualitySettingsSampleCount = QualitySettings.antiAliasing;
if (currentQualitySettingsSampleCount != msaaSamples &&
!(currentQualitySettingsSampleCount == 0 && msaaSamples == 1))
{
msaaSampleCountHasChanged = true;
}
// There's no exposed API to control how a backbuffer is created with MSAA.
// By setting QualitySettings.antiAliasing we match the sample count in the camera data with the backbuffer.
// We only do this for the main camera, and it only takes effect at the beginning of the next frame.
// This setting should not be changed on a per-frame basis, so that's fine.
QualitySettings.antiAliasing = msaaSamples;
if (cameraData.isStereoEnabled && msaaSampleCountHasChanged)
XR.XRDevice.UpdateEyeTextureMSAASetting();
}
cameraData.isSceneViewCamera = camera.cameraType == CameraType.SceneView;
cameraData.isHdrEnabled = camera.allowHDR && settings.supportsHDR;
cameraData.postProcessLayer = camera.GetComponent<PostProcessLayer>();
cameraData.postProcessEnabled = cameraData.postProcessLayer != null && cameraData.postProcessLayer.isActiveAndEnabled;
// Disable post-processing in mobile VR. It's not stable on mobile yet.
if (cameraData.isStereoEnabled && Application.isMobilePlatform)
cameraData.postProcessEnabled = false;
Rect cameraRect = camera.rect;
cameraData.isDefaultViewport = (!(Math.Abs(cameraRect.x) > 0.0f || Math.Abs(cameraRect.y) > 0.0f ||
Math.Abs(cameraRect.width) < 1.0f || Math.Abs(cameraRect.height) < 1.0f));
// If XR is enabled, use the XR renderScale.
// Discard variations smaller than kRenderScaleThreshold.
// Scaling is only enabled for the game view.
float usedRenderScale = XRGraphics.enabled ? XRGraphics.eyeTextureResolutionScale : settings.renderScale;
cameraData.renderScale = (Mathf.Abs(1.0f - usedRenderScale) < kRenderScaleThreshold) ? 1.0f : usedRenderScale;
cameraData.renderScale = (camera.cameraType == CameraType.Game) ? cameraData.renderScale : 1.0f;
bool anyShadowsEnabled = settings.supportsMainLightShadows || settings.supportsAdditionalLightShadows;
cameraData.maxShadowDistance = (anyShadowsEnabled) ? settings.shadowDistance : 0.0f;
if (additionalCameraData != null)
{
cameraData.maxShadowDistance = (additionalCameraData.renderShadows) ? cameraData.maxShadowDistance : 0.0f;
cameraData.requiresDepthTexture = additionalCameraData.requiresDepthTexture;
cameraData.requiresOpaqueTexture = additionalCameraData.requiresColorTexture;
}
else
{
cameraData.requiresDepthTexture = settings.supportsCameraDepthTexture;
cameraData.requiresOpaqueTexture = settings.supportsCameraOpaqueTexture;
}
cameraData.requiresDepthTexture |= cameraData.isSceneViewCamera || cameraData.postProcessEnabled;
var commonOpaqueFlags = SortingCriteria.CommonOpaque;
var noFrontToBackOpaqueFlags = SortingCriteria.SortingLayer | SortingCriteria.RenderQueue | SortingCriteria.OptimizeStateChanges | SortingCriteria.CanvasOrder;
bool hasHSRGPU = SystemInfo.hasHiddenSurfaceRemovalOnGPU;
bool canSkipFrontToBackSorting = (camera.opaqueSortMode == OpaqueSortMode.Default && hasHSRGPU) || camera.opaqueSortMode == OpaqueSortMode.NoDistanceSort;
cameraData.defaultOpaqueSortFlags = canSkipFrontToBackSorting ? noFrontToBackOpaqueFlags : commonOpaqueFlags;
cameraData.captureActions = CameraCaptureBridge.GetCaptureActions(camera);
cameraData.cameraTargetDescriptor = CreateRenderTextureDescriptor(camera, cameraData.renderScale,
cameraData.isStereoEnabled, cameraData.isHdrEnabled, msaaSamples);
}
static void InitializeRenderingData(LightweightRenderPipelineAsset settings, ref CameraData cameraData, ref CullingResults cullResults,
out RenderingData renderingData)
{
var visibleLights = cullResults.visibleLights;
int mainLightIndex = GetMainLightIndex(settings, visibleLights);
bool mainLightCastShadows = false;
bool additionalLightsCastShadows = false;
if (cameraData.maxShadowDistance > 0.0f)
{
mainLightCastShadows = (mainLightIndex != -1 && visibleLights[mainLightIndex].light != null &&
visibleLights[mainLightIndex].light.shadows != LightShadows.None);
// Additional lights can only cast shadows when they are shaded per-pixel
if (settings.additionalLightsRenderingMode == LightRenderingMode.PerPixel)
{
for (int i = 0; i < visibleLights.Length; ++i)
{
if (i == mainLightIndex)
continue;
Light light = visibleLights[i].light;
// LWRP doesn't support additional directional lights or point light shadows yet
if (visibleLights[i].lightType == LightType.Spot && light != null && light.shadows != LightShadows.None)
{
additionalLightsCastShadows = true;
break;
}
}
}
}
renderingData.cullResults = cullResults;
renderingData.cameraData = cameraData;
InitializeLightData(settings, visibleLights, mainLightIndex, out renderingData.lightData);
InitializeShadowData(settings, visibleLights, mainLightCastShadows, additionalLightsCastShadows && !renderingData.lightData.shadeAdditionalLightsPerVertex, out renderingData.shadowData);
renderingData.supportsDynamicBatching = settings.supportsDynamicBatching;
renderingData.perObjectData = GetPerObjectLightFlags(renderingData.lightData.additionalLightsCount);
bool platformNeedsToKillAlpha = Application.platform == RuntimePlatform.IPhonePlayer ||
Application.platform == RuntimePlatform.Android ||
Application.platform == RuntimePlatform.tvOS;
renderingData.killAlphaInFinalBlit = !Graphics.preserveFramebufferAlpha && platformNeedsToKillAlpha;
}
static void InitializeShadowData(LightweightRenderPipelineAsset settings, NativeArray<VisibleLight> visibleLights, bool mainLightCastShadows, bool additionalLightsCastShadows, out ShadowData shadowData)
{
m_ShadowBiasData.Clear();
for (int i = 0; i < visibleLights.Length; ++i)
{
Light light = visibleLights[i].light;
LWRPAdditionalLightData data =
(light != null) ? light.gameObject.GetComponent<LWRPAdditionalLightData>() : null;
if (data && !data.usePipelineSettings)
m_ShadowBiasData.Add(new Vector4(light.shadowBias, light.shadowNormalBias, 0.0f, 0.0f));
else
m_ShadowBiasData.Add(new Vector4(settings.shadowDepthBias, settings.shadowNormalBias, 0.0f, 0.0f));
}
shadowData.bias = m_ShadowBiasData;
// Until we have keyword stripping, force single-cascade hard shadows on GLES2
bool supportsScreenSpaceShadows = SystemInfo.graphicsDeviceType != GraphicsDeviceType.OpenGLES2;
shadowData.supportsMainLightShadows = settings.supportsMainLightShadows && mainLightCastShadows;
// We resolve shadows in screen space when cascades are enabled to save ALU, since computing the cascade index + shadowCoord per fragment is expensive
shadowData.requiresScreenSpaceShadowResolve = shadowData.supportsMainLightShadows && supportsScreenSpaceShadows && settings.shadowCascadeOption != ShadowCascadesOption.NoCascades;
int shadowCascadesCount;
switch (settings.shadowCascadeOption)
{
case ShadowCascadesOption.FourCascades:
shadowCascadesCount = 4;
break;
case ShadowCascadesOption.TwoCascades:
shadowCascadesCount = 2;
break;
default:
shadowCascadesCount = 1;
break;
}
shadowData.mainLightShadowCascadesCount = (shadowData.requiresScreenSpaceShadowResolve) ? shadowCascadesCount : 1;
shadowData.mainLightShadowmapWidth = settings.mainLightShadowmapResolution;
shadowData.mainLightShadowmapHeight = settings.mainLightShadowmapResolution;
switch (shadowData.mainLightShadowCascadesCount)
{
case 1:
shadowData.mainLightShadowCascadesSplit = new Vector3(1.0f, 0.0f, 0.0f);
break;
case 2:
shadowData.mainLightShadowCascadesSplit = new Vector3(settings.cascade2Split, 1.0f, 0.0f);
break;
default:
shadowData.mainLightShadowCascadesSplit = settings.cascade4Split;
break;
}
shadowData.supportsAdditionalLightShadows = settings.supportsAdditionalLightShadows && additionalLightsCastShadows;
shadowData.additionalLightsShadowmapWidth = shadowData.additionalLightsShadowmapHeight = settings.additionalLightsShadowmapResolution;
shadowData.supportsSoftShadows = settings.supportsSoftShadows && (shadowData.supportsMainLightShadows || shadowData.supportsAdditionalLightShadows);
shadowData.shadowmapDepthBufferBits = 16;
}
static void InitializeLightData(LightweightRenderPipelineAsset settings, NativeArray<VisibleLight> visibleLights, int mainLightIndex, out LightData lightData)
{
int maxPerObjectAdditionalLights = LightweightRenderPipeline.maxPerObjectLights;
int maxVisibleAdditionalLights = LightweightRenderPipeline.maxVisibleAdditionalLights;
lightData.mainLightIndex = mainLightIndex;
if (settings.additionalLightsRenderingMode != LightRenderingMode.Disabled)
{
lightData.additionalLightsCount =
Math.Min((mainLightIndex != -1) ? visibleLights.Length - 1 : visibleLights.Length,
maxVisibleAdditionalLights);
lightData.maxPerObjectAdditionalLightsCount = Math.Min(settings.maxAdditionalLightsCount, maxPerObjectAdditionalLights);
}
else
{
lightData.additionalLightsCount = 0;
lightData.maxPerObjectAdditionalLightsCount = 0;
}
lightData.shadeAdditionalLightsPerVertex = settings.additionalLightsRenderingMode == LightRenderingMode.PerVertex;
lightData.visibleLights = visibleLights;
lightData.supportsMixedLighting = settings.supportsMixedLighting;
}
static PerObjectData GetPerObjectLightFlags(int additionalLightsCount)
{
var configuration = PerObjectData.ReflectionProbes | PerObjectData.Lightmaps | PerObjectData.LightProbe | PerObjectData.LightData | PerObjectData.OcclusionProbe;
if (additionalLightsCount > 0)
configuration |= PerObjectData.LightIndices;
return configuration;
}
// Main Light is always a directional light
static int GetMainLightIndex(LightweightRenderPipelineAsset settings, NativeArray<VisibleLight> visibleLights)
{
int totalVisibleLights = visibleLights.Length;
if (totalVisibleLights == 0 || settings.mainLightRenderingMode != LightRenderingMode.PerPixel)
return -1;
Light sunLight = RenderSettings.sun;
int brightestDirectionalLightIndex = -1;
float brightestLightIntensity = 0.0f;
for (int i = 0; i < totalVisibleLights; ++i)
{
VisibleLight currVisibleLight = visibleLights[i];
Light currLight = currVisibleLight.light;
// Particle system lights have a null light property. We sort lights so that all particle lights
// come last; therefore, if the first light is a particle light then all remaining lights are particle lights.
// In that case we either have no main light or have already found it.
if (currLight == null)
break;
if (currLight == sunLight)
return i;
// If the sun light is not among the visible lights, we return the brightest directional light
if (currVisibleLight.lightType == LightType.Directional && currLight.intensity > brightestLightIntensity)
{
brightestLightIntensity = currLight.intensity;
brightestDirectionalLightIndex = i;
}
}
return brightestDirectionalLightIndex;
}
static void SetupPerFrameShaderConstants()
{
// When glossy reflections are OFF in the shader we set a constant color to use as indirect specular
SphericalHarmonicsL2 ambientSH = RenderSettings.ambientProbe;
Color linearGlossyEnvColor = new Color(ambientSH[0, 0], ambientSH[1, 0], ambientSH[2, 0]) * RenderSettings.reflectionIntensity;
Color glossyEnvColor = CoreUtils.ConvertLinearToActiveColorSpace(linearGlossyEnvColor);
Shader.SetGlobalVector(PerFrameBuffer._GlossyEnvironmentColor, glossyEnvColor);
// Used when subtractive mode is selected
Shader.SetGlobalVector(PerFrameBuffer._SubtractiveShadowColor, CoreUtils.ConvertSRGBToActiveColorSpace(RenderSettings.subtractiveShadowColor));
}
static void SetupPerCameraShaderConstants(CameraData cameraData)
{
Camera camera = cameraData.camera;
float cameraWidth = (float)cameraData.camera.pixelWidth * cameraData.renderScale;
float cameraHeight = (float)cameraData.camera.pixelHeight * cameraData.renderScale;
Shader.SetGlobalVector(PerCameraBuffer._ScaledScreenParams, new Vector4(cameraWidth, cameraHeight, 1.0f + 1.0f / cameraWidth, 1.0f + 1.0f / cameraHeight));
Matrix4x4 projMatrix = GL.GetGPUProjectionMatrix(camera.projectionMatrix, false);
Matrix4x4 viewMatrix = camera.worldToCameraMatrix;
Matrix4x4 viewProjMatrix = projMatrix * viewMatrix;
Matrix4x4 invViewProjMatrix = Matrix4x4.Inverse(viewProjMatrix);
Shader.SetGlobalMatrix(PerCameraBuffer._InvCameraViewProj, invViewProjMatrix);
}
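// Note (assumption, not stated in this file): _InvCameraViewProj set above is consumed on the shader
// side by passes that reconstruct positions from device depth, e.g. screen-space shadow resolve.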
static Lightmapping.RequestLightsDelegate lightsDelegate = (Light[] requests, NativeArray<LightDataGI> lightsOutput) =>
{
LightDataGI lightData = new LightDataGI();
for (int i = 0; i < requests.Length; i++)
{
Light light = requests[i];
switch (light.type)
{
case LightType.Directional:
DirectionalLight directionalLight = new DirectionalLight();
LightmapperUtils.Extract(light, ref directionalLight); lightData.Init(ref directionalLight);
break;
case LightType.Point:
PointLight pointLight = new PointLight();
LightmapperUtils.Extract(light, ref pointLight); lightData.Init(ref pointLight);
break;
case LightType.Spot:
SpotLight spotLight = new SpotLight();
LightmapperUtils.Extract(light, ref spotLight); lightData.Init(ref spotLight);
break;
case LightType.Area:
RectangleLight rectangleLight = new RectangleLight();
LightmapperUtils.Extract(light, ref rectangleLight); lightData.Init(ref rectangleLight);
break;
default:
lightData.InitNoBake(light.GetInstanceID());
break;
}
lightData.falloff = FalloffType.InverseSquared;
lightsOutput[i] = lightData;
}
};
}
}
| |
// Copyright 2022 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gaxgrpc = Google.Api.Gax.Grpc;
using gagr = Google.Api.Gax.ResourceNames;
using wkt = Google.Protobuf.WellKnownTypes;
using grpccore = Grpc.Core;
using moq = Moq;
using st = System.Threading;
using stt = System.Threading.Tasks;
using xunit = Xunit;
namespace Google.Cloud.BinaryAuthorization.V1Beta1.Tests
{
/// <summary>Generated unit tests.</summary>
public sealed class GeneratedBinauthzManagementServiceV1Beta1ClientTest
{
[xunit::FactAttribute]
public void GetPolicyRequestObject()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
GetPolicyRequest request = new GetPolicyRequest
{
PolicyName = PolicyName.FromProject("[PROJECT]"),
};
Policy expectedResponse = new Policy
{
PolicyName = PolicyName.FromProject("[PROJECT]"),
AdmissionWhitelistPatterns =
{
new AdmissionWhitelistPattern(),
},
ClusterAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
DefaultAdmissionRule = new AdmissionRule(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
GlobalPolicyEvaluationMode = Policy.Types.GlobalPolicyEvaluationMode.Disable,
KubernetesServiceAccountAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
IstioServiceIdentityAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
KubernetesNamespaceAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
};
mockGrpcClient.Setup(x => x.GetPolicy(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Policy response = client.GetPolicy(request);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetPolicyRequestObjectAsync()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
GetPolicyRequest request = new GetPolicyRequest
{
PolicyName = PolicyName.FromProject("[PROJECT]"),
};
Policy expectedResponse = new Policy
{
PolicyName = PolicyName.FromProject("[PROJECT]"),
AdmissionWhitelistPatterns =
{
new AdmissionWhitelistPattern(),
},
ClusterAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
DefaultAdmissionRule = new AdmissionRule(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
GlobalPolicyEvaluationMode = Policy.Types.GlobalPolicyEvaluationMode.Disable,
KubernetesServiceAccountAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
IstioServiceIdentityAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
KubernetesNamespaceAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
};
mockGrpcClient.Setup(x => x.GetPolicyAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Policy>(stt::Task.FromResult(expectedResponse), null, null, null, null));
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Policy responseCallSettings = await client.GetPolicyAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
Policy responseCancellationToken = await client.GetPolicyAsync(request, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public void GetPolicy()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
GetPolicyRequest request = new GetPolicyRequest
{
PolicyName = PolicyName.FromProject("[PROJECT]"),
};
Policy expectedResponse = new Policy
{
PolicyName = PolicyName.FromProject("[PROJECT]"),
AdmissionWhitelistPatterns =
{
new AdmissionWhitelistPattern(),
},
ClusterAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
DefaultAdmissionRule = new AdmissionRule(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
GlobalPolicyEvaluationMode = Policy.Types.GlobalPolicyEvaluationMode.Disable,
KubernetesServiceAccountAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
IstioServiceIdentityAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
KubernetesNamespaceAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
};
mockGrpcClient.Setup(x => x.GetPolicy(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Policy response = client.GetPolicy(request.Name);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetPolicyAsync()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
GetPolicyRequest request = new GetPolicyRequest
{
PolicyName = PolicyName.FromProject("[PROJECT]"),
};
Policy expectedResponse = new Policy
{
PolicyName = PolicyName.FromProject("[PROJECT]"),
AdmissionWhitelistPatterns =
{
new AdmissionWhitelistPattern(),
},
ClusterAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
DefaultAdmissionRule = new AdmissionRule(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
GlobalPolicyEvaluationMode = Policy.Types.GlobalPolicyEvaluationMode.Disable,
KubernetesServiceAccountAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
IstioServiceIdentityAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
KubernetesNamespaceAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
};
mockGrpcClient.Setup(x => x.GetPolicyAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Policy>(stt::Task.FromResult(expectedResponse), null, null, null, null));
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Policy responseCallSettings = await client.GetPolicyAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
Policy responseCancellationToken = await client.GetPolicyAsync(request.Name, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public void GetPolicyResourceNames()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
GetPolicyRequest request = new GetPolicyRequest
{
PolicyName = PolicyName.FromProject("[PROJECT]"),
};
Policy expectedResponse = new Policy
{
PolicyName = PolicyName.FromProject("[PROJECT]"),
AdmissionWhitelistPatterns =
{
new AdmissionWhitelistPattern(),
},
ClusterAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
DefaultAdmissionRule = new AdmissionRule(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
GlobalPolicyEvaluationMode = Policy.Types.GlobalPolicyEvaluationMode.Disable,
KubernetesServiceAccountAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
IstioServiceIdentityAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
KubernetesNamespaceAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
};
mockGrpcClient.Setup(x => x.GetPolicy(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Policy response = client.GetPolicy(request.PolicyName);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetPolicyResourceNamesAsync()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
GetPolicyRequest request = new GetPolicyRequest
{
PolicyName = PolicyName.FromProject("[PROJECT]"),
};
Policy expectedResponse = new Policy
{
PolicyName = PolicyName.FromProject("[PROJECT]"),
AdmissionWhitelistPatterns =
{
new AdmissionWhitelistPattern(),
},
ClusterAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
DefaultAdmissionRule = new AdmissionRule(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
GlobalPolicyEvaluationMode = Policy.Types.GlobalPolicyEvaluationMode.Disable,
KubernetesServiceAccountAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
IstioServiceIdentityAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
KubernetesNamespaceAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
};
mockGrpcClient.Setup(x => x.GetPolicyAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Policy>(stt::Task.FromResult(expectedResponse), null, null, null, null));
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Policy responseCallSettings = await client.GetPolicyAsync(request.PolicyName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
Policy responseCancellationToken = await client.GetPolicyAsync(request.PolicyName, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public void UpdatePolicyRequestObject()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
UpdatePolicyRequest request = new UpdatePolicyRequest
{
Policy = new Policy(),
};
Policy expectedResponse = new Policy
{
PolicyName = PolicyName.FromProject("[PROJECT]"),
AdmissionWhitelistPatterns =
{
new AdmissionWhitelistPattern(),
},
ClusterAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
DefaultAdmissionRule = new AdmissionRule(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
GlobalPolicyEvaluationMode = Policy.Types.GlobalPolicyEvaluationMode.Disable,
KubernetesServiceAccountAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
IstioServiceIdentityAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
KubernetesNamespaceAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
};
mockGrpcClient.Setup(x => x.UpdatePolicy(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Policy response = client.UpdatePolicy(request);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task UpdatePolicyRequestObjectAsync()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
UpdatePolicyRequest request = new UpdatePolicyRequest
{
Policy = new Policy(),
};
Policy expectedResponse = new Policy
{
PolicyName = PolicyName.FromProject("[PROJECT]"),
AdmissionWhitelistPatterns =
{
new AdmissionWhitelistPattern(),
},
ClusterAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
DefaultAdmissionRule = new AdmissionRule(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
GlobalPolicyEvaluationMode = Policy.Types.GlobalPolicyEvaluationMode.Disable,
KubernetesServiceAccountAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
IstioServiceIdentityAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
KubernetesNamespaceAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
};
mockGrpcClient.Setup(x => x.UpdatePolicyAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Policy>(stt::Task.FromResult(expectedResponse), null, null, null, null));
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Policy responseCallSettings = await client.UpdatePolicyAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
Policy responseCancellationToken = await client.UpdatePolicyAsync(request, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public void UpdatePolicy()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
UpdatePolicyRequest request = new UpdatePolicyRequest
{
Policy = new Policy(),
};
Policy expectedResponse = new Policy
{
PolicyName = PolicyName.FromProject("[PROJECT]"),
AdmissionWhitelistPatterns =
{
new AdmissionWhitelistPattern(),
},
ClusterAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
DefaultAdmissionRule = new AdmissionRule(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
GlobalPolicyEvaluationMode = Policy.Types.GlobalPolicyEvaluationMode.Disable,
KubernetesServiceAccountAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
IstioServiceIdentityAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
KubernetesNamespaceAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
};
mockGrpcClient.Setup(x => x.UpdatePolicy(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Policy response = client.UpdatePolicy(request.Policy);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task UpdatePolicyAsync()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
UpdatePolicyRequest request = new UpdatePolicyRequest
{
Policy = new Policy(),
};
Policy expectedResponse = new Policy
{
PolicyName = PolicyName.FromProject("[PROJECT]"),
AdmissionWhitelistPatterns =
{
new AdmissionWhitelistPattern(),
},
ClusterAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
DefaultAdmissionRule = new AdmissionRule(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
GlobalPolicyEvaluationMode = Policy.Types.GlobalPolicyEvaluationMode.Disable,
KubernetesServiceAccountAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
IstioServiceIdentityAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
KubernetesNamespaceAdmissionRules =
{
{
"key8a0b6e3c",
new AdmissionRule()
},
},
};
mockGrpcClient.Setup(x => x.UpdatePolicyAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Policy>(stt::Task.FromResult(expectedResponse), null, null, null, null));
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Policy responseCallSettings = await client.UpdatePolicyAsync(request.Policy, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
Policy responseCancellationToken = await client.UpdatePolicyAsync(request.Policy, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public void CreateAttestorRequestObject()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
CreateAttestorRequest request = new CreateAttestorRequest
{
ParentAsProjectName = gagr::ProjectName.FromProject("[PROJECT]"),
AttestorId = "attestor_id76a3ccd6",
Attestor = new Attestor(),
};
Attestor expectedResponse = new Attestor
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
UserOwnedDrydockNote = new UserOwnedDrydockNote(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
};
mockGrpcClient.Setup(x => x.CreateAttestor(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Attestor response = client.CreateAttestor(request);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task CreateAttestorRequestObjectAsync()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
CreateAttestorRequest request = new CreateAttestorRequest
{
ParentAsProjectName = gagr::ProjectName.FromProject("[PROJECT]"),
AttestorId = "attestor_id76a3ccd6",
Attestor = new Attestor(),
};
Attestor expectedResponse = new Attestor
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
UserOwnedDrydockNote = new UserOwnedDrydockNote(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
};
mockGrpcClient.Setup(x => x.CreateAttestorAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Attestor>(stt::Task.FromResult(expectedResponse), null, null, null, null));
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Attestor responseCallSettings = await client.CreateAttestorAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
Attestor responseCancellationToken = await client.CreateAttestorAsync(request, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public void CreateAttestor()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
CreateAttestorRequest request = new CreateAttestorRequest
{
ParentAsProjectName = gagr::ProjectName.FromProject("[PROJECT]"),
AttestorId = "attestor_id76a3ccd6",
Attestor = new Attestor(),
};
Attestor expectedResponse = new Attestor
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
UserOwnedDrydockNote = new UserOwnedDrydockNote(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
};
mockGrpcClient.Setup(x => x.CreateAttestor(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Attestor response = client.CreateAttestor(request.Parent, request.AttestorId, request.Attestor);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task CreateAttestorAsync()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
CreateAttestorRequest request = new CreateAttestorRequest
{
ParentAsProjectName = gagr::ProjectName.FromProject("[PROJECT]"),
AttestorId = "attestor_id76a3ccd6",
Attestor = new Attestor(),
};
Attestor expectedResponse = new Attestor
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
UserOwnedDrydockNote = new UserOwnedDrydockNote(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
};
mockGrpcClient.Setup(x => x.CreateAttestorAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Attestor>(stt::Task.FromResult(expectedResponse), null, null, null, null));
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Attestor responseCallSettings = await client.CreateAttestorAsync(request.Parent, request.AttestorId, request.Attestor, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
Attestor responseCancellationToken = await client.CreateAttestorAsync(request.Parent, request.AttestorId, request.Attestor, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public void CreateAttestorResourceNames()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
CreateAttestorRequest request = new CreateAttestorRequest
{
ParentAsProjectName = gagr::ProjectName.FromProject("[PROJECT]"),
AttestorId = "attestor_id76a3ccd6",
Attestor = new Attestor(),
};
Attestor expectedResponse = new Attestor
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
UserOwnedDrydockNote = new UserOwnedDrydockNote(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
};
mockGrpcClient.Setup(x => x.CreateAttestor(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Attestor response = client.CreateAttestor(request.ParentAsProjectName, request.AttestorId, request.Attestor);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task CreateAttestorResourceNamesAsync()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
CreateAttestorRequest request = new CreateAttestorRequest
{
ParentAsProjectName = gagr::ProjectName.FromProject("[PROJECT]"),
AttestorId = "attestor_id76a3ccd6",
Attestor = new Attestor(),
};
Attestor expectedResponse = new Attestor
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
UserOwnedDrydockNote = new UserOwnedDrydockNote(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
};
mockGrpcClient.Setup(x => x.CreateAttestorAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Attestor>(stt::Task.FromResult(expectedResponse), null, null, null, null));
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Attestor responseCallSettings = await client.CreateAttestorAsync(request.ParentAsProjectName, request.AttestorId, request.Attestor, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
Attestor responseCancellationToken = await client.CreateAttestorAsync(request.ParentAsProjectName, request.AttestorId, request.Attestor, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public void GetAttestorRequestObject()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
GetAttestorRequest request = new GetAttestorRequest
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
};
Attestor expectedResponse = new Attestor
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
UserOwnedDrydockNote = new UserOwnedDrydockNote(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
};
mockGrpcClient.Setup(x => x.GetAttestor(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Attestor response = client.GetAttestor(request);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetAttestorRequestObjectAsync()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
GetAttestorRequest request = new GetAttestorRequest
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
};
Attestor expectedResponse = new Attestor
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
UserOwnedDrydockNote = new UserOwnedDrydockNote(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
};
mockGrpcClient.Setup(x => x.GetAttestorAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Attestor>(stt::Task.FromResult(expectedResponse), null, null, null, null));
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Attestor responseCallSettings = await client.GetAttestorAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
Attestor responseCancellationToken = await client.GetAttestorAsync(request, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public void GetAttestor()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
GetAttestorRequest request = new GetAttestorRequest
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
};
Attestor expectedResponse = new Attestor
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
UserOwnedDrydockNote = new UserOwnedDrydockNote(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
};
mockGrpcClient.Setup(x => x.GetAttestor(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Attestor response = client.GetAttestor(request.Name);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetAttestorAsync()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
GetAttestorRequest request = new GetAttestorRequest
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
};
Attestor expectedResponse = new Attestor
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
UserOwnedDrydockNote = new UserOwnedDrydockNote(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
};
mockGrpcClient.Setup(x => x.GetAttestorAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Attestor>(stt::Task.FromResult(expectedResponse), null, null, null, null));
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Attestor responseCallSettings = await client.GetAttestorAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
Attestor responseCancellationToken = await client.GetAttestorAsync(request.Name, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public void GetAttestorResourceNames()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
GetAttestorRequest request = new GetAttestorRequest
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
};
Attestor expectedResponse = new Attestor
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
UserOwnedDrydockNote = new UserOwnedDrydockNote(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
};
mockGrpcClient.Setup(x => x.GetAttestor(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Attestor response = client.GetAttestor(request.AttestorName);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task GetAttestorResourceNamesAsync()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
GetAttestorRequest request = new GetAttestorRequest
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
};
Attestor expectedResponse = new Attestor
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
UserOwnedDrydockNote = new UserOwnedDrydockNote(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
};
mockGrpcClient.Setup(x => x.GetAttestorAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Attestor>(stt::Task.FromResult(expectedResponse), null, null, null, null));
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Attestor responseCallSettings = await client.GetAttestorAsync(request.AttestorName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
Attestor responseCancellationToken = await client.GetAttestorAsync(request.AttestorName, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public void UpdateAttestorRequestObject()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
UpdateAttestorRequest request = new UpdateAttestorRequest
{
Attestor = new Attestor(),
};
Attestor expectedResponse = new Attestor
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
UserOwnedDrydockNote = new UserOwnedDrydockNote(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
};
mockGrpcClient.Setup(x => x.UpdateAttestor(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Attestor response = client.UpdateAttestor(request);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task UpdateAttestorRequestObjectAsync()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
UpdateAttestorRequest request = new UpdateAttestorRequest
{
Attestor = new Attestor(),
};
Attestor expectedResponse = new Attestor
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
UserOwnedDrydockNote = new UserOwnedDrydockNote(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
};
mockGrpcClient.Setup(x => x.UpdateAttestorAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Attestor>(stt::Task.FromResult(expectedResponse), null, null, null, null));
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Attestor responseCallSettings = await client.UpdateAttestorAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
Attestor responseCancellationToken = await client.UpdateAttestorAsync(request, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public void UpdateAttestor()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
UpdateAttestorRequest request = new UpdateAttestorRequest
{
Attestor = new Attestor(),
};
Attestor expectedResponse = new Attestor
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
UserOwnedDrydockNote = new UserOwnedDrydockNote(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
};
mockGrpcClient.Setup(x => x.UpdateAttestor(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Attestor response = client.UpdateAttestor(request.Attestor);
xunit::Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task UpdateAttestorAsync()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
UpdateAttestorRequest request = new UpdateAttestorRequest
{
Attestor = new Attestor(),
};
Attestor expectedResponse = new Attestor
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
UserOwnedDrydockNote = new UserOwnedDrydockNote(),
UpdateTime = new wkt::Timestamp(),
Description = "description2cf9da67",
};
mockGrpcClient.Setup(x => x.UpdateAttestorAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<Attestor>(stt::Task.FromResult(expectedResponse), null, null, null, null));
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
Attestor responseCallSettings = await client.UpdateAttestorAsync(request.Attestor, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
xunit::Assert.Same(expectedResponse, responseCallSettings);
Attestor responseCancellationToken = await client.UpdateAttestorAsync(request.Attestor, st::CancellationToken.None);
xunit::Assert.Same(expectedResponse, responseCancellationToken);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public void DeleteAttestorRequestObject()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
DeleteAttestorRequest request = new DeleteAttestorRequest
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
};
wkt::Empty expectedResponse = new wkt::Empty { };
mockGrpcClient.Setup(x => x.DeleteAttestor(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
client.DeleteAttestor(request);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task DeleteAttestorRequestObjectAsync()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
DeleteAttestorRequest request = new DeleteAttestorRequest
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
};
wkt::Empty expectedResponse = new wkt::Empty { };
mockGrpcClient.Setup(x => x.DeleteAttestorAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<wkt::Empty>(stt::Task.FromResult(expectedResponse), null, null, null, null));
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
await client.DeleteAttestorAsync(request, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
await client.DeleteAttestorAsync(request, st::CancellationToken.None);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public void DeleteAttestor()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
DeleteAttestorRequest request = new DeleteAttestorRequest
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
};
wkt::Empty expectedResponse = new wkt::Empty { };
mockGrpcClient.Setup(x => x.DeleteAttestor(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
client.DeleteAttestor(request.Name);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task DeleteAttestorAsync()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
DeleteAttestorRequest request = new DeleteAttestorRequest
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
};
wkt::Empty expectedResponse = new wkt::Empty { };
mockGrpcClient.Setup(x => x.DeleteAttestorAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<wkt::Empty>(stt::Task.FromResult(expectedResponse), null, null, null, null));
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
await client.DeleteAttestorAsync(request.Name, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
await client.DeleteAttestorAsync(request.Name, st::CancellationToken.None);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public void DeleteAttestorResourceNames()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
DeleteAttestorRequest request = new DeleteAttestorRequest
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
};
wkt::Empty expectedResponse = new wkt::Empty { };
mockGrpcClient.Setup(x => x.DeleteAttestor(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(expectedResponse);
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
client.DeleteAttestor(request.AttestorName);
mockGrpcClient.VerifyAll();
}
[xunit::FactAttribute]
public async stt::Task DeleteAttestorResourceNamesAsync()
{
moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client> mockGrpcClient = new moq::Mock<BinauthzManagementServiceV1Beta1.BinauthzManagementServiceV1Beta1Client>(moq::MockBehavior.Strict);
DeleteAttestorRequest request = new DeleteAttestorRequest
{
AttestorName = AttestorName.FromProjectAttestor("[PROJECT]", "[ATTESTOR]"),
};
wkt::Empty expectedResponse = new wkt::Empty { };
mockGrpcClient.Setup(x => x.DeleteAttestorAsync(request, moq::It.IsAny<grpccore::CallOptions>())).Returns(new grpccore::AsyncUnaryCall<wkt::Empty>(stt::Task.FromResult(expectedResponse), null, null, null, null));
BinauthzManagementServiceV1Beta1Client client = new BinauthzManagementServiceV1Beta1ClientImpl(mockGrpcClient.Object, null);
await client.DeleteAttestorAsync(request.AttestorName, gaxgrpc::CallSettings.FromCancellationToken(st::CancellationToken.None));
await client.DeleteAttestorAsync(request.AttestorName, st::CancellationToken.None);
mockGrpcClient.VerifyAll();
}
}
}
| |
using System;
using System.Collections;
using System.Globalization;
using System.IO;
using Org.BouncyCastle.Utilities;
using Org.BouncyCastle.Utilities.Collections;
namespace Org.BouncyCastle.Bcpg.OpenPgp
{
/// <remarks>
    /// Often a PGP key ring file is made up of a succession of master/sub-key key rings.
    /// If you want to read an entire secret key file in one pass, this is the class for you.
/// </remarks>
public class PgpSecretKeyRingBundle
{
private readonly IDictionary _secretRings;
private readonly IList _order;
private PgpSecretKeyRingBundle(IDictionary secretRings, IList order)
{
_secretRings = secretRings;
_order = order;
}
public PgpSecretKeyRingBundle(byte[] encoding)
: this(new MemoryStream(encoding, false)) { }
/// <summary>Build a PgpSecretKeyRingBundle from the passed in input stream.</summary>
/// <param name="inputStream">Input stream containing data.</param>
/// <exception cref="IOException">If a problem parsing the stream occurs.</exception>
/// <exception cref="PgpException">If an object is encountered which isn't a PgpSecretKeyRing.</exception>
public PgpSecretKeyRingBundle(Stream inputStream)
: this(new PgpObjectFactory(inputStream).AllPgpObjects()) { }
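        // Illustrative sketch (not part of the original class): one plausible way to build a
        // bundle straight from a key ring file. The file name is a hypothetical example; for
        // ASCII-armored input you would typically wrap the stream with PgpUtilities.GetDecoderStream first.
        private static PgpSecretKeyRingBundle LoadBundleExample()
        {
            using (Stream keyIn = File.OpenRead("secring.gpg"))
            {
                return new PgpSecretKeyRingBundle(keyIn);
            }
        }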
public PgpSecretKeyRingBundle(IEnumerable e)
{
_secretRings = Platform.CreateHashtable();
_order = Platform.CreateArrayList();
foreach (var obj in e)
{
var pgpSecret = obj as PgpSecretKeyRing;
if (pgpSecret == null)
{
throw new PgpException(obj.GetType().FullName + " found where PgpSecretKeyRing expected");
}
var key = pgpSecret.GetPublicKey().KeyId;
_secretRings.Add(key, pgpSecret);
_order.Add(key);
}
}
[Obsolete("Use 'Count' property instead")]
public int Size
{
get { return _order.Count; }
}
/// <summary>Return the number of rings in this collection.</summary>
public int Count
{
get { return _order.Count; }
}
/// <summary>Allow enumeration of the secret key rings making up this collection.</summary>
public IEnumerable GetKeyRings()
{
return new EnumerableProxy(_secretRings.Values);
}
/// <summary>Allow enumeration of the key rings associated with the passed in userId.</summary>
/// <param name="userId">The user ID to be matched.</param>
/// <returns>An <c>IEnumerable</c> of key rings which matched (possibly none).</returns>
public IEnumerable GetKeyRings(string userId)
{
return GetKeyRings(userId, false, false);
}
/// <summary>Allow enumeration of the key rings associated with the passed in userId.</summary>
/// <param name="userId">The user ID to be matched.</param>
/// <param name="matchPartial">If true, userId need only be a substring of an actual ID string to match.</param>
/// <returns>An <c>IEnumerable</c> of key rings which matched (possibly none).</returns>
public IEnumerable GetKeyRings(string userId, bool matchPartial)
{
return GetKeyRings(userId, matchPartial, false);
}
/// <summary>Allow enumeration of the key rings associated with the passed in userId.</summary>
/// <param name="userId">The user ID to be matched.</param>
/// <param name="matchPartial">If true, userId need only be a substring of an actual ID string to match.</param>
/// <param name="ignoreCase">If true, case is ignored in user ID comparisons.</param>
/// <returns>An <c>IEnumerable</c> of key rings which matched (possibly none).</returns>
public IEnumerable GetKeyRings(string userId, bool matchPartial, bool ignoreCase)
{
var rings = Platform.CreateArrayList();
if (ignoreCase)
{
userId = Platform.StringToLower(userId);
}
foreach (PgpSecretKeyRing secRing in GetKeyRings())
{
foreach (string nextUserId in secRing.GetSecretKey().UserIds)
{
var next = nextUserId;
if (ignoreCase)
{
next = Platform.StringToLower(next);
}
if (matchPartial)
{
if (next.IndexOf(userId, System.StringComparison.Ordinal) > -1)
{
rings.Add(secRing);
}
}
else
{
if (next.Equals(userId))
{
rings.Add(secRing);
}
}
}
}
return new EnumerableProxy(rings);
}
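        // Illustrative sketch (hypothetical helper, not part of the original API): count the rings
        // whose user IDs contain the given fragment, using partial, case-insensitive matching.
        private static int CountRingsForUserExample(PgpSecretKeyRingBundle bundle, string userIdFragment)
        {
            int count = 0;
            foreach (PgpSecretKeyRing ring in bundle.GetKeyRings(userIdFragment, true, true))
            {
                count++;
            }
            return count;
        }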
/// <summary>Return the PGP secret key associated with the given key id.</summary>
/// <param name="keyId">The ID of the secret key to return.</param>
public IPgpSecretKey GetSecretKey(long keyId)
{
foreach (PgpSecretKeyRing secRing in GetKeyRings())
{
var sec = secRing.GetSecretKey(keyId);
if (sec != null)
{
return sec;
}
}
return null;
}
/// <summary>Return the secret key ring which contains the key referred to by keyId</summary>
/// <param name="keyId">The ID of the secret key</param>
public PgpSecretKeyRing GetSecretKeyRing(long keyId)
{
var id = keyId;
if (_secretRings.Contains(id))
{
return (PgpSecretKeyRing)_secretRings[id];
}
foreach (PgpSecretKeyRing secretRing in GetKeyRings())
{
var secret = secretRing.GetSecretKey(keyId);
if (secret != null)
{
return secretRing;
}
}
return null;
}
/// <summary>
/// Return true if a key matching the passed in key ID is present, false otherwise.
/// </summary>
/// <param name="keyId">key ID to look for.</param>
public bool Contains(long keyId)
{
return GetSecretKey(keyId) != null;
}
public byte[] GetEncoded()
{
using (var bOut = new MemoryStream())
{
Encode(bOut);
return bOut.ToArray();
}
}
public void Encode(Stream outStr)
{
var bcpgOut = BcpgOutputStream.Wrap(outStr);
foreach (long key in _order)
{
var pub = (PgpSecretKeyRing)_secretRings[key];
pub.Encode(bcpgOut);
}
}
/// <summary>
/// Return a new bundle containing the contents of the passed in bundle and
/// the passed in secret key ring.
/// </summary>
/// <param name="bundle">The <c>PgpSecretKeyRingBundle</c> the key ring is to be added to.</param>
/// <param name="secretKeyRing">The key ring to be added.</param>
/// <returns>A new <c>PgpSecretKeyRingBundle</c> merging the current one with the passed in key ring.</returns>
/// <exception cref="ArgumentException">If the keyId for the passed in key ring is already present.</exception>
public static PgpSecretKeyRingBundle AddSecretKeyRing(PgpSecretKeyRingBundle bundle, PgpSecretKeyRing secretKeyRing)
{
var key = secretKeyRing.GetPublicKey().KeyId;
if (bundle._secretRings.Contains(key))
{
throw new ArgumentException("Collection already contains a key with a keyId for the passed in ring.");
}
var newSecretRings = Platform.CreateHashtable(bundle._secretRings);
var newOrder = Platform.CreateArrayList(bundle._order);
newSecretRings[key] = secretKeyRing;
newOrder.Add(key);
return new PgpSecretKeyRingBundle(newSecretRings, newOrder);
}
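        // Illustrative sketch (hypothetical helper): bundles are immutable, so adding a ring yields
        // a new bundle and leaves the original untouched; here the add is skipped when the key id
        // is already present.
        private static PgpSecretKeyRingBundle AddRingIfMissingExample(PgpSecretKeyRingBundle bundle, PgpSecretKeyRing ring)
        {
            long keyId = ring.GetPublicKey().KeyId;
            return bundle.Contains(keyId) ? bundle : AddSecretKeyRing(bundle, ring);
        }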
/// <summary>
/// Return a new bundle containing the contents of the passed in bundle with
/// the passed in secret key ring removed.
/// </summary>
/// <param name="bundle">The <c>PgpSecretKeyRingBundle</c> the key ring is to be removed from.</param>
/// <param name="secretKeyRing">The key ring to be removed.</param>
/// <returns>A new <c>PgpSecretKeyRingBundle</c> not containing the passed in key ring.</returns>
/// <exception cref="ArgumentException">If the keyId for the passed in key ring is not present.</exception>
public static PgpSecretKeyRingBundle RemoveSecretKeyRing(PgpSecretKeyRingBundle bundle, PgpSecretKeyRing secretKeyRing)
{
var key = secretKeyRing.GetPublicKey().KeyId;
if (!bundle._secretRings.Contains(key))
{
throw new ArgumentException("Collection does not contain a key with a keyId for the passed in ring.");
}
var newSecretRings = Platform.CreateHashtable(bundle._secretRings);
var newOrder = Platform.CreateArrayList(bundle._order);
newSecretRings.Remove(key);
newOrder.Remove(key);
return new PgpSecretKeyRingBundle(newSecretRings, newOrder);
}
}
}
| |
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
namespace Microsoft.Zelig.CodeGeneration.IR
{
using System;
using System.Collections.Generic;
using Microsoft.Zelig.Runtime.TypeSystem;
public partial class ControlFlowGraphStateForCodeTransformation
{
class CacheInfo_SpanningTree : CachedInfo
{
//
// State
//
internal BasicBlock[] m_basicBlocks;
internal BasicBlock[] m_ancestors;
internal Operator[] m_operators;
internal VariableExpression[] m_variables;
internal VariableExpression[][] m_variablesByStorage;
internal VariableExpression[][] m_variablesByAggregate;
//
// Helper Methods
//
protected override void Update()
{
ControlFlowGraphStateForCodeTransformation cfg = (ControlFlowGraphStateForCodeTransformation)m_owner;
using(new PerformanceCounters.ContextualTiming( cfg, "SpanningTree" ))
{
while(true)
{
cfg.UpdateFlowInformation();
foreach(BasicBlock bb in cfg.m_basicBlocks)
{
bb.SpanningTreeIndex = -1;
}
if(m_variables != null)
{
foreach(VariableExpression var in m_variables)
{
var.SpanningTreeIndex = -1;
}
}
//--//
m_ancestors = null;
m_variablesByStorage = null;
m_variablesByAggregate = null;
DataFlow.ControlTree.SpanningTree.Compute( cfg, out m_basicBlocks, out m_operators, out m_variables );
#if DEBUG
foreach(Operator op in m_operators)
{
CHECKS.ASSERT( op.BasicBlock.Owner == cfg, "Operator {0} does not belong to {1}", op, cfg.m_md );
}
#endif
//--//
//
// If any basic block got removed from the flow graph, it will become unreachable.
// Detect that and recompute the flow information.
//
bool fUpdateFlowInformation = false;
foreach(Operator op in m_operators)
{
PhiOperator phiOp = op as PhiOperator;
if(phiOp != null)
{
fUpdateFlowInformation |= phiOp.AdjustLinkage();
}
}
foreach(BasicBlock bb in cfg.m_basicBlocks)
{
if(bb.SpanningTreeIndex == -1)
{
CHECKS.ASSERT( ArrayUtility.FindReferenceInNotNullArray( m_basicBlocks, bb ) == -1, "{0} belongs in the spanning tree for {1} without an index", bb, cfg );
bb.Delete();
fUpdateFlowInformation = true;
}
}
cfg.m_basicBlocks = m_basicBlocks;
if(!fUpdateFlowInformation) break;
}
}
}
}
//
// Constructor Methods
//
//--//
//
// Helper Methods
//
private static VariableExpression[][] BuildStorageTable( VariableExpression[] variables )
{
int varNum = variables.Length;
VariableExpression[][] variablesByStorage = new VariableExpression[varNum][];
for(int varIdx = 0; varIdx < varNum; varIdx++)
{
if(variablesByStorage[varIdx] == null)
{
VariableExpression var = variables[varIdx];
if(var.AliasedVariable is LowLevelVariableExpression)
{
VariableExpression[] array = VariableExpression.SharedEmptyArray;
//
// We only need to scan forward.
// If we got here, it was because we didn't encounter an equivalent storage before this point.
//
for(int varIdx2 = varIdx; varIdx2 < varNum; varIdx2++)
{
VariableExpression var2 = variables[varIdx2];
if(var2 != null && var.IsTheSamePhysicalEntity( var2 ))
{
array = ArrayUtility.AppendToNotNullArray( array, var2 );
}
}
//
// Propagate the table to all the variables mapped to the same storage.
//
foreach(VariableExpression var3 in array)
{
variablesByStorage[var3.SpanningTreeIndex] = array;
}
}
else
{
variablesByStorage[varIdx] = new VariableExpression[] { var };
}
}
}
return variablesByStorage;
}
private static VariableExpression[][] BuildAggregationTable( VariableExpression[] variables )
{
int varNum = variables.Length;
VariableExpression[][] variablesByAggregate = new VariableExpression[varNum][];
for(int varIdx = 0; varIdx < varNum; varIdx++)
{
if(variablesByAggregate[varIdx] == null)
{
VariableExpression var = variables[varIdx];
VariableExpression[] array = VariableExpression.SharedEmptyArray;
//
// We only need to scan forward.
                    // If we got here, it was because we didn't encounter an equivalent aggregate before this point.
//
for(int varIdx2 = varIdx; varIdx2 < varNum; varIdx2++)
{
VariableExpression var2 = variables[varIdx2];
if(var2 != null && var.IsTheSameAggregate( var2 ))
{
array = ArrayUtility.AppendToNotNullArray( array, var2 );
}
}
//
                    // Propagate the table to all the variables mapped to the same aggregate.
//
foreach(VariableExpression var3 in array)
{
variablesByAggregate[var3.SpanningTreeIndex] = array;
}
}
}
return variablesByAggregate;
}
//--//
public IDisposable LockSpanningTree()
{
var ci = GetCachedInfo< CacheInfo_SpanningTree >();
ci.Lock();
return ci;
}
//--//
public OperatorEnumeratorProvider< T > FilterOperators< T >() where T : Operator
{
return new OperatorEnumeratorProvider< T >( this.DataFlow_SpanningTree_Operators );
}
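        // Illustrative sketch (hypothetical helper, not part of the original class): FilterOperators
        // walks the spanning-tree operators and yields only those of the requested type, skipping
        // deleted ones.
        private int CountPhiOperatorsExample()
        {
            int count = 0;
            foreach (PhiOperator phiOp in this.FilterOperators<PhiOperator>())
            {
                count++;
            }
            return count;
        }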
public struct OperatorEnumeratorProvider< T > where T : Operator
{
//
// State
//
private readonly Operator[] m_values;
//
// Constructor Methods
//
internal OperatorEnumeratorProvider( Operator[] values )
{
m_values = values;
}
//
// Helper Methods
//
public OperatorEnumerator< T > GetEnumerator()
{
return new OperatorEnumerator< T >( m_values );
}
}
public struct OperatorEnumerator< T > where T : Operator
{
//
// State
//
private readonly Operator[] m_values;
private T m_current;
private int m_index;
//
// Constructor Methods
//
internal OperatorEnumerator( Operator[] values )
{
m_values = values;
m_current = null;
m_index = 0;
}
//
// Helper Methods
//
public void Dispose()
{
}
public bool MoveNext()
{
while(m_index < m_values.Length)
{
T res = m_values[m_index++] as T;
if(res != null)
{
if(res.BasicBlock != null) // Skip deleted operators.
{
m_current = res;
return true;
}
}
}
m_current = null;
return false;
}
public T Current
{
get
{
return m_current;
}
}
}
//
// Access Methods
//
[System.Diagnostics.DebuggerBrowsable( System.Diagnostics.DebuggerBrowsableState.Never )]
public BasicBlock[] DataFlow_SpanningTree_BasicBlocks
{
get
{
var ci = GetCachedInfo< CacheInfo_SpanningTree >();
return ci.m_basicBlocks;
}
}
[System.Diagnostics.DebuggerBrowsable( System.Diagnostics.DebuggerBrowsableState.Never )]
public BasicBlock[] DataFlow_SpanningTree_Ancestors
{
get
{
var ci = GetCachedInfo< CacheInfo_SpanningTree >();
if(ci.m_ancestors == null)
{
ci.m_ancestors = DataFlow.ControlTree.SpanningTree.ComputeAncestors( ci.m_basicBlocks );
}
return ci.m_ancestors;
}
}
[System.Diagnostics.DebuggerBrowsable( System.Diagnostics.DebuggerBrowsableState.Never )]
public Operator[] DataFlow_SpanningTree_Operators
{
get
{
var ci = GetCachedInfo< CacheInfo_SpanningTree >();
return ci.m_operators;
}
}
[System.Diagnostics.DebuggerBrowsable( System.Diagnostics.DebuggerBrowsableState.Never )]
public VariableExpression[] DataFlow_SpanningTree_Variables
{
get
{
var ci = GetCachedInfo< CacheInfo_SpanningTree >();
return ci.m_variables;
}
}
[System.Diagnostics.DebuggerBrowsable( System.Diagnostics.DebuggerBrowsableState.Never )]
public VariableExpression[][] DataFlow_SpanningTree_VariablesByStorage // It's indexed as VariableExpression[<variable index>][<all the variables that have the same storage location>]
{
get
{
var ci = GetCachedInfo< CacheInfo_SpanningTree >();
if(ci.m_variablesByStorage == null)
{
ci.m_variablesByStorage = BuildStorageTable( ci.m_variables );
}
return ci.m_variablesByStorage;
}
}
[System.Diagnostics.DebuggerBrowsable( System.Diagnostics.DebuggerBrowsableState.Never )]
        public VariableExpression[][] DataFlow_SpanningTree_VariablesByAggregate // It's indexed as VariableExpression[<variable index>][<all the variables that belong to the same aggregate>]
{
get
{
var ci = GetCachedInfo< CacheInfo_SpanningTree >();
if(ci.m_variablesByAggregate == null)
{
ci.m_variablesByAggregate = BuildAggregationTable( ci.m_variables );
}
return ci.m_variablesByAggregate;
}
}
}
}
| |
/*
*
* (c) Copyright Ascensio System Limited 2010-2021
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Mail;
using System.Reflection;
using System.Text;
using System.Text.RegularExpressions;
namespace ASC.Web.Studio.Core.Import
{
public class MultiFormatTextFileUserImporter : TextFileUserImporter
{
public MultiFormatTextFileUserImporter(Stream stream)
: base(stream)
{
}
public MultiFormatTextFileUserImporter(string csvText)
: base(csvText)
{
}
protected override ContactInfo GetExportedUser(string line, IDictionary<int, PropertyInfo> mappedProperties, int fieldsCount)
{
try
{
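                // First try to interpret the whole line as a single mail address
                // ("Display Name <user@host>"); if that fails, fall back to the CSV
                // parsing of the base class below.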
var address = new MailAddress(line);
var info = new ContactInfo { Email = address.Address };
if (!string.IsNullOrEmpty(address.DisplayName))
{
if (address.DisplayName.Contains(' '))
{
                        // Try to split the display name into first and last name
info.FirstName = address.DisplayName.Split(' ')[0];
info.LastName = address.DisplayName.Split(' ')[1];
}
else
{
info.FirstName = address.DisplayName;
}
}
return info;
}
catch (Exception)
{
                // That's bad: the line could not be parsed as a mail address; fall back to the CSV parser below.
}
return base.GetExportedUser(line, mappedProperties, fieldsCount);
}
}
public class TextFileUserImporter : IUserImporter
{
private readonly Stream stream;
private readonly string text;
private readonly Encoding encoding;
protected Dictionary<string, string> NameMapping { get; set; }
protected IList<string> ExcludeList { get; private set; }
public char[] Separators { get; set; }
public bool HasHeader { get; set; }
public string TextDelmiter { get; set; }
public string DefaultHeader { get; set; }
public TextFileUserImporter(Stream stream)
{
this.stream = stream;
try
{
encoding = Encoding.GetEncoding(CultureInfo.CurrentCulture.TextInfo.ANSICodePage);
}
catch
{
encoding = Encoding.UTF8;
}
HasHeader = false;
Separators = new[] { ';', ',' };
TextDelmiter = "\"";
ExcludeList = new List<string> { "ID", "Status" };
}
public TextFileUserImporter(string csvText)
{
this.encoding = Encoding.UTF8;
text = csvText;
HasHeader = false;
Separators = new[] { ';', ',' };
TextDelmiter = "\"";
ExcludeList = new List<string> { "ID", "Status" };
}
public IEnumerable<ContactInfo> GetDiscoveredUsers()
{
var users = new List<ContactInfo>();
var fileLines = new List<string>();
if (stream != null)
{
using (var reader = new StreamReader(stream, encoding, true))
{
fileLines.AddRange(reader.ReadToEnd().Split(new[] { Environment.NewLine, "\n", "\r\n" }, StringSplitOptions.RemoveEmptyEntries));
}
}
else
{
fileLines.AddRange(text.Split(new[] { Environment.NewLine, "\n", "\r\n" }, StringSplitOptions.RemoveEmptyEntries));
}
if (!string.IsNullOrEmpty(DefaultHeader))
{
fileLines.Insert(0, DefaultHeader);
}
if (0 < fileLines.Count)
{
var mappedProperties = new Dictionary<int, PropertyInfo>();
var infos = typeof(ContactInfo).GetProperties(BindingFlags.Public | BindingFlags.Instance);
var fieldsCount = GetFieldsMapping(fileLines[0], infos, mappedProperties);
for (int i = 1; i < fileLines.Count; i++)
{
users.Add(GetExportedUser(fileLines[i], mappedProperties, fieldsCount));
}
}
return users;
}
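        // Illustrative sketch (hypothetical helper, not part of the original importer): the first
        // line is treated as a header naming ContactInfo properties, and each following line
        // carries the values. The sample data is made up.
        private static IEnumerable<ContactInfo> ImportFromCsvExample()
        {
            const string csv = "FirstName;LastName;Email\r\nJohn;Doe;john.doe@example.com";
            return new TextFileUserImporter(csv).GetDiscoveredUsers();
        }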
protected virtual ContactInfo GetExportedUser(string line, IDictionary<int, PropertyInfo> mappedProperties, int fieldsCount)
{
var exportedUser = new ContactInfo();
var dataFields = GetDataFields(line);
for (int j = 0; j < Math.Min(fieldsCount, dataFields.Length); j++)
{
var propinfo = mappedProperties[j];
if (propinfo != null)
{
var value = ConvertFromString(dataFields[j], propinfo.PropertyType);
if (value != null)
{
value = Regex.Replace(value.ToString(), "(^')|(^\")|(\"$)|('$)", String.Empty);
propinfo.SetValue(exportedUser, value, null);
}
}
}
try
{
if (string.IsNullOrEmpty(exportedUser.FirstName) && string.IsNullOrEmpty(exportedUser.LastName) && !string.IsNullOrEmpty(exportedUser.Email))
{
var username = exportedUser.Email.Contains('@') ? exportedUser.Email.Substring(0, exportedUser.Email.IndexOf('@')) : exportedUser.Email;
if (username.Contains('.'))
{
exportedUser.FirstName = username.Split('.')[0];
exportedUser.LastName = username.Split('.')[1];
}
}
}
catch { }
return exportedUser;
}
private string[] GetDataFields(string line)
{
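            // Split on any configured separator that is not inside a double-quoted field:
            // the lookahead only matches when an even number of quotes follows the separator.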
var pattern = String.Format("[{0}](?=(?:[^\"]*\"[^\"]*\")*(?![^\"]*\"))", string.Join("|", Array.ConvertAll(Separators, c => c.ToString())));
var result = Regex.Split(line, pattern);
return Array.ConvertAll<string, string>(result,
original =>
{
return original.StartsWith(TextDelmiter) && original.EndsWith(TextDelmiter) ?
original.Substring(1, original.Length - 2) :
original;
}
);
}
private int GetFieldsMapping(string firstLine, IEnumerable<PropertyInfo> infos, IDictionary<int, PropertyInfo> mappedProperties)
{
var fields = firstLine.Split(Separators, StringSplitOptions.RemoveEmptyEntries);
for (int i = 0; i < fields.Length; i++)
{
var field = fields[i];
                // Find the matching ContactInfo property for this column
foreach (var info in infos)
{
var propertyField = field.Trim();
propertyField = propertyField.Trim('"');
if (NameMapping != null && NameMapping.ContainsKey(propertyField))
{
propertyField = NameMapping[propertyField];
}
if (!string.IsNullOrEmpty(propertyField) && !ExcludeList.Contains(propertyField) && propertyField.Equals(info.Name, StringComparison.OrdinalIgnoreCase))
{
//Add to map
mappedProperties.Add(i, info);
}
}
if (!mappedProperties.ContainsKey(i))
{
//No property was found
mappedProperties.Add(i, null);
}
}
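            // If no column header matched a ContactInfo property at all, fall back to the
            // fixed layout: FirstName, LastName, Email.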
if (!mappedProperties.Values.Any(p => p != null))
{
mappedProperties[2] = infos.First(p => p.Name == "Email");
mappedProperties[0] = infos.First(p => p.Name == "FirstName");
mappedProperties[1] = infos.First(p => p.Name == "LastName");
}
return fields.Length;
}
private static object ConvertFromString(string value, Type type)
{
var converter = TypeDescriptor.GetConverter(type);
return converter != null && converter.CanConvertFrom(typeof(string)) ? converter.ConvertFromString(value) : null;
}
}
}
| |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
using System;
using System.Diagnostics.Contracts;
using Microsoft.Research.ClousotRegression;
[assembly: RegressionOutcome("Member 'BasicInfrastructure.VisibilityViolations.priv' has less visibility than the enclosing method 'BasicInfrastructure.VisibilityViolations.Test2'.")]
[assembly: RegressionOutcome("Member 'BasicInfrastructure.VisibilityViolations.intern' has less visibility than the enclosing method 'BasicInfrastructure.VisibilityViolations.Test2'.")]
[assembly: RegressionOutcome("Member 'BasicInfrastructure.VisibilityViolations.priv' has less visibility than the enclosing method 'BasicInfrastructure.VisibilityViolations.Test3'.")]
[assembly: RegressionOutcome("Member 'BasicInfrastructure.VisibilityViolations.prot' has less visibility than the enclosing method 'BasicInfrastructure.VisibilityViolations.Test3'.")]
[assembly: RegressionOutcome("Member 'BasicInfrastructure.VisibilityViolations.priv' has less visibility than the enclosing method 'BasicInfrastructure.VisibilityViolations.Test4'.")]
[assembly: RegressionOutcome("Member 'BasicInfrastructure.VisibilityViolations.prot' has less visibility than the enclosing method 'BasicInfrastructure.VisibilityViolations.Test4'.")]
[assembly: RegressionOutcome("Member 'BasicInfrastructure.VisibilityViolations.intern' has less visibility than the enclosing method 'BasicInfrastructure.VisibilityViolations.Test4'.")]
[assembly: RegressionOutcome("Member 'BasicInfrastructure.VisibilityViolations.priv' has less visibility than the enclosing method 'BasicInfrastructure.VisibilityViolations.Test5'.")]
[assembly: RegressionOutcome("Member 'BasicInfrastructure.VisibilityViolations.prot' has less visibility than the enclosing method 'BasicInfrastructure.VisibilityViolations.Test5'.")]
[assembly: RegressionOutcome("Member 'BasicInfrastructure.VisibilityViolations.intern' has less visibility than the enclosing method 'BasicInfrastructure.VisibilityViolations.Test5'.")]
[assembly: RegressionOutcome("Member 'BasicInfrastructure.VisibilityViolationsInternal.priv' has less visibility than the enclosing method 'BasicInfrastructure.VisibilityViolationsInternal.Test2'.")]
[assembly: RegressionOutcome("Member 'BasicInfrastructure.VisibilityViolationsInternal.priv' has less visibility than the enclosing method 'BasicInfrastructure.VisibilityViolationsInternal.Test3'.")]
[assembly: RegressionOutcome("Member 'BasicInfrastructure.VisibilityViolationsInternal.prot' has less visibility than the enclosing method 'BasicInfrastructure.VisibilityViolationsInternal.Test3'.")]
[assembly: RegressionOutcome("Member 'BasicInfrastructure.VisibilityViolationsInternal.priv' has less visibility than the enclosing method 'BasicInfrastructure.VisibilityViolationsInternal.Test4'.")]
[assembly: RegressionOutcome("Member 'BasicInfrastructure.VisibilityViolationsInternal.prot' has less visibility than the enclosing method 'BasicInfrastructure.VisibilityViolationsInternal.Test4'.")]
[assembly: RegressionOutcome("Member 'BasicInfrastructure.VisibilityViolationsInternal.priv' has less visibility than the enclosing method 'BasicInfrastructure.VisibilityViolationsInternal.Test5'.")]
[assembly: RegressionOutcome("Member 'BasicInfrastructure.VisibilityViolationsInternal.prot' has less visibility than the enclosing method 'BasicInfrastructure.VisibilityViolationsInternal.Test5'.")]
[assembly: RegressionOutcome("Method 'BasicInfrastructure.OverrideViolations.ToString' overrides 'System.Object.ToString', thus cannot add Requires.")]
// This one is for the AnalysisInfrastructure9 tests, where everything is compiled together rather than per file.
#if !NETFRAMEWORK_3_5 && !NETFRAMEWORK_4_0
[assembly: RegressionOutcome("Method 'BasicInfrastructure.LegacyRequires.UsesLegacyRequires(System.Int32,System.String)' has custom parameter validation but assembly mode is not set to support this. It will be treated as Requires<E>.")]
#endif
namespace BasicInfrastructure
{
class LegacyRequires
{
static void UsesLegacyRequires(int x, string y)
{
if (x < 0) throw new Exception();
if (y == null) throw new Exception();
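            // EndContractBlock marks the preceding if/throw checks as legacy-style preconditions.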
Contract.EndContractBlock();
}
[ClousotRegressionTest("regular")]
#if CLOUSOT2
[RegressionOutcome(Outcome = ProofOutcome.Top, Message = @"requires unproven: x >= 0", PrimaryILOffset = 3, MethodILOffset = 3)]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"requires unproven: y != null",PrimaryILOffset=20,MethodILOffset=3)]
#else
[RegressionOutcome(Outcome = ProofOutcome.Top, Message = @"requires unproven: x >= 0", PrimaryILOffset = 17, MethodILOffset = 3)]
[RegressionOutcome(Outcome = ProofOutcome.Top, Message = @"requires unproven: y != null", PrimaryILOffset = 34, MethodILOffset = 3)]
#endif
static void TestCaller1(int x, string y)
{
// Expected to fail
UsesLegacyRequires(x, y);
}
[ClousotRegressionTest("regular")]
#if CLOUSOT2
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"requires is valid",PrimaryILOffset=3,MethodILOffset=29)]
[RegressionOutcome(Outcome=ProofOutcome.True,Message=@"requires is valid",PrimaryILOffset=20,MethodILOffset=29)]
#else
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 17, MethodILOffset = 29)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 34, MethodILOffset = 29)]
#endif
static void TestCaller2(int x, string y)
{
Contract.Requires(x >= 0);
Contract.Requires(y != null);
// Expected to pass
UsesLegacyRequires(x, y);
}
}
class UnsatisfiableRequires
{
[ClousotRegressionTest("regular")]
// [RegressionOutcome("method Requires (including invariants) are unsatisfiable: BasicInfrastructure.UnsatisfiableRequires.Test1(System.Int32)")]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"method Requires (including invariants) are unsatisfiable: BasicInfrastructure.UnsatisfiableRequires.Test1(System.Int32)",PrimaryILOffset=21,MethodILOffset=0)]
public static void Test1(int x)
{
Contract.Requires(x > 0);
Contract.Requires(x < 0);
}
[ClousotRegressionTest("regular")]
//[RegressionOutcome("method Requires (including inherited requires and invariants) are unsatisfiable: BasicInfrastructure.UnsatisfiableRequires.Test2(System.String)")]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"method Requires (including inherited requires and invariants) are unsatisfiable: BasicInfrastructure.UnsatisfiableRequires.Test2(System.String)",PrimaryILOffset=24,MethodILOffset=0)]
public virtual void Test2(string x)
{
Contract.Requires(x != null);
Contract.Requires(x == null);
}
int Field = 1;
string Name = "foo";
[ContractInvariantMethod]
private void ObjectInvariant()
{
Contract.Invariant(Field > 0);
Contract.Invariant(Name != null);
}
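// The invariants above are conjoined with each method's Requires, so the preconditions in
// TestWithInv (Field <= 0) and TestWithInv2 (Name == null) below contradict them and are
// reported as unsatisfiable.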
[ClousotRegressionTest("regular")]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as field receiver)", PrimaryILOffset = 2, MethodILOffset = 0)]
//[RegressionOutcome("method Requires (including invariants) are unsatisfiable: BasicInfrastructure.UnsatisfiableRequires.TestWithInv()")]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"method Requires (including invariants) are unsatisfiable: BasicInfrastructure.UnsatisfiableRequires.TestWithInv()",PrimaryILOffset=19,MethodILOffset=0)]
private void TestWithInv()
{
Contract.Requires(Field <= 0);
}
[ClousotRegressionTest("regular")]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as field receiver)", PrimaryILOffset = 2, MethodILOffset = 0)]
//[RegressionOutcome("method Requires (including invariants) are unsatisfiable: BasicInfrastructure.UnsatisfiableRequires.TestWithInv2()")]
[RegressionOutcome(Outcome=ProofOutcome.Top,Message=@"method Requires (including invariants) are unsatisfiable: BasicInfrastructure.UnsatisfiableRequires.TestWithInv2()",PrimaryILOffset=16,MethodILOffset=0)]
private void TestWithInv2()
{
Contract.Requires(Name == null);
}
}
public class VisibilityViolations
{
private int priv = 1;
protected int prot = 1;
internal int intern = 1;
public int pub = 1;
private void Test1()
{
Contract.Requires(priv == 0);
Contract.Requires(prot == 0);
Contract.Requires(intern == 0);
Contract.Requires(pub == 0);
}
protected void Test2()
{
Contract.Requires(priv == 0);
Contract.Requires(prot == 0);
Contract.Requires(intern == 0);
Contract.Requires(pub == 0);
}
internal void Test3()
{
Contract.Requires(priv == 0);
Contract.Requires(prot == 0);
Contract.Requires(intern == 0);
Contract.Requires(pub == 0);
}
internal protected void Test4()
{
Contract.Requires(priv == 0);
Contract.Requires(prot == 0);
Contract.Requires(intern == 0);
Contract.Requires(pub == 0);
}
public void Test5()
{
Contract.Requires(priv == 0);
Contract.Requires(prot == 0);
Contract.Requires(intern == 0);
Contract.Requires(pub == 0);
}
}
internal class VisibilityViolationsInternal
{
private int priv = 1;
protected int prot = 1;
internal int intern = 1;
public int pub = 1;
private void Test1()
{
Contract.Requires(priv == 0);
Contract.Requires(prot == 0);
Contract.Requires(intern == 0);
Contract.Requires(pub == 0);
}
protected void Test2()
{
Contract.Requires(priv == 0);
Contract.Requires(prot == 0);
Contract.Requires(intern == 0);
Contract.Requires(pub == 0);
}
internal void Test3()
{
Contract.Requires(priv == 0);
Contract.Requires(prot == 0);
Contract.Requires(intern == 0);
Contract.Requires(pub == 0);
}
internal protected void Test4()
{
Contract.Requires(priv == 0);
Contract.Requires(prot == 0);
Contract.Requires(intern == 0);
Contract.Requires(pub == 0);
}
public void Test5()
{
Contract.Requires(priv == 0);
Contract.Requires(prot == 0);
Contract.Requires(intern == 0);
Contract.Requires(pub == 0);
}
}
public class OverrideViolations
{
public int x;
public override string ToString()
{
Contract.Requires(x == 0);
return "";
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Reflection;
using log4net;
using Mono.Addins;
using Nini.Config;
using OpenMetaverse;
using OpenMetaverse.Packets;
using OpenSim.Framework;
using OpenSim.Region.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Region.Framework.Scenes.Serialization;
namespace OpenSim.Region.CoreModules.World.Objects.BuySell
{
[Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "BuySellModule")]
public class BuySellModule : IBuySellModule, INonSharedRegionModule
{
// private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
protected Scene m_scene = null;
protected IDialogModule m_dialogModule;
public string Name { get { return "Object BuySell Module"; } }
public Type ReplaceableInterface { get { return null; } }
public void Initialise(IConfigSource source) {}
public void AddRegion(Scene scene)
{
m_scene = scene;
m_scene.RegisterModuleInterface<IBuySellModule>(this);
m_scene.EventManager.OnNewClient += SubscribeToClientEvents;
}
public void RemoveRegion(Scene scene)
{
m_scene.EventManager.OnNewClient -= SubscribeToClientEvents;
}
public void RegionLoaded(Scene scene)
{
m_dialogModule = scene.RequestModuleInterface<IDialogModule>();
}
public void Close()
{
RemoveRegion(m_scene);
}
public void SubscribeToClientEvents(IClientAPI client)
{
client.OnObjectSaleInfo += ObjectSaleInfo;
}
protected void ObjectSaleInfo(
IClientAPI client, UUID agentID, UUID sessionID, uint localID, byte saleType, int salePrice)
{
SceneObjectPart part = m_scene.GetSceneObjectPart(localID);
if (part == null || part.ParentGroup == null)
return;
if (part.ParentGroup.IsDeleted)
return;
if (part.OwnerID != client.AgentId && (!m_scene.Permissions.IsGod(client.AgentId)))
return;
part = part.ParentGroup.RootPart;
part.ObjectSaleType = saleType;
part.SalePrice = salePrice;
part.ParentGroup.HasGroupChanged = true;
part.GetProperties(client);
}
public bool BuyObject(IClientAPI remoteClient, UUID categoryID, uint localID, byte saleType, int salePrice)
{
SceneObjectPart part = m_scene.GetSceneObjectPart(localID);
if (part == null)
return false;
if (part.ParentGroup == null)
return false;
SceneObjectGroup group = part.ParentGroup;
switch (saleType)
{
case 1: // Sell as original (in-place sale)
uint effectivePerms = group.GetEffectivePermissions();
if ((effectivePerms & (uint)PermissionMask.Transfer) == 0)
{
if (m_dialogModule != null)
m_dialogModule.SendAlertToUser(remoteClient, "This item doesn't appear to be for sale");
return false;
}
group.SetOwnerId(remoteClient.AgentId);
group.SetRootPartOwner(part, remoteClient.AgentId, remoteClient.ActiveGroupId);
if (m_scene.Permissions.PropagatePermissions())
{
foreach (SceneObjectPart child in group.Parts)
{
child.Inventory.ChangeInventoryOwner(remoteClient.AgentId);
child.TriggerScriptChangedEvent(Changed.OWNER);
child.ApplyNextOwnerPermissions();
}
}
part.ObjectSaleType = 0;
part.SalePrice = 10;
group.HasGroupChanged = true;
part.GetProperties(remoteClient);
part.TriggerScriptChangedEvent(Changed.OWNER);
group.ResumeScripts();
part.ScheduleFullUpdate();
break;
case 2: // Sell a copy
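// To sell a copy, temporarily move the group to a position clamped inside the region bounds,
// serialize it to XML for the inventory asset, then restore the original position.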
Vector3 inventoryStoredPosition = new Vector3(
(group.AbsolutePosition.X > (int)Constants.RegionSize) ? 250 : group.AbsolutePosition.X,
(group.AbsolutePosition.Y > (int)Constants.RegionSize) ? 250 : group.AbsolutePosition.Y,
group.AbsolutePosition.Z);
Vector3 originalPosition = group.AbsolutePosition;
group.AbsolutePosition = inventoryStoredPosition;
string sceneObjectXml = SceneObjectSerializer.ToOriginalXmlFormat(group);
group.AbsolutePosition = originalPosition;
uint perms = group.GetEffectivePermissions();
if ((perms & (uint)PermissionMask.Transfer) == 0)
{
if (m_dialogModule != null)
m_dialogModule.SendAlertToUser(remoteClient, "This item doesn't appear to be for sale");
return false;
}
AssetBase asset = m_scene.CreateAsset(
group.GetPartName(localID),
group.GetPartDescription(localID),
(sbyte)AssetType.Object,
Utils.StringToBytes(sceneObjectXml),
group.OwnerID);
m_scene.AssetService.Store(asset);
InventoryItemBase item = new InventoryItemBase();
item.CreatorId = part.CreatorID.ToString();
item.CreatorData = part.CreatorData;
item.ID = UUID.Random();
item.Owner = remoteClient.AgentId;
item.AssetID = asset.FullID;
item.Description = asset.Description;
item.Name = asset.Name;
item.AssetType = asset.Type;
item.InvType = (int)InventoryType.Object;
item.Folder = categoryID;
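// (perms & 7) << 13 re-aligns the low three bits of the effective permissions with the
// Transfer (1 << 13), Modify (1 << 14) and Copy (1 << 15) bits of PermissionMask; any of
// those rights not present in the shifted value is stripped from perms below.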
uint nextPerms = (perms & 7) << 13;
if ((nextPerms & (uint)PermissionMask.Copy) == 0)
perms &= ~(uint)PermissionMask.Copy;
if ((nextPerms & (uint)PermissionMask.Transfer) == 0)
perms &= ~(uint)PermissionMask.Transfer;
if ((nextPerms & (uint)PermissionMask.Modify) == 0)
perms &= ~(uint)PermissionMask.Modify;
item.BasePermissions = perms & part.NextOwnerMask;
item.CurrentPermissions = perms & part.NextOwnerMask;
item.NextPermissions = part.NextOwnerMask;
item.EveryOnePermissions = part.EveryoneMask &
part.NextOwnerMask;
item.GroupPermissions = part.GroupMask &
part.NextOwnerMask;
item.Flags |= (uint)InventoryItemFlags.ObjectSlamPerm;
item.CreationDate = Util.UnixTimeSinceEpoch();
if (m_scene.AddInventoryItem(item))
{
remoteClient.SendInventoryItemCreateUpdate(item, 0);
}
else
{
if (m_dialogModule != null)
m_dialogModule.SendAlertToUser(remoteClient, "Cannot buy now. Your inventory is unavailable");
return false;
}
break;
case 3: // Sell contents
List<UUID> invList = part.Inventory.GetInventoryList();
bool okToSell = true;
foreach (UUID invID in invList)
{
TaskInventoryItem item1 = part.Inventory.GetInventoryItem(invID);
if ((item1.CurrentPermissions &
(uint)PermissionMask.Transfer) == 0)
{
okToSell = false;
break;
}
}
if (!okToSell)
{
if (m_dialogModule != null)
m_dialogModule.SendAlertToUser(
remoteClient, "This item's inventory doesn't appear to be for sale");
return false;
}
if (invList.Count > 0)
m_scene.MoveTaskInventoryItems(remoteClient.AgentId, part.Name, part, invList);
break;
}
return true;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// Uncomment for more asserts (slows down tests)
// This will add additional asserts to help debugging issues
//#define ASSERT_VERBOSE
// This will trigger comparison between attributes and namespaces
// Currently XDocument is sorting attributes while other XPathNavigators aren't
//#define CHECK_ATTRIBUTE_ORDER
using System;
using System.Collections.Generic;
using System.Text;
using System.Xml.XPath;
using Xunit;
namespace System.Xml.XPath.XDocument.Tests.XDocument
{
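/// <summary>
/// XPathNavigator wrapper used by these tests: every call is forwarded to two underlying
/// navigators and the results are asserted to be equal, so any behavioral divergence between
/// the implementations fails the test at the point where it occurs.
/// </summary>
/// <remarks>
/// A minimal usage sketch (illustrative only, not part of this suite), assuming both
/// navigators are built over the same XML, e.g. an XDocument-based navigator and an
/// XPathDocument-based one:
/// <code>
/// string xml = "&lt;root&gt;&lt;child/&gt;&lt;/root&gt;";
/// var nav1 = System.Xml.Linq.XDocument.Parse(xml).CreateNavigator();
/// var nav2 = new XPathDocument(new System.IO.StringReader(xml)).CreateNavigator();
/// var comparer = new NavigatorComparer(nav1, nav2);
/// while (comparer.MoveToFollowing(XPathNodeType.Element)) { /* asserts fire on any mismatch */ }
/// </code>
/// </remarks>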
public class NavigatorComparer : XPathNavigator
{
private XPathNavigator _nav1, _nav2;
private static void CompareNavigators(XPathNavigator a, XPathNavigator b)
{
#if ASSERT_VERBOSE
Assert.NotNull(a);
Assert.NotNull(b);
CompareNodeTypes(a.NodeType, b.NodeType);
if (AreComparableNodes(a.NodeType, b.NodeType))
{
CompareValues(a, b);
Assert.Equal(a.Name, b.Name);
}
#endif
}
private static bool IsWhitespaceOrText(XPathNodeType nodeType)
{
return nodeType == XPathNodeType.Whitespace || nodeType == XPathNodeType.Text || nodeType == XPathNodeType.SignificantWhitespace;
}
private static bool IsNamespaceOrAttribute(XPathNodeType nodeType)
{
return nodeType == XPathNodeType.Namespace || nodeType == XPathNodeType.Attribute;
}
private static bool AreComparableNodes(XPathNodeType a, XPathNodeType b)
{
bool areBothTextOrWhitespaces = IsWhitespaceOrText(a) && IsWhitespaceOrText(b);
bool areBothNamespacesOrAttributes = IsNamespaceOrAttribute(a) && IsNamespaceOrAttribute(b);
#if CHECK_ATTRIBUTE_ORDER
areBothNamespacesOrAttributes = false;
#endif
return !areBothTextOrWhitespaces && !areBothNamespacesOrAttributes;
}
private static void CompareNodeTypes(XPathNodeType a, XPathNodeType b)
{
// XPath.XDocument interprets whitespaces as XPathNodeType.Text
// while other XPath navigators do it properly
Assert.Equal(IsWhitespaceOrText(a), IsWhitespaceOrText(b));
Assert.Equal(IsNamespaceOrAttribute(a), IsNamespaceOrAttribute(b));
if (!IsWhitespaceOrText(a) && !IsNamespaceOrAttribute(a))
{
Assert.Equal(a, b);
}
}
private static void CompareValues(XPathNavigator a, XPathNavigator b)
{
// In order to account for Desktop vs CoreCLR difference in implementation of XmlDocument we need to normalize line endings to conform XML specification.
string sa = a.Value.Replace("\r\n", "\n").Replace("\r", "\n");
string sb = b.Value.Replace("\r\n", "\n").Replace("\r", "\n");
Assert.Equal(sa, sb);
}
public NavigatorComparer(XPathNavigator nav1, XPathNavigator nav2)
{
_nav1 = nav1;
_nav2 = nav2;
}
public override string ToString()
{
var r1 = _nav1.ToString();
var r2 = _nav2.ToString();
Assert.Equal(r1, r2);
return r1;
}
public override void SetValue(string value)
{
_nav1.SetValue(value);
_nav2.SetValue(value);
CompareNavigators(_nav1, _nav2);
}
public override object TypedValue
{
get
{
// No point in comparing by reference
return _nav1.TypedValue;
}
}
public override void SetTypedValue(object value)
{
_nav1.SetTypedValue(value);
_nav2.SetTypedValue(value);
CompareNavigators(_nav1, _nav2);
}
public override Type ValueType
{
get
{
var r1 = _nav1.ValueType;
var r2 = _nav2.ValueType;
Assert.Equal(r1, r2);
return r1;
}
}
public override bool ValueAsBoolean
{
get
{
var r1 = _nav1.ValueAsBoolean;
var r2 = _nav2.ValueAsBoolean;
Assert.Equal(r1, r2);
return r1;
}
}
public override DateTime ValueAsDateTime
{
get
{
var r1 = _nav1.ValueAsDateTime;
var r2 = _nav2.ValueAsDateTime;
Assert.Equal(r1, r2);
return r1;
}
}
public override Double ValueAsDouble
{
get
{
var r1 = _nav1.ValueAsDouble;
var r2 = _nav2.ValueAsDouble;
Assert.Equal(r1, r2);
return r1;
}
}
public override Int32 ValueAsInt
{
get
{
var r1 = _nav1.ValueAsInt;
var r2 = _nav2.ValueAsInt;
Assert.Equal(r1, r2);
return r1;
}
}
public override Int64 ValueAsLong
{
get
{
var r1 = _nav1.ValueAsLong;
var r2 = _nav2.ValueAsLong;
Assert.Equal(r1, r2);
return r1;
}
}
public override object ValueAs(Type type, IXmlNamespaceResolver resolver)
{
var r1 = _nav1.ValueAs(type, resolver);
var r2 = _nav2.ValueAs(type, resolver);
Assert.Equal(r1, r2);
return r1;
}
public override XPathNavigator CreateNavigator()
{
var r1 = _nav1.CreateNavigator();
var r2 = _nav2.CreateNavigator();
return new NavigatorComparer(r1, r2);
}
public override XmlNameTable NameTable
{
get
{
// comparing NameTable might be unreliable
return _nav1.NameTable;
}
}
public override string LookupNamespace(string value)
{
var r1 = _nav1.LookupNamespace(value);
var r2 = _nav2.LookupNamespace(value);
Assert.Equal(r1, r2);
return r1;
}
public override string LookupPrefix(string value)
{
var r1 = _nav1.LookupPrefix(value);
var r2 = _nav2.LookupPrefix(value);
Assert.Equal(r1, r2);
return r1;
}
public override IDictionary<string, string> GetNamespacesInScope(XmlNamespaceScope value)
{
var r1 = _nav1.GetNamespacesInScope(value);
var r2 = _nav2.GetNamespacesInScope(value);
Assert.Equal(r1, r2);
return r1;
}
public override XPathNavigator Clone()
{
return new NavigatorComparer(_nav1.Clone(), _nav2.Clone());
}
public override XPathNodeType NodeType
{
get
{
var r1 = _nav1.NodeType;
var r2 = _nav2.NodeType;
CompareNodeTypes(r1, r2);
return r1;
}
}
public override string LocalName
{
get
{
var r1 = _nav1.LocalName;
var r2 = _nav2.LocalName;
#if CHECK_ATTRIBUTE_ORDER
Assert.Equal(r1, r2);
#else
CompareNodeTypes(_nav1.NodeType, _nav2.NodeType);
if (!IsNamespaceOrAttribute(_nav1.NodeType))
{
Assert.Equal(r1, r2);
}
#endif
return r1;
}
}
public override string Name
{
get
{
var r1 = _nav1.Name;
var r2 = _nav2.Name;
#if CHECK_ATTRIBUTE_ORDER
Assert.Equal(r1, r2);
#else
CompareNodeTypes(_nav1.NodeType, _nav2.NodeType);
if (!IsNamespaceOrAttribute(_nav1.NodeType))
{
Assert.Equal(r1, r2);
}
#endif
return r1;
}
}
public override string NamespaceURI
{
get
{
var r1 = _nav1.NamespaceURI;
var r2 = _nav2.NamespaceURI;
Assert.Equal(r1, r2);
return r1;
}
}
public override string Prefix
{
get
{
var r1 = _nav1.Prefix;
var r2 = _nav2.Prefix;
Assert.Equal(r1, r2);
return r1;
}
}
public override string BaseURI
{
get
{
var r1 = _nav1.BaseURI;
var r2 = _nav2.BaseURI;
Assert.Equal(r1, r2);
return r1;
}
}
public override bool IsEmptyElement
{
get
{
var r1 = _nav1.IsEmptyElement;
var r2 = _nav2.IsEmptyElement;
Assert.Equal(r1, r2);
return r1;
}
}
public override string XmlLang
{
get
{
var r1 = _nav1.XmlLang;
var r2 = _nav2.XmlLang;
Assert.Equal(r1, r2);
return r1;
}
}
public override XmlReader ReadSubtree()
{
// no point in comparing
return _nav1.ReadSubtree();
}
public override void WriteSubtree(XmlWriter writer)
{
throw new NotSupportedException("WriteSubtree not supported yet.");
}
public override object UnderlyingObject
{
get
{
// no point in comparing
return _nav1.UnderlyingObject;
}
}
public override bool HasAttributes
{
get
{
var r1 = _nav1.HasAttributes;
var r2 = _nav2.HasAttributes;
Assert.Equal(r1, r2);
return r1;
}
}
public override string GetAttribute(string a, string b)
{
var r1 = _nav1.GetAttribute(a, b);
var r2 = _nav2.GetAttribute(a, b);
Assert.Equal(r1, r2);
return r1;
}
public override bool MoveToAttribute(string a, string b)
{
var r1 = _nav1.MoveToAttribute(a, b);
var r2 = _nav2.MoveToAttribute(a, b);
Assert.Equal(r1, r2);
CompareNavigators(_nav1, _nav2);
return r1;
}
public override bool MoveToFirstAttribute()
{
var r1 = _nav1.MoveToFirstAttribute();
var r2 = _nav2.MoveToFirstAttribute();
Assert.Equal(r1, r2);
#if CHECK_ATTRIBUTE_ORDER
CompareNavigators(_nav1, _nav2);
#endif
return r1;
}
public override bool MoveToNextAttribute()
{
var r1 = _nav1.MoveToNextAttribute();
var r2 = _nav2.MoveToNextAttribute();
Assert.Equal(r1, r2);
#if CHECK_ATTRIBUTE_ORDER
CompareNavigators(_nav1, _nav2);
#endif
return r1;
}
public override string GetNamespace(string value)
{
var r1 = _nav1.GetNamespace(value);
var r2 = _nav2.GetNamespace(value);
Assert.Equal(r1, r2);
return r1;
}
public override bool MoveToNamespace(string value)
{
var r1 = _nav1.MoveToNamespace(value);
var r2 = _nav2.MoveToNamespace(value);
Assert.Equal(r1, r2);
CompareNavigators(_nav1, _nav2);
return r1;
}
public override bool MoveToFirstNamespace(XPathNamespaceScope value)
{
var r1 = _nav1.MoveToFirstNamespace(value);
var r2 = _nav2.MoveToFirstNamespace(value);
Assert.Equal(r1, r2);
#if CHECK_ATTRIBUTE_ORDER
CompareNavigators(_nav1, _nav2);
#endif
return r1;
}
public override bool MoveToNextNamespace(XPathNamespaceScope value)
{
var r1 = _nav1.MoveToNextNamespace(value);
var r2 = _nav2.MoveToNextNamespace(value);
Assert.Equal(r1, r2);
#if CHECK_ATTRIBUTE_ORDER
CompareNavigators(_nav1, _nav2);
#endif
return r1;
}
public override bool MoveToNext()
{
var r1 = _nav1.MoveToNext();
var r2 = _nav2.MoveToNext();
Assert.Equal(r1, r2);
CompareNavigators(_nav1, _nav2);
return r1;
}
public override bool MoveToPrevious()
{
var r1 = _nav1.MoveToPrevious();
var r2 = _nav2.MoveToPrevious();
Assert.Equal(r1, r2);
CompareNavigators(_nav1, _nav2);
return r1;
}
public override bool MoveToFirst()
{
var r1 = _nav1.MoveToFirst();
var r2 = _nav2.MoveToFirst();
Assert.Equal(r1, r2);
CompareNavigators(_nav1, _nav2);
return r1;
}
public override bool MoveToFirstChild()
{
var r1 = _nav1.MoveToFirstChild();
var r2 = _nav2.MoveToFirstChild();
Assert.Equal(r1, r2);
CompareNavigators(_nav1, _nav2);
return r1;
}
public override bool MoveToParent()
{
var r1 = _nav1.MoveToParent();
var r2 = _nav2.MoveToParent();
Assert.Equal(r1, r2);
CompareNavigators(_nav1, _nav2);
return r1;
}
public override void MoveToRoot()
{
_nav1.MoveToRoot();
_nav2.MoveToRoot();
CompareNavigators(_nav1, _nav2);
}
public override bool MoveTo(XPathNavigator value)
{
NavigatorComparer comp = value as NavigatorComparer;
if (comp == null)
{
throw new NotSupportedException("MoveTo(XPathNavigator) not supported.");
}
var r1 = _nav1.MoveTo(comp._nav1);
var r2 = _nav2.MoveTo(comp._nav2);
Assert.Equal(r1, r2);
CompareNavigators(_nav1, _nav2);
return r1;
}
public override bool MoveToId(string value)
{
var r1 = _nav1.MoveToId(value);
var r2 = _nav2.MoveToId(value);
Assert.Equal(r1, r2);
CompareNavigators(_nav1, _nav2);
return r1;
}
public override bool MoveToChild(string a, string b)
{
var r1 = _nav1.MoveToChild(a, b);
var r2 = _nav2.MoveToChild(a, b);
Assert.Equal(r1, r2);
CompareNavigators(_nav1, _nav2);
return r1;
}
public override bool MoveToChild(XPathNodeType value)
{
var r1 = _nav1.MoveToChild(value);
var r2 = _nav2.MoveToChild(value);
Assert.Equal(r1, r2);
CompareNavigators(_nav1, _nav2);
return r1;
}
public override bool MoveToFollowing(string a, string b)
{
var r1 = _nav1.MoveToFollowing(a, b);
var r2 = _nav2.MoveToFollowing(a, b);
Assert.Equal(r1, r2);
CompareNavigators(_nav1, _nav2);
return r1;
}
public override bool MoveToFollowing(string a, string b, XPathNavigator c)
{
throw new NotSupportedException("MoveToFollowing(string, string, XPathNavigator) not supported.");
}
public override bool MoveToFollowing(XPathNodeType value)
{
var r1 = _nav1.MoveToFollowing(value);
var r2 = _nav2.MoveToFollowing(value);
Assert.Equal(r1, r2);
CompareNavigators(_nav1, _nav2);
return r1;
}
public override bool MoveToFollowing(XPathNodeType a, XPathNavigator b)
{
var r1 = _nav1.MoveToFollowing(a, b);
var r2 = _nav2.MoveToFollowing(a, b);
Assert.Equal(r1, r2);
CompareNavigators(_nav1, _nav2);
return r1;
}
public override bool MoveToNext(string a, string b)
{
var r1 = _nav1.MoveToNext(a, b);
var r2 = _nav2.MoveToNext(a, b);
Assert.Equal(r1, r2);
CompareNavigators(_nav1, _nav2);
return r1;
}
public override bool MoveToNext(XPathNodeType value)
{
var r1 = _nav1.MoveToNext(value);
var r2 = _nav2.MoveToNext(value);
Assert.Equal(r1, r2);
CompareNavigators(_nav1, _nav2);
return r1;
}
public override bool HasChildren
{
get
{
var r1 = _nav1.HasChildren;
var r2 = _nav2.HasChildren;
Assert.Equal(r1, r2);
return r1;
}
}
public override bool IsSamePosition(XPathNavigator value)
{
NavigatorComparer comp = value as NavigatorComparer;
if (comp != null)
{
var r1 = _nav1.IsSamePosition(comp._nav1);
var r2 = _nav2.IsSamePosition(comp._nav2);
#if CHECK_ATTRIBUTE_ORDER
Assert.Equal(r1, r2);
#else
CompareNodeTypes(_nav1.NodeType, _nav2.NodeType);
if (!IsNamespaceOrAttribute(_nav1.NodeType))
{
Assert.Equal(r1, r2);
}
#endif
return r1;
}
else
{
throw new NotSupportedException("IsSamePosition is not supported.");
}
}
public override string Value
{
get
{
#if CHECK_ATTRIBUTE_ORDER
CompareValues(_nav1, _nav2);
#else
CompareNodeTypes(_nav1.NodeType, _nav2.NodeType);
if (!IsNamespaceOrAttribute(_nav1.NodeType))
{
CompareValues(_nav1, _nav2);
}
#endif
return _nav1.Value;
}
}
public override object ValueAs(Type value)
{
var r1 = _nav1.ValueAs(value);
return r1;
}
// consider adding in the future
//public override bool IsDescendant(XPathNavigator value)
//public override XmlNodeOrder ComparePosition(XPathNavigator value)
//public override XPathExpression Compile(string value)
//public override XPathNavigator SelectSingleNode(string value)
//public override XPathNavigator SelectSingleNode(string a, IXmlNamespaceResolver b)
//public override XPathNavigator SelectSingleNode(XPathExpression value)
//public override XPathNodeIterator Select(string value);
//public override XPathNodeIterator Select(string a, IXmlNamespaceResolver b)
//public override XPathNodeIterator Select(XPathExpression value)
//public override object Evaluate(string a)
//public override object Evaluate(string, System.Xml.IXmlNamespaceResolver)
//public override object Evaluate(System.Xml.XPath.XPathExpression)
//public override object Evaluate(System.Xml.XPath.XPathExpression, System.Xml.XPath.XPathNodeIterator)
//public override bool Matches(System.Xml.XPath.XPathExpression)
//public override bool Matches(string)
//public override System.Xml.XPath.XPathNodeIterator SelectChildren(System.Xml.XPath.XPathNodeType)
//public override System.Xml.XPath.XPathNodeIterator SelectChildren(string, string)
//public override System.Xml.XPath.XPathNodeIterator SelectAncestors(System.Xml.XPath.XPathNodeType, bool)
//public override System.Xml.XPath.XPathNodeIterator SelectAncestors(string, string, bool)
//public override System.Xml.XPath.XPathNodeIterator SelectDescendants(System.Xml.XPath.XPathNodeType, bool)
//public override System.Xml.XPath.XPathNodeIterator SelectDescendants(string, string, bool)
//public override bool get_CanEdit()
//public override System.Xml.XmlWriter PrependChild()
//public override System.Xml.XmlWriter AppendChild()
//public override System.Xml.XmlWriter InsertAfter()
//public override System.Xml.XmlWriter InsertBefore()
//public override System.Xml.XmlWriter CreateAttributes()
//public override System.Xml.XmlWriter ReplaceRange(System.Xml.XPath.XPathNavigator)
//public override void ReplaceSelf(string)
//public override void ReplaceSelf(System.Xml.XmlReader)
//public override void ReplaceSelf(System.Xml.XPath.XPathNavigator)
//public override string get_OuterXml()
//public override void set_OuterXml(string)
//public override string get_InnerXml()
//public override void set_InnerXml(string)
//public override void AppendChild(string)
//public override void AppendChild(System.Xml.XmlReader)
//public override void AppendChild(System.Xml.XPath.XPathNavigator)
//public override void PrependChild(string)
//public override void PrependChild(System.Xml.XmlReader)
//public override void PrependChild(System.Xml.XPath.XPathNavigator)
//public override void InsertBefore(string)
//public override void InsertBefore(System.Xml.XmlReader)
//public override void InsertBefore(System.Xml.XPath.XPathNavigator)
//public override void InsertAfter(string)
//public override void InsertAfter(System.Xml.XmlReader)
//public override void InsertAfter(System.Xml.XPath.XPathNavigator)
//public override void DeleteRange(System.Xml.XPath.XPathNavigator)
//public override void DeleteSelf()
//public override void PrependChildElement(string, string, string, string)
//public override void AppendChildElement(string, string, string, string)
//public override void InsertElementBefore(string, string, string, string)
//public override void InsertElementAfter(string, string, string, string)
//public override void CreateAttribute(string, string, string, string)
//public override bool Equals(object)
//public override Int32 GetHashCode()
}
}
| |
// ***********************************************************************
// Copyright (c) 2012 Charlie Poole
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************
using System;
using System.Text;
using System.Collections;
using System.Globalization;
namespace NUnit.Framework.Constraints
{
/// <summary>
/// Static methods used in creating messages
/// </summary>
internal static class MsgUtils
{
/// <summary>
/// Static string used when strings are clipped
/// </summary>
private const string ELLIPSIS = "...";
/// <summary>
/// Formatting strings used for expected and actual values
/// </summary>
private static readonly string Fmt_Null = "null";
private static readonly string Fmt_EmptyString = "<string.Empty>";
private static readonly string Fmt_EmptyCollection = "<empty>";
private static readonly string Fmt_String = "\"{0}\"";
private static readonly string Fmt_Char = "'{0}'";
private static readonly string Fmt_DateTime = "yyyy-MM-dd HH:mm:ss.fff";
private static readonly string Fmt_ValueType = "{0}";
private static readonly string Fmt_Default = "<{0}>";
/// <summary>
/// Formats text to represent a generalized value.
/// </summary>
/// <param name="val">The value</param>
/// <returns>The formatted text</returns>
public static string FormatValue(object val)
{
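// Examples of the formatting performed below: null -> "null", "abc" -> "\"abc\"",
// 'x' -> "'x'", 3.0 -> "3.0d", new[] { 1, 2 } -> "< 1, 2 >".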
if (val == null)
return Fmt_Null;
if (val.GetType().IsArray)
return FormatArray((Array)val);
if (val is string)
return FormatString((string)val);
if (val is IEnumerable)
return FormatCollection((IEnumerable)val, 0, 10);
if (val is char)
return string.Format(Fmt_Char, val);
if (val is double)
return FormatDouble((double)val);
if (val is float)
return FormatFloat((float)val);
if (val is decimal)
return FormatDecimal((decimal)val);
if (val is DateTime)
return FormatDateTime((DateTime)val);
if (val is ValueType)
return string.Format(Fmt_ValueType, val);
#if NETCF
var vi = val as System.Reflection.MethodInfo;
if (vi != null && vi.IsGenericMethodDefinition)
return string.Format(Fmt_Default, vi.Name + "<>");
#endif
return string.Format(Fmt_Default, val);
}
/// <summary>
/// Formats text for a collection value,
/// starting at a particular point, to a max length
/// </summary>
/// <param name="collection">The collection containing elements to write.</param>
/// <param name="start">The starting point of the elements to write</param>
/// <param name="max">The maximum number of elements to write</param>
public static string FormatCollection(IEnumerable collection, long start, int max)
{
int count = 0;
int index = 0;
System.Text.StringBuilder sb = new System.Text.StringBuilder();
foreach (object obj in collection)
{
if (index++ >= start)
{
if (++count > max)
break;
sb.Append(count == 1 ? "< " : ", ");
sb.Append(FormatValue(obj));
}
}
if (count == 0)
return Fmt_EmptyCollection;
if (count > max)
sb.Append("...");
sb.Append(" >");
return sb.ToString();
}
private static string FormatArray(Array array)
{
if (array.Length == 0)
return Fmt_EmptyCollection;
int rank = array.Rank;
int[] products = new int[rank];
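// products[r] is the number of elements spanned by one slice of dimension r (the product of
// the lengths of dimensions r..rank-1), so count % products[r] == 0 marks where a "< ... >"
// segment opens or closes at that dimension.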
for (int product = 1, r = rank; --r >= 0; )
products[r] = product *= array.GetLength(r);
int count = 0;
System.Text.StringBuilder sb = new System.Text.StringBuilder();
foreach (object obj in array)
{
if (count > 0)
sb.Append(", ");
bool startSegment = false;
for (int r = 0; r < rank; r++)
{
startSegment = startSegment || count % products[r] == 0;
if (startSegment) sb.Append("< ");
}
sb.Append(FormatValue(obj));
++count;
bool nextSegment = false;
for (int r = 0; r < rank; r++)
{
nextSegment = nextSegment || count % products[r] == 0;
if (nextSegment) sb.Append(" >");
}
}
return sb.ToString();
}
private static string FormatString(string s)
{
return s == string.Empty
? Fmt_EmptyString
: string.Format(Fmt_String, s);
}
private static string FormatDouble(double d)
{
if (double.IsNaN(d) || double.IsInfinity(d))
return d.ToString();
else
{
string s = d.ToString("G17", CultureInfo.InvariantCulture);
if (s.IndexOf('.') > 0)
return s + "d";
else
return s + ".0d";
}
}
private static string FormatFloat(float f)
{
if (float.IsNaN(f) || float.IsInfinity(f))
return f.ToString();
else
{
string s = f.ToString("G9", CultureInfo.InvariantCulture);
if (s.IndexOf('.') > 0)
return s + "f";
else
return s + ".0f";
}
}
private static string FormatDecimal(Decimal d)
{
return d.ToString("G29", CultureInfo.InvariantCulture) + "m";
}
private static string FormatDateTime(DateTime dt)
{
return dt.ToString(Fmt_DateTime, CultureInfo.InvariantCulture);
}
/// <summary>
/// Returns the representation of a type as used in NUnitLite.
/// This is the same as Type.ToString() except for arrays,
/// which are displayed with their declared sizes.
/// </summary>
/// <param name="obj"></param>
/// <returns></returns>
public static string GetTypeRepresentation(object obj)
{
Array array = obj as Array;
if (array == null)
return string.Format("<{0}>", obj.GetType());
StringBuilder sb = new StringBuilder();
Type elementType = array.GetType();
int nest = 0;
while (elementType.IsArray)
{
elementType = elementType.GetElementType();
++nest;
}
sb.Append(elementType.ToString());
sb.Append('[');
for (int r = 0; r < array.Rank; r++)
{
if (r > 0) sb.Append(',');
sb.Append(array.GetLength(r));
}
sb.Append(']');
while (--nest > 0)
sb.Append("[]");
return string.Format("<{0}>", sb.ToString());
}
/// <summary>
/// Converts any control characters in a string
/// to their escaped representation.
/// </summary>
/// <param name="s">The string to be converted</param>
/// <returns>The converted string</returns>
public static string EscapeControlChars(string s)
{
if (s != null)
{
StringBuilder sb = new StringBuilder();
foreach (char c in s)
{
switch (c)
{
//case '\'':
// sb.Append("\\\'");
// break;
//case '\"':
// sb.Append("\\\"");
// break;
case '\\':
sb.Append("\\\\");
break;
case '\0':
sb.Append("\\0");
break;
case '\a':
sb.Append("\\a");
break;
case '\b':
sb.Append("\\b");
break;
case '\f':
sb.Append("\\f");
break;
case '\n':
sb.Append("\\n");
break;
case '\r':
sb.Append("\\r");
break;
case '\t':
sb.Append("\\t");
break;
case '\v':
sb.Append("\\v");
break;
case '\x0085':
case '\x2028':
case '\x2029':
sb.Append(string.Format("\\x{0:X4}", (int)c));
break;
default:
sb.Append(c);
break;
}
}
s = sb.ToString();
}
return s;
}
/// <summary>
/// Returns a string representation for a set of indices into an array
/// </summary>
/// <param name="indices">Array of indices for which a string is needed</param>
public static string GetArrayIndicesAsString(int[] indices)
{
StringBuilder sb = new StringBuilder();
sb.Append('[');
for (int r = 0; r < indices.Length; r++)
{
if (r > 0) sb.Append(',');
sb.Append(indices[r].ToString());
}
sb.Append(']');
return sb.ToString();
}
/// <summary>
/// Get an array of indices representing the point in a collection or
/// array corresponding to a single int index into the collection.
/// </summary>
/// <param name="collection">The collection to which the indices apply</param>
/// <param name="index">Index in the collection</param>
/// <returns>Array of indices</returns>
public static int[] GetArrayIndicesFromCollectionIndex(IEnumerable collection, long index)
{
Array array = collection as Array;
int rank = array == null ? 1 : array.Rank;
int[] result = new int[rank];
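// Peel off one dimension at a time, from the last to the second: the remainder against the
// dimension's length is that dimension's index, and the quotient carries to the next one out.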
for (int r = rank; --r > 0; )
{
int l = array.GetLength(r);
result[r] = (int)index % l;
index /= l;
}
result[0] = (int)index;
return result;
}
/// <summary>
/// Clip a string to a given length, starting at a particular offset, returning the clipped
/// string with ellipses representing the removed parts
/// </summary>
/// <param name="s">The string to be clipped</param>
/// <param name="maxStringLength">The maximum permitted length of the result string</param>
/// <param name="clipStart">The point at which to start clipping</param>
/// <returns>The clipped string</returns>
public static string ClipString(string s, int maxStringLength, int clipStart)
{
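// Example: ClipString("abcdefghijklmno", 8, 4) returns "...ef...": both the clipped head and
// the clipped tail are replaced by ellipses within the 8-character budget.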
int clipLength = maxStringLength;
StringBuilder sb = new StringBuilder();
if (clipStart > 0)
{
clipLength -= ELLIPSIS.Length;
sb.Append(ELLIPSIS);
}
if (s.Length - clipStart > clipLength)
{
clipLength -= ELLIPSIS.Length;
sb.Append(s.Substring(clipStart, clipLength));
sb.Append(ELLIPSIS);
}
else if (clipStart > 0)
sb.Append(s.Substring(clipStart));
else
sb.Append(s);
return sb.ToString();
}
/// <summary>
/// Clip the expected and actual strings in a coordinated fashion,
/// so that they may be displayed together.
/// </summary>
/// <param name="expected"></param>
/// <param name="actual"></param>
/// <param name="maxDisplayLength"></param>
/// <param name="mismatch"></param>
public static void ClipExpectedAndActual(ref string expected, ref string actual, int maxDisplayLength, int mismatch)
{
// Case 1: Both strings fit on line
int maxStringLength = Math.Max(expected.Length, actual.Length);
if (maxStringLength <= maxDisplayLength)
return;
// Case 2: Assume that the tail of each string fits on line
int clipLength = maxDisplayLength - ELLIPSIS.Length;
int clipStart = maxStringLength - clipLength;
// Case 3: If it doesn't, center the mismatch position
if (clipStart > mismatch)
clipStart = Math.Max(0, mismatch - clipLength / 2);
expected = ClipString(expected, maxDisplayLength, clipStart);
actual = ClipString(actual, maxDisplayLength, clipStart);
}
/// <summary>
/// Shows the position at which two strings start to differ. Comparison
/// starts at the given start index.
/// </summary>
/// <param name="expected">The expected string</param>
/// <param name="actual">The actual string</param>
/// <param name="istart">The index in the strings at which comparison should start</param>
/// <param name="ignoreCase">Boolean indicating whether case should be ignored</param>
/// <returns>-1 if no mismatch is found, or the index at which the mismatch was found</returns>
static public int FindMismatchPosition(string expected, string actual, int istart, bool ignoreCase)
{
int length = Math.Min(expected.Length, actual.Length);
string s1 = ignoreCase ? expected.ToLower() : expected;
string s2 = ignoreCase ? actual.ToLower() : actual;
for (int i = istart; i < length; i++)
{
if (s1[i] != s2[i])
return i;
}
//
// Strings have same content up to the length of the shorter string.
// Mismatch occurs because string lengths are different, so show
// that they start differing where the shortest string ends
//
if (expected.Length != actual.Length)
return length;
//
// Same strings : We shouldn't get here
//
return -1;
}
}
}
| |
// Copyright (c) Umbraco.
// See LICENSE for more details.
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading;
using NPoco;
using NUnit.Framework;
using Umbraco.Cms.Core;
using Umbraco.Cms.Core.Models;
using Umbraco.Cms.Core.Models.Entities;
using Umbraco.Cms.Core.Models.Membership;
using Umbraco.Cms.Core.Models.PublishedContent;
using Umbraco.Cms.Core.Persistence.Querying;
using Umbraco.Cms.Core.PropertyEditors;
using Umbraco.Cms.Core.Scoping;
using Umbraco.Cms.Core.Services;
using Umbraco.Cms.Infrastructure.Persistence;
using Umbraco.Cms.Infrastructure.Persistence.Dtos;
using Umbraco.Cms.Infrastructure.PublishedCache;
using Umbraco.Cms.Tests.Common;
using Umbraco.Cms.Tests.Common.Builders;
using Umbraco.Cms.Tests.Common.Testing;
using Umbraco.Cms.Tests.Integration.Testing;
using Umbraco.Extensions;
using Constants = Umbraco.Cms.Core.Constants;
namespace Umbraco.Cms.Tests.Integration.Umbraco.Infrastructure.Services
{
[TestFixture]
[Category("Slow")]
[UmbracoTest(Database = UmbracoTestOptions.Database.NewSchemaPerTest, PublishedRepositoryEvents = true, WithApplication = true)]
public class MemberServiceTests : UmbracoIntegrationTest
{
private IMemberTypeService MemberTypeService => GetRequiredService<IMemberTypeService>();
private IMemberService MemberService => GetRequiredService<IMemberService>();
[Test]
public void Can_Update_Member_Property_Values()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IMember member = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
member.SetValue("title", "title of mine");
member.SetValue("bodyText", "hello world");
MemberService.Save(member);
// re-get
member = MemberService.GetById(member.Id);
member.SetValue("title", "another title of mine"); // Change a value
member.SetValue("bodyText", null); // Clear a value
member.SetValue("author", "new author"); // Add a value
MemberService.Save(member);
// re-get
member = MemberService.GetById(member.Id);
Assert.AreEqual("another title of mine", member.GetValue("title"));
Assert.IsNull(member.GetValue("bodyText"));
Assert.AreEqual("new author", member.GetValue("author"));
}
[Test]
public void Can_Get_By_Username()
{
IMemberType memberType = MemberTypeService.Get("member");
IMember member = new Member("xname", "xemail", "xusername", "xrawpassword", memberType, true);
MemberService.Save(member);
IMember member2 = MemberService.GetByUsername(member.Username);
Assert.IsNotNull(member2);
Assert.AreEqual(member.Email, member2.Email);
}
[Test]
public void Can_Set_Last_Login_Date()
{
DateTime now = DateTime.Now;
IMemberType memberType = MemberTypeService.Get("member");
IMember member = new Member("xname", "xemail", "xusername", "xrawpassword", memberType, true)
{
LastLoginDate = now,
UpdateDate = now
};
MemberService.Save(member);
DateTime newDate = now.AddDays(10);
MemberService.SetLastLogin(member.Username, newDate);
// re-get
member = MemberService.GetById(member.Id);
Assert.That(member.LastLoginDate, Is.EqualTo(newDate).Within(1).Seconds);
Assert.That(member.UpdateDate, Is.EqualTo(newDate).Within(1).Seconds);
}
[Test]
public void Can_Create_Member_With_Properties()
{
IMemberType memberType = MemberTypeService.Get("member");
IMember member = new Member("xname", "xemail", "xusername", "xrawpassword", memberType, true);
MemberService.Save(member);
member = MemberService.GetById(member.Id);
Assert.AreEqual("xemail", member.Email);
var contentTypeFactory = new PublishedContentTypeFactory(new NoopPublishedModelFactory(), new PropertyValueConverterCollection(() => Enumerable.Empty<IPropertyValueConverter>()), GetRequiredService<IDataTypeService>());
var pmemberType = new PublishedContentType(memberType, contentTypeFactory);
var publishedSnapshotAccessor = new TestPublishedSnapshotAccessor();
var variationContextAccessor = new TestVariationContextAccessor();
IPublishedContent pmember = PublishedMember.Create(member, pmemberType, false, publishedSnapshotAccessor, variationContextAccessor, GetRequiredService<IPublishedModelFactory>());
// contains the umbracoMember... properties created when installing, on the member type
// contains the other properties, that PublishedContentType adds (BuiltinMemberProperties)
//
// TODO: see TODO in PublishedContentType, this list contains duplicates
string[] aliases = new[]
{
Constants.Conventions.Member.Comments,
Constants.Conventions.Member.FailedPasswordAttempts,
Constants.Conventions.Member.IsApproved,
Constants.Conventions.Member.IsLockedOut,
Constants.Conventions.Member.LastLockoutDate,
Constants.Conventions.Member.LastLoginDate,
Constants.Conventions.Member.LastPasswordChangeDate,
nameof(IMember.Email),
nameof(IMember.Username),
nameof(IMember.Comments),
nameof(IMember.IsApproved),
nameof(IMember.IsLockedOut),
nameof(IMember.LastLockoutDate),
nameof(IMember.CreateDate),
nameof(IMember.LastLoginDate),
nameof(IMember.LastPasswordChangeDate)
};
var properties = pmember.Properties.ToList();
Assert.IsTrue(properties.Select(x => x.Alias).ContainsAll(aliases));
IPublishedProperty email = properties[aliases.IndexOf(nameof(IMember.Email))];
Assert.AreEqual("xemail", email.GetSourceValue());
}
[Test]
public void Can_Create_Member()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IMember member = MemberBuilder.CreateSimpleMember(memberType, "test", "[email protected]", "pass", "test");
MemberService.Save(member);
Assert.AreNotEqual(0, member.Id);
IMember foundMember = MemberService.GetById(member.Id);
Assert.IsNotNull(foundMember);
Assert.AreEqual("[email protected]", foundMember.Email);
}
[Test]
public void Can_Create_Member_With_Long_TLD_In_Email()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IMember member = MemberBuilder.CreateSimpleMember(memberType, "test", "[email protected]", "pass", "test");
MemberService.Save(member);
Assert.AreNotEqual(0, member.Id);
IMember foundMember = MemberService.GetById(member.Id);
Assert.IsNotNull(foundMember);
Assert.AreEqual("[email protected]", foundMember.Email);
}
[Test]
public void Can_Create_Role()
{
MemberService.AddRole("MyTestRole");
IEnumerable<IMemberGroup> found = MemberService.GetAllRoles();
Assert.AreEqual(1, found.Count());
Assert.AreEqual("MyTestRole", found.Single().Name);
}
[Test]
public void Can_Create_Duplicate_Role()
{
MemberService.AddRole("MyTestRole");
MemberService.AddRole("MyTestRole");
IEnumerable<IMemberGroup> found = MemberService.GetAllRoles();
Assert.AreEqual(1, found.Count());
Assert.AreEqual("MyTestRole", found.Single().Name);
}
[Test]
public void Can_Get_All_Roles()
{
MemberService.AddRole("MyTestRole1");
MemberService.AddRole("MyTestRole2");
MemberService.AddRole("MyTestRole3");
IEnumerable<IMemberGroup> found = MemberService.GetAllRoles();
Assert.AreEqual(3, found.Count());
}
[Test]
public void Can_Get_All_Roles_IDs()
{
MemberService.AddRole("MyTestRole1");
MemberService.AddRole("MyTestRole2");
MemberService.AddRole("MyTestRole3");
IEnumerable<int> found = MemberService.GetAllRolesIds();
Assert.AreEqual(3, found.Count());
}
[Test]
public void Can_Replace_Roles()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IMember member = MemberBuilder.CreateSimpleMember(memberType, "test", "[email protected]", "pass", "test");
MemberService.Save(member);
string[] roleNames1 = new[] { "TR1", "TR2" };
MemberService.AssignRoles(new[] { member.Id }, roleNames1);
IEnumerable<string> memberRoles = MemberService.GetAllRoles(member.Id);
CollectionAssert.AreEquivalent(roleNames1, memberRoles);
string[] roleNames2 = new[] { "TR3", "TR4" };
MemberService.ReplaceRoles(new[] { member.Id }, roleNames2);
memberRoles = MemberService.GetAllRoles(member.Id);
CollectionAssert.AreEquivalent(roleNames2, memberRoles);
}
[Test]
public void Can_Get_All_Roles_By_Member_Id()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IMember member = MemberBuilder.CreateSimpleMember(memberType, "test", "[email protected]", "pass", "test");
MemberService.Save(member);
MemberService.AddRole("MyTestRole1");
MemberService.AddRole("MyTestRole2");
MemberService.AddRole("MyTestRole3");
MemberService.AssignRoles(new[] { member.Id }, new[] { "MyTestRole1", "MyTestRole2" });
IEnumerable<string> memberRoles = MemberService.GetAllRoles(member.Id);
Assert.AreEqual(2, memberRoles.Count());
}
[Test]
public void Can_Get_All_Roles_Ids_By_Member_Id()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IMember member = MemberBuilder.CreateSimpleMember(memberType, "test", "[email protected]", "pass", "test");
MemberService.Save(member);
MemberService.AddRole("MyTestRole1");
MemberService.AddRole("MyTestRole2");
MemberService.AddRole("MyTestRole3");
MemberService.AssignRoles(new[] { member.Id }, new[] { "MyTestRole1", "MyTestRole2" });
IEnumerable<int> memberRoles = MemberService.GetAllRolesIds(member.Id);
Assert.AreEqual(2, memberRoles.Count());
}
[Test]
public void Can_Get_All_Roles_By_Member_Username()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IMember member = MemberBuilder.CreateSimpleMember(memberType, "test", "[email protected]", "pass", "test");
MemberService.Save(member);
// need to test with '@' symbol in the lookup
IMember member2 = MemberBuilder.CreateSimpleMember(memberType, "test2", "[email protected]", "pass", "[email protected]");
MemberService.Save(member2);
MemberService.AddRole("MyTestRole1");
MemberService.AddRole("MyTestRole2");
MemberService.AddRole("MyTestRole3");
MemberService.AssignRoles(new[] { member.Id, member2.Id }, new[] { "MyTestRole1", "MyTestRole2" });
IEnumerable<string> memberRoles = MemberService.GetAllRoles("test");
Assert.AreEqual(2, memberRoles.Count());
IEnumerable<string> memberRoles2 = MemberService.GetAllRoles("[email protected]");
Assert.AreEqual(2, memberRoles2.Count());
}
[Test]
public void Can_Delete_Role()
{
MemberService.AddRole("MyTestRole1");
MemberService.DeleteRole("MyTestRole1", false);
IEnumerable<IMemberGroup> memberRoles = MemberService.GetAllRoles();
Assert.AreEqual(0, memberRoles.Count());
}
[Test]
public void Throws_When_Deleting_Assigned_Role()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IMember member = MemberBuilder.CreateSimpleMember(memberType, "test", "[email protected]", "pass", "test");
MemberService.Save(member);
MemberService.AddRole("MyTestRole1");
MemberService.AssignRoles(new[] { member.Id }, new[] { "MyTestRole1", "MyTestRole2" });
Assert.Throws<InvalidOperationException>(() => MemberService.DeleteRole("MyTestRole1", true));
}
[Test]
public void Can_Get_Members_In_Role()
{
MemberService.AddRole("MyTestRole1");
int roleId;
using (IScope scope = ScopeProvider.CreateScope())
{
roleId = scope.Database.ExecuteScalar<int>("SELECT id from umbracoNode where [text] = 'MyTestRole1'");
scope.Complete();
}
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
Member member1 = MemberBuilder.CreateSimpleMember(memberType, "test1", "[email protected]", "pass", "test1");
MemberService.Save(member1);
Member member2 = MemberBuilder.CreateSimpleMember(memberType, "test2", "[email protected]", "pass", "test2");
MemberService.Save(member2);
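// Associate both members with the role directly at the database level (bypassing the
// service API) so GetMembersInRole is exercised against raw membership rows.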
using (IScope scope = ScopeProvider.CreateScope())
{
scope.Database.Insert(new Member2MemberGroupDto { MemberGroup = roleId, Member = member1.Id });
scope.Database.Insert(new Member2MemberGroupDto { MemberGroup = roleId, Member = member2.Id });
scope.Complete();
}
IEnumerable<IMember> membersInRole = MemberService.GetMembersInRole("MyTestRole1");
Assert.AreEqual(2, membersInRole.Count());
}
[Test]
public void Cannot_Save_Member_With_Empty_Name()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IMember member = MemberBuilder.CreateSimpleMember(memberType, string.Empty, "[email protected]", "pass", "test");
// Act & Assert
Assert.Throws<ArgumentException>(() => MemberService.Save(member));
}
[TestCase("MyTestRole1", "test1", StringPropertyMatchType.StartsWith, 1)]
[TestCase("MyTestRole1", "test", StringPropertyMatchType.StartsWith, 3)]
[TestCase("MyTestRole1", "test1", StringPropertyMatchType.Exact, 1)]
[TestCase("MyTestRole1", "test", StringPropertyMatchType.Exact, 0)]
[TestCase("MyTestRole1", "st2", StringPropertyMatchType.EndsWith, 1)]
[TestCase("MyTestRole1", "test%", StringPropertyMatchType.Wildcard, 3)]
public void Find_Members_In_Role(string roleName1, string usernameToMatch, StringPropertyMatchType matchType, int resultCount)
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
Member member1 = MemberBuilder.CreateSimpleMember(memberType, "test1", "[email protected]", "pass", "test1");
MemberService.Save(member1);
Member member2 = MemberBuilder.CreateSimpleMember(memberType, "test2", "[email protected]", "pass", "test2");
MemberService.Save(member2);
Member member3 = MemberBuilder.CreateSimpleMember(memberType, "test3", "[email protected]", "pass", "test3");
MemberService.Save(member3);
MemberService.AssignRoles(new[] { member1.Id, member2.Id, member3.Id }, new[] { roleName1 });
IEnumerable<IMember> result = MemberService.FindMembersInRole(roleName1, usernameToMatch, matchType);
Assert.AreEqual(resultCount, result.Count());
}
[Test]
public void Associate_Members_To_Roles_With_Member_Id()
{
MemberService.AddRole("MyTestRole1");
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
Member member1 = MemberBuilder.CreateSimpleMember(memberType, "test1", "[email protected]", "pass", "test1");
MemberService.Save(member1);
Member member2 = MemberBuilder.CreateSimpleMember(memberType, "test2", "[email protected]", "pass", "test2");
MemberService.Save(member2);
// temp make sure they exist
Assert.IsNotNull(MemberService.GetById(member1.Id));
Assert.IsNotNull(MemberService.GetById(member2.Id));
MemberService.AssignRoles(new[] { member1.Id, member2.Id }, new[] { "MyTestRole1" });
IEnumerable<IMember> membersInRole = MemberService.GetMembersInRole("MyTestRole1");
Assert.AreEqual(2, membersInRole.Count());
}
[Test]
public void Associate_Members_To_Roles_With_Member_Id_Casing()
{
MemberService.AddRole("MyTestRole1");
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
Member member1 = MemberBuilder.CreateSimpleMember(memberType, "test1", "[email protected]", "pass", "test1");
MemberService.Save(member1);
Member member2 = MemberBuilder.CreateSimpleMember(memberType, "test2", "[email protected]", "pass", "test2");
MemberService.Save(member2);
// temp make sure they exist
Assert.IsNotNull(MemberService.GetById(member1.Id));
Assert.IsNotNull(MemberService.GetById(member2.Id));
MemberService.AssignRoles(new[] { member1.Id, member2.Id }, new[] { "mytestrole1" });
IEnumerable<IMember> membersInRole = MemberService.GetMembersInRole("MyTestRole1");
Assert.AreEqual(2, membersInRole.Count());
}
[Test]
public void Associate_Members_To_Roles_With_Member_Username()
{
MemberService.AddRole("MyTestRole1");
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
Member member1 = MemberBuilder.CreateSimpleMember(memberType, "test1", "[email protected]", "pass", "test1");
MemberService.Save(member1);
Member member2 = MemberBuilder.CreateSimpleMember(memberType, "test2", "[email protected]", "pass", "test2");
MemberService.Save(member2);
MemberService.AssignRoles(new[] { member1.Username, member2.Username }, new[] { "MyTestRole1" });
IEnumerable<IMember> membersInRole = MemberService.GetMembersInRole("MyTestRole1");
Assert.AreEqual(2, membersInRole.Count());
}
[Test]
public void Associate_Members_To_Roles_With_Member_Username_Containing_At_Symbols()
{
MemberService.AddRole("MyTestRole1");
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
Member member1 = MemberBuilder.CreateSimpleMember(memberType, "test1", "[email protected]", "pass", "[email protected]");
MemberService.Save(member1);
Member member2 = MemberBuilder.CreateSimpleMember(memberType, "test2", "[email protected]", "pass", "[email protected]");
MemberService.Save(member2);
MemberService.AssignRoles(new[] { member1.Username, member2.Username }, new[] { "MyTestRole1" });
IEnumerable<IMember> membersInRole = MemberService.GetMembersInRole("MyTestRole1");
Assert.AreEqual(2, membersInRole.Count());
}
[Test]
public void Associate_Members_To_Roles_With_New_Role()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
Member member1 = MemberBuilder.CreateSimpleMember(memberType, "test1", "[email protected]", "pass", "test1");
MemberService.Save(member1);
Member member2 = MemberBuilder.CreateSimpleMember(memberType, "test2", "[email protected]", "pass", "test2");
MemberService.Save(member2);
// implicitly create the role
MemberService.AssignRoles(new[] { member1.Username, member2.Username }, new[] { "MyTestRole1" });
IEnumerable<IMember> membersInRole = MemberService.GetMembersInRole("MyTestRole1");
Assert.AreEqual(2, membersInRole.Count());
}
[Test]
public void Remove_Members_From_Roles_With_Member_Id()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
Member member1 = MemberBuilder.CreateSimpleMember(memberType, "test1", "[email protected]", "pass", "test1");
MemberService.Save(member1);
Member member2 = MemberBuilder.CreateSimpleMember(memberType, "test2", "[email protected]", "pass", "test2");
MemberService.Save(member2);
MemberService.AssignRoles(new[] { member1.Id, member2.Id }, new[] { "MyTestRole1", "MyTestRole2" });
MemberService.DissociateRoles(new[] { member1.Id }, new[] { "MyTestRole1" });
MemberService.DissociateRoles(new[] { member1.Id, member2.Id }, new[] { "MyTestRole2" });
IEnumerable<IMember> membersInRole = MemberService.GetMembersInRole("MyTestRole1");
Assert.AreEqual(1, membersInRole.Count());
membersInRole = MemberService.GetMembersInRole("MyTestRole2");
Assert.AreEqual(0, membersInRole.Count());
}
[Test]
public void Remove_Members_From_Roles_With_Member_Username()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
Member member1 = MemberBuilder.CreateSimpleMember(memberType, "test1", "[email protected]", "pass", "test1");
MemberService.Save(member1);
Member member2 = MemberBuilder.CreateSimpleMember(memberType, "test2", "[email protected]", "pass", "test2");
MemberService.Save(member2);
MemberService.AssignRoles(new[] { member1.Username, member2.Username }, new[] { "MyTestRole1", "MyTestRole2" });
MemberService.DissociateRoles(new[] { member1.Username }, new[] { "MyTestRole1" });
MemberService.DissociateRoles(new[] { member1.Username, member2.Username }, new[] { "MyTestRole2" });
IEnumerable<IMember> membersInRole = MemberService.GetMembersInRole("MyTestRole1");
Assert.AreEqual(1, membersInRole.Count());
membersInRole = MemberService.GetMembersInRole("MyTestRole2");
Assert.AreEqual(0, membersInRole.Count());
}
[Test]
public void Can_Delete_member()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IMember member = MemberBuilder.CreateSimpleMember(memberType, "test", "[email protected]", "pass", "test");
MemberService.Save(member);
MemberService.Delete(member);
IMember deleted = MemberService.GetById(member.Id);
// Assert
Assert.That(deleted, Is.Null);
}
[Test]
public void Exists_By_Username()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IMember member = MemberBuilder.CreateSimpleMember(memberType, "test", "[email protected]", "pass", "test");
MemberService.Save(member);
IMember member2 = MemberBuilder.CreateSimpleMember(memberType, "test", "[email protected]", "pass", "[email protected]");
MemberService.Save(member2);
Assert.IsTrue(MemberService.Exists("test"));
Assert.IsFalse(MemberService.Exists("notFound"));
Assert.IsTrue(MemberService.Exists("[email protected]"));
}
[Test]
public void Exists_By_Id()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IMember member = MemberBuilder.CreateSimpleMember(memberType, "test", "[email protected]", "pass", "test");
MemberService.Save(member);
Assert.IsTrue(MemberService.Exists(member.Id));
Assert.IsFalse(MemberService.Exists(9876));
}
[Test]
public void Tracks_Dirty_Changes()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IMember member = MemberBuilder.CreateSimpleMember(memberType, "test", "[email protected]", "pass", "test");
MemberService.Save(member);
IMember resolved = MemberService.GetByEmail(member.Email);
// NOTE: This will not trigger a property isDirty because this is not based on a 'Property', it is
// just a c# property of the Member object
resolved.Email = "[email protected]";
// NOTE: this WILL trigger a property isDirty because setting this c# property actually sets a value of
// the underlying 'Property'
resolved.FailedPasswordAttempts = 1234;
var dirtyMember = (ICanBeDirty)resolved;
var dirtyProperties = resolved.Properties.Where(x => x.IsDirty()).ToList();
Assert.IsTrue(dirtyMember.IsDirty());
Assert.AreEqual(1, dirtyProperties.Count);
}
[Test]
public void Get_By_Email()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IMember member = MemberBuilder.CreateSimpleMember(memberType, "test", "[email protected]", "pass", "test");
MemberService.Save(member);
Assert.IsNotNull(MemberService.GetByEmail(member.Email));
Assert.IsNull(MemberService.GetByEmail("[email protected]"));
}
[Test]
public void Get_Member_Name()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IMember member = MemberBuilder.CreateSimpleMember(memberType, "Test Real Name", "[email protected]", "pass", "testUsername");
MemberService.Save(member);
Assert.AreEqual("Test Real Name", member.Name);
}
[Test]
public void Get_By_Username()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IMember member = MemberBuilder.CreateSimpleMember(memberType, "test", "[email protected]", "pass", "test");
MemberService.Save(member);
Assert.IsNotNull(MemberService.GetByUsername(member.Username));
Assert.IsNull(MemberService.GetByUsername("notFound"));
}
[Test]
public void Get_By_Object_Id()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IMember member = MemberBuilder.CreateSimpleMember(memberType, "test", "[email protected]", "pass", "test");
MemberService.Save(member);
Assert.IsNotNull(MemberService.GetById(member.Id));
Assert.IsNull(MemberService.GetById(9876));
}
[Test]
public void Get_All_Paged_Members()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10);
MemberService.Save(members);
IEnumerable<IMember> found = MemberService.GetAll(0, 2, out long totalRecs);
Assert.AreEqual(2, found.Count());
Assert.AreEqual(10, totalRecs);
Assert.AreEqual("test0", found.First().Username);
Assert.AreEqual("test1", found.Last().Username);
}
[Test]
public void Get_All_Paged_Members_With_Filter()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10);
MemberService.Save(members);
IEnumerable<IMember> found = MemberService.GetAll(0, 2, out long totalRecs, "username", Direction.Ascending, true, null, "Member No-");
Assert.AreEqual(2, found.Count());
Assert.AreEqual(10, totalRecs);
Assert.AreEqual("test0", found.First().Username);
Assert.AreEqual("test1", found.Last().Username);
found = MemberService.GetAll(0, 2, out totalRecs, "username", Direction.Ascending, true, null, "Member No-5");
Assert.AreEqual(1, found.Count());
Assert.AreEqual(1, totalRecs);
Assert.AreEqual("test5", found.First().Username);
}
[Test]
public void Find_By_Name_Starts_With()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10);
MemberService.Save(members);
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "Bob", "[email protected]", "hello", "hello");
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.FindMembersByDisplayName("B", 0, 100, out long totalRecs, StringPropertyMatchType.StartsWith);
Assert.AreEqual(1, found.Count());
}
[Test]
public void Find_By_Email_Starts_With()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10);
MemberService.Save(members);
// don't find this
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.FindByEmail("tes", 0, 100, out long totalRecs, StringPropertyMatchType.StartsWith);
Assert.AreEqual(10, found.Count());
}
[Test]
public void Find_By_Email_Ends_With()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10);
MemberService.Save(members);
// include this
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.FindByEmail("test.com", 0, 100, out long totalRecs, StringPropertyMatchType.EndsWith);
Assert.AreEqual(11, found.Count());
}
[Test]
public void Find_By_Email_Contains()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10);
MemberService.Save(members);
// include this
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.FindByEmail("test", 0, 100, out long totalRecs, StringPropertyMatchType.Contains);
Assert.AreEqual(11, found.Count());
}
[Test]
public void Find_By_Email_Exact()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10);
MemberService.Save(members);
// include this
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.FindByEmail("[email protected]", 0, 100, out long totalRecs, StringPropertyMatchType.Exact);
Assert.AreEqual(1, found.Count());
}
[Test]
public void Find_By_Login_Starts_With()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10);
MemberService.Save(members);
// don't find this
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.FindByUsername("tes", 0, 100, out long totalRecs, StringPropertyMatchType.StartsWith);
Assert.AreEqual(10, found.Count());
}
[Test]
public void Find_By_Login_Ends_With()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10);
MemberService.Save(members);
// include this
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.FindByUsername("llo", 0, 100, out long totalRecs, StringPropertyMatchType.EndsWith);
Assert.AreEqual(1, found.Count());
}
[Test]
public void Find_By_Login_Contains()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10);
MemberService.Save(members);
// include this
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hellotest");
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.FindByUsername("test", 0, 100, out long totalRecs, StringPropertyMatchType.Contains);
Assert.AreEqual(11, found.Count());
}
[Test]
public void Find_By_Login_Exact()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10);
MemberService.Save(members);
// include this
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.FindByUsername("hello", 0, 100, out long totalRecs, StringPropertyMatchType.Exact);
Assert.AreEqual(1, found.Count());
}
[Test]
public void Get_By_Property_String_Value_Exact()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10);
MemberService.Save(members);
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.GetMembersByPropertyValue(
"title", "hello member", StringPropertyMatchType.Exact);
Assert.AreEqual(1, found.Count());
}
[Test]
public void Get_By_Property_String_Value_Contains()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10);
MemberService.Save(members);
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.GetMembersByPropertyValue(
"title", " member", StringPropertyMatchType.Contains);
Assert.AreEqual(11, found.Count());
}
[Test]
public void Get_By_Property_String_Value_Starts_With()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10);
MemberService.Save(members);
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.GetMembersByPropertyValue(
"title", "Member No", StringPropertyMatchType.StartsWith);
Assert.AreEqual(10, found.Count());
}
[Test]
public void Get_By_Property_String_Value_Ends_With()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10);
MemberService.Save(members);
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
customMember.SetValue("title", "title of mine");
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.GetMembersByPropertyValue(
"title", "mine", StringPropertyMatchType.EndsWith);
Assert.AreEqual(1, found.Count());
}
[Test]
public void Get_By_Property_Int_Value_Exact()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
memberType.AddPropertyType(
new PropertyType(
ShortStringHelper,
Constants.PropertyEditors.Aliases.Integer,
ValueStorageType.Integer,
"number")
{
Name = "Number",
DataTypeId = -51, // NOTE: This is what really determines the db type - the above definition doesn't really do anything
}, "content", "Content");
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10, (i, member) => member.SetValue("number", i));
MemberService.Save(members);
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
customMember.SetValue("number", 2);
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.GetMembersByPropertyValue(
"number", 2, ValuePropertyMatchType.Exact);
Assert.AreEqual(2, found.Count());
}
[Test]
public void Get_By_Property_Int_Value_Greater_Than()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
memberType.AddPropertyType(
new PropertyType(
ShortStringHelper,
Constants.PropertyEditors.Aliases.Integer,
ValueStorageType.Integer,
"number")
{
Name = "Number",
DataTypeId = -51, // NOTE: This is what really determines the db type - the above definition doesn't really do anything
}, "content", "Content");
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10, (i, member) => member.SetValue("number", i));
MemberService.Save(members);
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
customMember.SetValue("number", 10);
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.GetMembersByPropertyValue(
"number", 3, ValuePropertyMatchType.GreaterThan);
Assert.AreEqual(7, found.Count());
}
[Test]
public void Get_By_Property_Int_Value_Greater_Than_Equal_To()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
memberType.AddPropertyType(
new PropertyType(
ShortStringHelper,
Constants.PropertyEditors.Aliases.Integer,
ValueStorageType.Integer,
"number")
{
Name = "Number",
DataTypeId = -51, // NOTE: This is what really determines the db type - the above definition doesn't really do anything
}, "content", "Content");
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10, (i, member) => member.SetValue("number", i));
MemberService.Save(members);
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
customMember.SetValue("number", 10);
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.GetMembersByPropertyValue(
"number", 3, ValuePropertyMatchType.GreaterThanOrEqualTo);
Assert.AreEqual(8, found.Count());
}
[Test]
public void Get_By_Property_Int_Value_Less_Than()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
memberType.AddPropertyType(
new PropertyType(
ShortStringHelper,
Constants.PropertyEditors.Aliases.DateTime,
ValueStorageType.Date,
"number")
{
Name = "Number",
DataTypeId = -51, // NOTE: This is what really determines the db type - the above definition doesn't really do anything
}, "content", "Content");
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10, (i, member) => member.SetValue("number", i));
MemberService.Save(members);
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
customMember.SetValue("number", 1);
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.GetMembersByPropertyValue(
"number", 5, ValuePropertyMatchType.LessThan);
Assert.AreEqual(6, found.Count());
}
[Test]
public void Get_By_Property_Int_Value_Less_Than_Or_Equal()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
memberType.AddPropertyType(
new PropertyType(
ShortStringHelper,
Constants.PropertyEditors.Aliases.Integer,
ValueStorageType.Integer,
"number")
{
Name = "Number",
DataTypeId = -51, // NOTE: This is what really determines the db type - the above definition doesn't really do anything
}, "content", "Content");
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10, (i, member) => member.SetValue("number", i));
MemberService.Save(members);
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
customMember.SetValue("number", 1);
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.GetMembersByPropertyValue(
"number", 5, ValuePropertyMatchType.LessThanOrEqualTo);
Assert.AreEqual(7, found.Count());
}
[Test]
public void Get_By_Property_Date_Value_Exact()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
memberType.AddPropertyType(
new PropertyType(
ShortStringHelper,
Constants.PropertyEditors.Aliases.Integer,
ValueStorageType.Integer,
"date")
{
Name = "Date",
DataTypeId = -36, // NOTE: This is what really determines the db type - the above definition doesn't really do anything
}, "content", "Content");
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10, (i, member) => member.SetValue("date", new DateTime(2013, 12, 20, 1, i, 0)));
MemberService.Save(members);
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
customMember.SetValue("date", new DateTime(2013, 12, 20, 1, 2, 0));
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.GetMembersByPropertyValue(
"date", new DateTime(2013, 12, 20, 1, 2, 0), ValuePropertyMatchType.Exact);
Assert.AreEqual(2, found.Count());
}
[Test]
public void Get_By_Property_Date_Value_Greater_Than()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
memberType.AddPropertyType(
new PropertyType(
ShortStringHelper,
Constants.PropertyEditors.Aliases.Integer,
ValueStorageType.Integer,
"date")
{
Name = "Date",
DataTypeId = -36, // NOTE: This is what really determines the db type - the above definition doesn't really do anything
}, "content", "Content");
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10, (i, member) => member.SetValue("date", new DateTime(2013, 12, 20, 1, i, 0)));
MemberService.Save(members);
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
customMember.SetValue("date", new DateTime(2013, 12, 20, 1, 10, 0));
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.GetMembersByPropertyValue(
"date", new DateTime(2013, 12, 20, 1, 3, 0), ValuePropertyMatchType.GreaterThan);
Assert.AreEqual(7, found.Count());
}
[Test]
public void Get_By_Property_Date_Value_Greater_Than_Equal_To()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
memberType.AddPropertyType(
new PropertyType(
ShortStringHelper,
Constants.PropertyEditors.Aliases.Integer,
ValueStorageType.Integer,
"date")
{
Name = "Date",
DataTypeId = -36, // NOTE: This is what really determines the db type - the above definition doesn't really do anything
}, "content", "Content");
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10, (i, member) => member.SetValue("date", new DateTime(2013, 12, 20, 1, i, 0)));
MemberService.Save(members);
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
customMember.SetValue("date", new DateTime(2013, 12, 20, 1, 10, 0));
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.GetMembersByPropertyValue(
"date", new DateTime(2013, 12, 20, 1, 3, 0), ValuePropertyMatchType.GreaterThanOrEqualTo);
Assert.AreEqual(8, found.Count());
}
[Test]
public void Get_By_Property_Date_Value_Less_Than()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
memberType.AddPropertyType(
new PropertyType(
ShortStringHelper,
Constants.PropertyEditors.Aliases.Integer,
ValueStorageType.Integer,
"date")
{
Name = "Date",
DataTypeId = -36, // NOTE: This is what really determines the db type - the above definition doesn't really do anything
}, "content", "Content");
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10, (i, member) => member.SetValue("date", new DateTime(2013, 12, 20, 1, i, 0)));
MemberService.Save(members);
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
customMember.SetValue("date", new DateTime(2013, 12, 20, 1, 1, 0));
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.GetMembersByPropertyValue(
"date", new DateTime(2013, 12, 20, 1, 5, 0), ValuePropertyMatchType.LessThan);
Assert.AreEqual(6, found.Count());
}
[Test]
public void Get_By_Property_Date_Value_Less_Than_Or_Equal()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
memberType.AddPropertyType(
new PropertyType(
ShortStringHelper,
Constants.PropertyEditors.Aliases.Integer,
ValueStorageType.Integer,
"date")
{
Name = "Date",
DataTypeId = -36, // NOTE: This is what really determines the db type - the above definition doesn't really do anything
}, "content", "Content");
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10, (i, member) => member.SetValue("date", new DateTime(2013, 12, 20, 1, i, 0)));
MemberService.Save(members);
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
customMember.SetValue("date", new DateTime(2013, 12, 20, 1, 1, 0));
MemberService.Save(customMember);
IEnumerable<IMember> found = MemberService.GetMembersByPropertyValue(
"date", new DateTime(2013, 12, 20, 1, 5, 0), ValuePropertyMatchType.LessThanOrEqualTo);
Assert.AreEqual(7, found.Count());
}
[Test]
public void Count_All_Members()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10);
MemberService.Save(members);
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
MemberService.Save(customMember);
int found = MemberService.GetCount(MemberCountType.All);
Assert.AreEqual(11, found);
}
[Test]
public void Count_All_Locked_Members()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10, (i, member) => member.IsLockedOut = i % 2 == 0);
MemberService.Save(members);
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
customMember.SetValue(Constants.Conventions.Member.IsLockedOut, true);
MemberService.Save(customMember);
int found = MemberService.GetCount(MemberCountType.LockedOut);
Assert.AreEqual(6, found);
}
[Test]
public void Count_All_Approved_Members()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
IEnumerable<IMember> members = MemberBuilder.CreateMultipleSimpleMembers(memberType, 10, (i, member) => member.IsApproved = i % 2 == 0);
MemberService.Save(members);
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
customMember.SetValue(Constants.Conventions.Member.IsApproved, false);
MemberService.Save(customMember);
int found = MemberService.GetCount(MemberCountType.Approved);
Assert.AreEqual(5, found);
}
[Test]
public void Setting_Property_On_Built_In_Member_Property_When_Property_Doesnt_Exist_On_Type_Is_Ok()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
memberType.RemovePropertyType(Constants.Conventions.Member.Comments);
MemberTypeService.Save(memberType);
Assert.IsFalse(memberType.PropertyTypes.Any(x => x.Alias == Constants.Conventions.Member.Comments));
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
// this should not throw an exception
customMember.Comments = "hello world";
MemberService.Save(customMember);
IMember found = MemberService.GetById(customMember.Id);
Assert.IsTrue(found.Comments.IsNullOrWhiteSpace());
}
/// <summary>
/// Because we force some of the built-in properties to be Labels (which have an underlying db type of nvarchar),
/// we need to ensure that dates/ints still get saved to the correct column.
/// </summary>
[Test]
public void Setting_DateTime_Property_On_Built_In_Member_Property_Saves_To_Correct_Column()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
Member member = MemberBuilder.CreateSimpleMember(memberType, "test", "[email protected]", "test", "test");
DateTime date = DateTime.Now;
member.LastLoginDate = date;
MemberService.Save(member);
IMember result = MemberService.GetById(member.Id);
Assert.AreEqual(
date.TruncateTo(DateTimeExtensions.DateTruncate.Second),
result.LastLoginDate.TruncateTo(DateTimeExtensions.DateTruncate.Second));
// now ensure the col is correct
ISqlContext sqlContext = GetRequiredService<ISqlContext>();
Sql<ISqlContext> sql = sqlContext.Sql().Select<PropertyDataDto>()
.From<PropertyDataDto>()
.InnerJoin<PropertyTypeDto>().On<PropertyDataDto, PropertyTypeDto>(dto => dto.PropertyTypeId, dto => dto.Id)
.InnerJoin<ContentVersionDto>().On<PropertyDataDto, ContentVersionDto>((left, right) => left.VersionId == right.Id)
.Where<ContentVersionDto>(dto => dto.NodeId == member.Id)
.Where<PropertyTypeDto>(dto => dto.Alias == Constants.Conventions.Member.LastLoginDate);
List<PropertyDataDto> colResult;
using (IScope scope = ScopeProvider.CreateScope())
{
colResult = scope.Database.Fetch<PropertyDataDto>(sql);
scope.Complete();
}
Assert.AreEqual(1, colResult.Count);
Assert.IsTrue(colResult.First().DateValue.HasValue);
Assert.IsFalse(colResult.First().IntegerValue.HasValue);
Assert.IsNull(colResult.First().TextValue);
Assert.IsNull(colResult.First().VarcharValue);
}
[Test]
public void New_Member_Approved_By_Default()
{
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
Member customMember = MemberBuilder.CreateSimpleMember(memberType, "hello", "[email protected]", "hello", "hello");
MemberService.Save(customMember);
IMember found = MemberService.GetById(customMember.Id);
Assert.IsTrue(found.IsApproved);
}
[Test]
public void Can_CreateWithIdentity()
{
// Arrange
IMemberType memberType = MemberTypeBuilder.CreateSimpleMemberType();
MemberTypeService.Save(memberType);
string username = Path.GetRandomFileName();
// Act
IMember member = MemberService.CreateMemberWithIdentity(username, $"{username}@domain.email", Path.GetFileNameWithoutExtension(username), memberType);
IMember found = MemberService.GetById(member.Id);
// Assert
Assert.IsNotNull(member, "Verifying a member instance has been created");
Assert.IsNotNull(found, "Verifying the created member instance has been retrieved");
Assert.IsTrue(found?.Name == member?.Name, "Verifying the retrieved member instance has the expected name");
}
}
}
| |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Runtime.InteropServices;
using System.Text;
using System.Text.RegularExpressions;
using System.Web;
using OpenLiveWriter.Interop.Com;
using OpenLiveWriter.Interop.Windows;
using OpenLiveWriter.Localization;
namespace OpenLiveWriter.CoreServices
{
/// <summary>
/// Helper utilities for working with Urls.
/// </summary>
public class UrlHelper
{
/// <summary>
/// ALWAYS use this instead of Uri.AbsoluteUri!
///
/// The Uri.AbsoluteUri property does escaping of non-ASCII
/// characters. When done to file URIs, this results in URIs
/// that MSHTML does not know how to resolve, as it doesn't
/// know how to interpret the Uri class's UTF-8 encoding
/// scheme.
///
/// We work around this by un-escaping the escaped high
/// characters, but leave reserved URI characters alone.
///
/// Fixes bug 741447 - Images loaded from a wpost, when the %TEMP% path contains a char needing html escaping, appear as a red X.
/// Set %TEMP% to contain a char that requires html escaping.
/// Open Writer
/// Insert image
/// Save Draft
/// Click New Post
/// Open draft
/// </summary>
public static string SafeToAbsoluteUri(Uri uri)
{
string u = uri.AbsoluteUri;
// Only necessary for file URIs
if (!uri.IsFile)
return u;
return Regex.Replace(u, @"(?:%([0-9a-z]{2}))+", SafeToAbsoluteUri_Evaluator, RegexOptions.IgnoreCase);
}
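// Illustrative example (editor's sketch, not part of the original source; the path below is hypothetical,
// and the exact escaping performed by Uri.AbsoluteUri can vary by framework version / IRI settings):
//   var uri = new Uri(@"C:\Temp\año\image.png");
//   string escaped = uri.AbsoluteUri;        // e.g. "file:///C:/Temp/a%C3%B1o/image.png" (UTF-8 percent-escaped)
//   string safe    = SafeToAbsoluteUri(uri); // "file:///C:/Temp/año/image.png" (high characters un-escaped,
//                                            //  reserved URI characters left percent-encoded)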
private static string SafeToAbsoluteUri_Evaluator(Match m)
{
int len = m.Length / 3;
List<byte> bytes = new List<byte>(len);
foreach (Capture cap in m.Groups[1].Captures)
{
int hi = MathHelper.HexToInt(cap.Value[0]);
int lo = MathHelper.HexToInt(cap.Value[1]);
bytes.Add((byte)(lo | (hi << 4)));
}
string s = Encoding.UTF8.GetString(bytes.ToArray());
return Regex.Replace(s, @"[:/?#\[\]@%!$&'()*+,;=]", SafeToAbsoluteUri_Evaluator2);
}
private static string SafeToAbsoluteUri_Evaluator2(Match match)
{
Debug.Assert(match.Length == 1);
return "%" + ((int)match.Value[0]).ToString("X2", CultureInfo.InvariantCulture);
}
/// <summary>
/// Ensures a url has a leading http scheme
/// </summary>
/// <param name="url">The url</param>
/// <returns>The corrected url</returns>
public static string FixUpUrl(string url)
{
url = url.Trim();
if (url == string.Empty)
url = "";
else if (url.StartsWith("//", StringComparison.OrdinalIgnoreCase))
url = "http:" + url;
else if (url.IndexOf(":", StringComparison.OrdinalIgnoreCase) < 0)
url = "http://" + url;
return url;
}
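// Illustrative behavior (editor's sketch based on the logic above; example inputs are hypothetical):
//   FixUpUrl(" example.com ")          -> "http://example.com"          (no ':' present, so http:// is prepended)
//   FixUpUrl("//cdn.example.com/a.js") -> "http://cdn.example.com/a.js" (protocol-relative, so http: is prepended)
//   FixUpUrl("https://example.com")    -> unchanged                     (already contains a ':')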
/// <summary>
/// Ensures that a url has a trailing slash (if it needs it)
/// </summary>
/// <param name="url">The url</param>
/// <returns>The corrected url</returns>
public static string InsureTrailingSlash(string url)
{
if ((url == null) || (url.Trim() == string.Empty))
return string.Empty;
string newUrl = url;
if (UrlHelper.GetExtensionForUrl(newUrl) == string.Empty && !newUrl.EndsWith("/", StringComparison.OrdinalIgnoreCase))
{
newUrl = newUrl + "/";
}
return newUrl;
}
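// Illustrative behavior (editor's sketch; example urls are hypothetical):
//   InsureTrailingSlash("http://example.com/blog")    -> "http://example.com/blog/" (no extension, slash appended)
//   InsureTrailingSlash("http://example.com/pic.png") -> unchanged                  (url has a file extension)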
/// <summary>
/// Create a valid url from a file path
/// </summary>
/// <param name="path">file path</param>
/// <returns>valid url</returns>
public static string CreateUrlFromPath(string path)
{
return SafeToAbsoluteUri(new Uri(path));
#if FALSE
// This is the old implementation which exposes problems with URL encoded characters in paths.
// (such as c:\temp\232323232%7Ffp%3A%3C%3Dot%3E2378%3D664%3D88%3B%3DXROQDF%3E2323%3A77%3B9%3B537ot1lsi.gif)
//
// It was replaced with the above implementation to address this type of bug, (bug #608613)
// The only downside of using the Uri above is that it doesn't handle UNC paths of the form:
// \\?\UNC\...
// allocate buffer to hold url
uint bufferSize = 4096 ;
StringBuilder buffer = new StringBuilder(Convert.ToInt32(bufferSize)) ;
// normalize the url
int result = Shlwapi.UrlCreateFromPath( path, buffer, ref bufferSize, 0 ) ;
// successfully converted
if ( result == HRESULT.S_OK )
{
// return URL converted to a .NET URL encoded value
string url = buffer.ToString();
url = ShlwapiFileUrlToDotnetEncodedUrl(url); //fixes bug 47859
try
{
if(new FileInfo(path).FullName != new FileInfo(new Uri(url).LocalPath).FullName)
{
Trace.Fail("Possible bug encoding/decoding path: " + path);
}
}
catch(Exception ex)
{
Trace.Fail("Exception while checking path encoding. Original path: " + path + " url from Shlwapi: " + url);
throw ex;
}
return url;
}
// didn't need conversion
else if ( result == HRESULT.S_FALSE )
{
// docs say that even if we don't need conversion it will
// copy the buffer we passed it to the result
Debug.Assert( path.Equals(buffer.ToString()) );
// return start url
return path ;
}
// unexpected error occurred!
else
{
throw new
COMException( "Error calling UrlCreateFromPath for path " + path, result ) ;
}
#endif
}
/// <summary>
/// Converts a Shlwapi encoded file URL to a dotnet encoded URL
/// (required for decoding the url string with the URI class)
/// </summary>
/// <param name="s"></param>
/// <returns></returns>
private static string ShlwapiFileUrlToDotnetEncodedUrl(string s)
{
if (!s.StartsWith(FILE_SCHEME, StringComparison.OrdinalIgnoreCase))
{
Debug.Fail("URL is not a file url:", s);
return s;
}
int bufferSize = s.Length;
bool containsUnsafeChars = false;
StringBuilder sb = new StringBuilder();
for (int i = FILE_SCHEME.Length; i < bufferSize; i++)
{
char ch = s[i];
if ((ch == '%') && (i < (bufferSize - 2))) //decode encoded hex bytes
{
int num1 = MathHelper.HexToInt(s[i + 1]);
int num2 = MathHelper.HexToInt(s[i + 2]);
if ((num1 >= 0) && (num2 >= 0))
{
byte b = (byte)((num1 << 4) | num2);
ch = (char)b;
i += 2;
}
}
if (!containsUnsafeChars) //scan for unsafe chars - fixes bug 482444
{
foreach (char unsafeChar in UNSAFE_URL_CHARS)
{
if (ch == unsafeChar)
containsUnsafeChars = true;
}
}
sb.Append(ch);
}
//re-encode the URL using the .NET urlPathEncoding scheme
string decodedUrl = sb.ToString();
string encodedUrl = FILE_SCHEME + HttpUtility.UrlPathEncode(decodedUrl);
if (containsUnsafeChars) //fixes bug 482444
{
foreach (char unsafeChar in UNSAFE_URL_CHARS)
{
string unsafeCharStr = unsafeChar + String.Empty;
encodedUrl = encodedUrl.Replace(unsafeCharStr, HttpUtility.UrlEncode(unsafeCharStr)); //URLPathEncode doesn't encode # signs.
}
}
return encodedUrl;
}
private const string FILE_SCHEME = "file:";
private static char[] UNSAFE_URL_CHARS = new char[] { '#' }; //chars not escaped by UrlPathEncode that need to be encoded in the URL
/// <summary>
/// Gets the domain portion of a url by returning the last two labels of the host name (e.g. example.com),
/// or the full host name when it contains two or fewer labels
/// </summary>
/// <param name="url">The url for which to get the host name</param>
/// <returns>The host name</returns>
public static string GetDomain(string url)
{
if (!IsUrl(url))
return null;
Uri uri = new Uri(url);
// If the url is at least like x.y.z, split it and return the last 2 parts
string[] parts = uri.Host.Split('.');
if (parts.Length > 2)
{
return String.Format(CultureInfo.InvariantCulture, "{0}.{1}", parts[parts.Length - 2], parts[parts.Length - 1]);
}
else
return uri.Host;
}
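// Illustrative behavior (editor's sketch; example urls are hypothetical):
//   GetDomain("http://www.example.com/page") -> "example.com" (last two labels of the host)
//   GetDomain("http://example.com/")         -> "example.com" (two labels, returned as-is)
//   GetDomain("example.com")                 -> null          (IsUrl requires a "://" separator)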
/// <summary>
/// Gets the host name portion of a url (including any 'www' prefix)
/// </summary>
/// <param name="url">The url for which to get the host name</param>
/// <returns>The host name</returns>
public static string GetHostName(string url)
{
if (!IsUrl(url))
return null;
return new Uri(url).Host;
}
public static string GetUrlWithoutAnchorIdentifier(string url)
{
int octPosition = url.LastIndexOf('#');
if (octPosition > -1)
{
url = url.Substring(0, octPosition);
}
return url;
}
public static string GetAnchorIdentifier(string url)
{
string anchor = null;
int octPosition = url.LastIndexOf('#');
if (octPosition > -1)
{
int startPos = octPosition + 1;
anchor = url.Substring(startPos, url.Length - startPos);
}
return anchor;
}
public static string GetPrettyUrl(string url, int length)
{
if (!IsUrl(url))
return null;
if (url.Length > length)
return url.Substring(0, length - 3) + Res.Get(StringId.WithEllipses);
else
return url;
}
/// <summary>
/// Determines whether a string is a valid Url
/// </summary>
/// <param name="url">The url to validate</param>
/// <returns>true if the url is a valid url, otherwise false</returns>
public static bool IsUrl(string url)
{
if (url != null && url.IndexOf("://", StringComparison.OrdinalIgnoreCase) > -1)
{
try
{
Uri uri = new Uri(url);
}
catch (UriFormatException)
{
return false;
}
return true;
}
else
return false;
#if false
// TODO: For some reason, IsValidURL is always returning 1 (S_FALSE)
// no matter what URL you pass into the sucker.
// Handle only the base URL
if (url.IndexOf("?") > -1)
url = url.Substring(0, url.IndexOf("?"));
int hResult = UrlMon.IsValidURL(
IntPtr.Zero,
url,
0);
switch (hResult)
{
case HRESULT.S_OK:
return true;
case HRESULT.E_INVALIDARG:
Trace.Log("IsUrl returned HRESULT.E_INVALIDARG for this url: " + url);
return false;
case HRESULT.S_FALSE:
default:
return false;
}
#endif
}
/// <summary>
/// Indicates whether the scheme of the current Url is a well known scheme. This is helpful
/// since certain urls (like outlook urls of the format outbind://173-000000007A8E4513E635304C91A43CFC57ADB0BA04F52700/)
/// will validate as legal Uris, yet for many applications will not be useful.
/// </summary>
/// <param name="url">The url for which to validate the scheme</param>
/// <returns>True if the scheme is well known, otherwise false</returns>
public static bool IsKnownScheme(string url)
{
try
{
Uri uri = new Uri(url);
foreach (string scheme in KnownSchemes)
if (uri.Scheme == scheme)
return true;
}
catch (Exception)
{
//may occur if the URL is malformed
}
return false;
}
/// <summary>
/// Returns true if the string starts with a known scheme, even if it is not a valid URL.
/// </summary>
public static bool StartsWithKnownScheme(string url)
{
foreach (string scheme in KnownSchemes)
if (url.StartsWith(scheme, StringComparison.OrdinalIgnoreCase))
return true;
return false;
}
/// <summary>
/// The list of well known schemes
/// </summary>
private static string[] KnownSchemes = new string[] { Uri.UriSchemeFile, Uri.UriSchemeFtp, Uri.UriSchemeGopher,
Uri.UriSchemeHttp, Uri.UriSchemeHttps, Uri.UriSchemeMailto,
Uri.UriSchemeNews, Uri.UriSchemeNntp, "telnet", "wais", "ldap" };
/// <summary>
/// Gets the file extension for a url (including ignoring query strings and the like)
/// </summary>
/// <param name="url">The url for which to get the file extension</param>
/// <returns>The file extension (with the .)</returns>
public static string GetExtensionForUrl(string url)
{
// Try to discard the query string, if possible
if (IsUrl(url))
{
url = new Uri(url).GetLeftPart(UriPartial.Path);
}
return Path.GetExtension(url);
}
/// <summary>
/// Returns the name of the file (without its extension) for a given URL
/// </summary>
/// <param name="url">The url</param>
/// <returns>The file name without its extension</returns>
public static string GetFileNameWithoutExtensionForUrl(string url)
{
// Try to discard the query string, if possible
if (IsUrl(url))
{
url = new Uri(url).GetLeftPart(UriPartial.Path);
}
return Path.GetFileNameWithoutExtension(url);
}
/// <summary>
/// Returns the name of the file for a given URL
/// </summary>
/// <param name="url">The url</param>
/// <returns>The name of the file</returns>
public static string GetFileNameForUrl(string url)
{
// Try to discard the query string, if possible
if (IsUrl(url))
{
url = new Uri(url).GetLeftPart(UriPartial.Path);
}
return Path.GetFileName(url);
}
/// <summary>
/// Gets the content type for a given url
/// </summary>
/// <param name="url">The url</param>
/// <returns>The content type</returns>
public static UrlContentTypeInfo GetUrlContentType(string url)
{
return ContentTypeHelper.ExpensivelyGetUrlContentType(url, 5000);
}
public static bool IsFileUrl(string url)
{
if (IsUrl(url))
return new Uri(url).IsFile;
else
return false;
}
public static bool IsFile(string url)
{
try
{
return new Uri(url).IsFile;
}
catch (UriFormatException)
{
return false;
}
}
public static bool UrlsAreEqual(string url, string secondUrl)
{
if (UrlHelper.IsUrl(url) && UrlHelper.IsUrl(secondUrl))
{
Uri uri = new Uri(url);
Uri secondUri = new Uri(secondUrl);
return SafeToAbsoluteUri(uri) == SafeToAbsoluteUri(secondUri);
}
else
return url == secondUrl;
}
/// <summary>
/// Resolves a relative URL against a base URL (typically the URL of an HTML document contained in an IDataObject).
/// </summary>
/// <param name="baseUrl">The base url against which to resolve the relative URL</param>
/// <param name="relativeUrl">The relative URL to resolve</param>
/// <returns>The absolute url</returns>
public static string EscapeRelativeURL(string baseUrl, string relativeUrl)
{
// Handle urls that don't contain the base url
if (relativeUrl.StartsWith(AboutBlank, StringComparison.OrdinalIgnoreCase))
{
relativeUrl = relativeUrl.Replace(AboutBlank, "");
}
if (relativeUrl.StartsWith(About, StringComparison.OrdinalIgnoreCase))
{
relativeUrl = relativeUrl.Replace(About, "");
}
if (relativeUrl.StartsWith("//", StringComparison.OrdinalIgnoreCase))
{
relativeUrl = relativeUrl.Replace("//", "http://");
}
if (!UrlHelper.IsUrl(relativeUrl) && baseUrl != null && baseUrl != string.Empty)
{
//bug fix: Use UrlCombine() to compensate for bugs in Shlwapi.UrlCombine()
relativeUrl = UrlCombine(baseUrl, relativeUrl);
}
return relativeUrl;
}
/// <summary>
/// Returns url with appended query string
/// For example, if url is http://blah?a=1 and parameters is "b=2", this will return http://blah?a=1&amp;b=2
/// </summary>
/// <param name="url"></param>
/// <param name="parameters"></param>
/// <returns></returns>
public static string AppendQueryParameters(string url, string[] parameters)
{
if (String.IsNullOrEmpty(url))
return url;
Debug.Assert(IsUrl(url));
Uri uri = new Uri(url);
string appendedUrl = url;
bool queryWasEmpty = String.IsNullOrEmpty(uri.Query);
if (parameters.Length > 0)
{
if (queryWasEmpty)
appendedUrl += "?";
else
appendedUrl += "&";
}
Debug.Assert(!new List<string>(parameters).Exists((p) => String.IsNullOrEmpty(p)));
appendedUrl += String.Join("&", parameters);
Debug.Assert(IsUrl(appendedUrl));
return appendedUrl;
}
/// <summary>
/// Gets the base Url from a full Url
/// The baseUrl is considered the scheme+hostname with only the root path.
/// Use GetBasePathUrl if you want the base directory of a file URL.
///
/// </summary>
/// <param name="url">The url</param>
/// <returns>The base url</returns>
public static string GetBaseUrl(string url)
{
if (url == null || url.Length == 0)
return String.Empty;
try
{
Uri uri = new Uri(url);
return uri.GetLeftPart(UriPartial.Authority);
}
catch (Exception e)
{
if (!url.StartsWith("outbind", StringComparison.OrdinalIgnoreCase)) // Outlook URLs (pre-2007) don't have base URLs
Trace.Fail("Failed to parse URI: " + url + "\r\n" + e.ToString());
return string.Empty;
}
}
public static string GetServerPath(string url)
{
if (url == null || url.Length == 0)
return String.Empty;
Uri uri = new Uri(url);
return uri.AbsolutePath;
}
/// <summary>
/// Returns the base path from a full Url.
/// This basically strips the filename from a URL, leaving only its directory.
/// Warning: if a URL directory is passed in without a trailing slash, the directory
/// name will be considered a filename, and will be stripped.
/// </summary>
/// <param name="url"></param>
/// <returns></returns>
public static string GetBasePathUrl(string url)
{
if (String.IsNullOrEmpty(url))
return string.Empty;
try
{
//trim off everything after the last slash in the path
UriBuilder uriBuilder = new UriBuilder(new Uri(url));
int index = uriBuilder.Path.LastIndexOf("/", StringComparison.OrdinalIgnoreCase);
if (index != -1)
{
uriBuilder.Path = uriBuilder.Path.Substring(0, index + 1);
uriBuilder.Fragment = string.Empty;
uriBuilder.Query = string.Empty;
}
string baseUrl = SafeToAbsoluteUri(uriBuilder.Uri);
return baseUrl;
}
catch (Exception e)
{
if (!url.StartsWith("outbind", StringComparison.OrdinalIgnoreCase)) // Outlook URLs (pre-2007) don't have base URLs
Trace.Fail("Failed to parse URI: " + url + "\r\n" + e.ToString());
return string.Empty;
}
}
/// <summary>
/// Cleans up a url
/// </summary>
/// <param name="url">The url to clean up</param>
/// <returns>The clean url</returns>
public static string CleanUpUrl(string url)
{
if (url != null)
{
url = url.Replace("&", "&");
url = url.Replace(AboutBlank, "");
}
return url;
}
/// <summary>
/// Returns a hashtable of the query parameters parsed from a URL
/// </summary>
/// <param name="url">The url from which to get the query parameters (Note: This will be fooled
/// by html escaped strings such as &amp; in the url)</param>
/// <returns>A hashtable of the parameters</returns>
public static Hashtable GetQueryParams(string url)
{
Hashtable queryParams = new DefaultHashtable(new DefaultHashtable.DefaultValuePump(DefaultHashtable.ArrayListDefault));
Uri uri = new Uri(url);
string trimmedUrl = uri.Query.TrimStart('?');
string[] pairs = trimmedUrl.Split('&');
foreach (string pair in pairs)
{
string[] splitPairs = pair.Split('=');
if (splitPairs.Length == 2)
((ArrayList)queryParams[splitPairs[0]]).Add(splitPairs[1]);
}
return queryParams;
}
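// Illustrative behavior (editor's sketch; the url is hypothetical, and values are returned un-decoded):
//   Hashtable p = GetQueryParams("http://example.com/page?a=1&a=2&b=3");
//   // ((ArrayList)p["a"]) contains "1" and "2"; ((ArrayList)p["b"]) contains "3"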
/// <summary>
/// Helper to get the value of a named query string parameter.
/// </summary>
/// <param name="url">Valid url to parse for the parameter</param>
/// <param name="paramName">The name of the parameter whose value will be returned</param>
/// <returns>null, if paramName does not exist in query string</returns>
public static string GetQueryParamValue(string url, string paramName)
{
if (String.IsNullOrEmpty(url))
throw new ArgumentException("Invalid url.");
if (String.IsNullOrEmpty(paramName))
throw new ArgumentException("Invalid parameter name.");
Hashtable queryParams = GetQueryParams(url);
if (queryParams.ContainsKey(paramName))
{
if (((ArrayList)queryParams[paramName]).Count == 0)
{
Debug.Fail("The implementation of GetQueryParams changed, breaking this!");
return null;
}
return (string)((ArrayList)queryParams[paramName])[0];
}
return null;
}
public static byte[] GetShortcutFileBytesForUrl(string url)
{
// TODO: make this use the URL creation API instead of hacking together the file contents
string urlFile = "[DEFAULT]\nBASEURL=" + url + "\n[InternetShortcut]\nURL=" + url + "\nModified=1";
char[] chars = urlFile.ToCharArray();
byte[] bytes = new byte[urlFile.Length];
for (int i = 0; i < bytes.Length; i++)
{
bytes[i] = (byte)(chars[i] & 0xFF);
}
return bytes;
}
public static string GenerateShortcutFileForUrl(string fileName, string url)
{
byte[] shortcutFileBytes = GetShortcutFileBytesForUrl(url);
string shortcutFile = TempFileManager.Instance.CreateTempFile(fileName + ".url");
using (FileStream stream = new FileStream(shortcutFile, FileMode.Create, FileAccess.Write))
stream.Write(shortcutFileBytes, 0, shortcutFileBytes.Length);
return shortcutFile;
}
public static string GetLocalFileUrl(string filePath)
{
// Historically we set the dontEscape flag to true to prevent URI escaping from
// replacing high ascii or unicode characters, which can make the path invalid.
// We've been running on .Net 2.0 for some time in the field without error
// (.Net 2.0 changed the behavior of the Uri constructor to ignore the dontEscape bool), so
// we feel safe simply dropping that value.
return SafeToAbsoluteUri(new Uri(filePath));
}
public static string GetUrlFromShortCutFile(string pathToShortCut)
{
const int URL_BUFFER_SIZE = 4096;
const string SECTION_NAME = "InternetShortcut";
const string KEY_NAME = "URL";
if (PathHelper.IsPathUrlFile(pathToShortCut))
{
// Read the INI file entry
StringBuilder urlBuilder = new StringBuilder(URL_BUFFER_SIZE, URL_BUFFER_SIZE);
Kernel32.GetPrivateProfileString(
SECTION_NAME,
KEY_NAME,
"(no URL Found)",
urlBuilder,
URL_BUFFER_SIZE,
pathToShortCut
);
return urlBuilder.ToString();
}
else
return null;
}
/// <summary>
/// Combines a basepath and relative path to create a complete URL.
/// Note: this utility provides workarounds to problems with Shlwapi.UrlCombine()
/// </summary>
public static string UrlCombine(string baseUrl, string relativeUrl)
{
//bug fix: Shlwapi.UrlCombine() fails with combining paths for mhtml:file:// URLs
if (!baseUrl.StartsWith("mhtml:file://", StringComparison.OrdinalIgnoreCase))
{
// UrlCombine is escaping the urls, which means if the URL is already escaped, it is being
// double escaped.
if (IsUrl(baseUrl))
baseUrl = HttpUtility.UrlDecode(baseUrl);
relativeUrl = Shlwapi.UrlCombine(
baseUrl,
relativeUrl);
}
else
{
while (relativeUrl.StartsWith("/", StringComparison.OrdinalIgnoreCase))
relativeUrl = relativeUrl.Substring(1);
if (baseUrl.IndexOf("!", StringComparison.OrdinalIgnoreCase) > -1)
{
baseUrl = baseUrl.Split('!')[0];
}
relativeUrl = String.Format(CultureInfo.InvariantCulture, "{0}!{1}", baseUrl, relativeUrl);
}
return relativeUrl;
}
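// Illustrative behavior (editor's sketch; inputs are hypothetical, and the non-mhtml branch ultimately
// depends on Shlwapi.UrlCombine):
//   UrlCombine("http://example.com/a/", "img.png")           -> "http://example.com/a/img.png"
//   UrlCombine("mhtml:file://C:/page.mht!index", "/pic.png") -> "mhtml:file://C:/page.mht!pic.png"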
public static string UrlCombineIfRelative(string baseUrl, string relativeUrl)
{
if (UrlHelper.IsUrl(relativeUrl) || baseUrl == null)
return relativeUrl;
else
return UrlCombine(baseUrl, relativeUrl);
}
public static string GetSavedFromString(string url)
{
return string.Format(CultureInfo.InvariantCulture, "<!-- saved from url=({0:0000}){1} -->\r\n", url.Length, url);
}
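// Illustrative behavior (editor's sketch; the url is hypothetical):
//   GetSavedFromString("http://www.example.com")
//   -> "<!-- saved from url=(0022)http://www.example.com -->\r\n" (0022 is the url length, zero-padded to 4 digits)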
public static bool IsUrlLinkable(string url)
{
if (UrlHelper.IsUrl(url))
{
Uri uri = new Uri(url);
foreach (string scheme in NonlinkableSchemes)
if (uri.Scheme == scheme)
return false;
}
return true;
}
private static string[] NonlinkableSchemes = new string[] { Uri.UriSchemeFile };
public static bool IsUrlDownloadable(string url)
{
bool isDownloadable = false;
if (UrlHelper.IsUrl(url))
{
Uri uri = new Uri(url);
foreach (string scheme in DownloadableSchemes)
if (uri.Scheme == scheme)
{
isDownloadable = true;
break;
}
}
return isDownloadable;
}
private static string[] DownloadableSchemes = new string[] { Uri.UriSchemeFile, Uri.UriSchemeHttp, Uri.UriSchemeHttps };
public static readonly string AboutBlank = "about:blank";
public static readonly string About = "about:";
}
}
| |
namespace Humidifier.S3
{
using System.Collections.Generic;
using BucketTypes;
public class Bucket : Humidifier.Resource
{
public static class Attributes
{
public static string Arn = "Arn" ;
public static string DomainName = "DomainName" ;
public static string DualStackDomainName = "DualStackDomainName" ;
public static string RegionalDomainName = "RegionalDomainName" ;
public static string WebsiteURL = "WebsiteURL" ;
}
public override string AWSTypeName
{
get
{
return @"AWS::S3::Bucket";
}
}
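// Illustrative usage (editor's sketch; the bucket name is hypothetical, and only properties declared on this
// class are used):
//   var bucket = new Bucket
//   {
//       BucketName = "my-example-bucket",
//       AccessControl = "Private"
//   };
//   // Serializes as a CloudFormation resource of type "AWS::S3::Bucket".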
/// <summary>
/// AccelerateConfiguration
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket.html#cfn-s3-bucket-accelerateconfiguration
/// Required: False
/// UpdateType: Mutable
/// Type: AccelerateConfiguration
/// </summary>
public AccelerateConfiguration AccelerateConfiguration
{
get;
set;
}
/// <summary>
/// AccessControl
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket.html#cfn-s3-bucket-accesscontrol
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic AccessControl
{
get;
set;
}
/// <summary>
/// AnalyticsConfigurations
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket.html#cfn-s3-bucket-analyticsconfigurations
/// Required: False
/// UpdateType: Mutable
/// Type: List
/// ItemType: AnalyticsConfiguration
/// </summary>
public List<AnalyticsConfiguration> AnalyticsConfigurations
{
get;
set;
}
/// <summary>
/// BucketEncryption
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket.html#cfn-s3-bucket-bucketencryption
/// Required: False
/// UpdateType: Mutable
/// Type: BucketEncryption
/// </summary>
public BucketEncryption BucketEncryption
{
get;
set;
}
/// <summary>
/// BucketName
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket.html#cfn-s3-bucket-name
/// Required: False
/// UpdateType: Immutable
/// PrimitiveType: String
/// </summary>
public dynamic BucketName
{
get;
set;
}
/// <summary>
/// CorsConfiguration
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket.html#cfn-s3-bucket-crossoriginconfig
/// Required: False
/// UpdateType: Mutable
/// Type: CorsConfiguration
/// </summary>
public CorsConfiguration CorsConfiguration
{
get;
set;
}
/// <summary>
/// InventoryConfigurations
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket.html#cfn-s3-bucket-inventoryconfigurations
/// Required: False
/// UpdateType: Mutable
/// Type: List
/// ItemType: InventoryConfiguration
/// </summary>
public List<InventoryConfiguration> InventoryConfigurations
{
get;
set;
}
/// <summary>
/// LifecycleConfiguration
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket.html#cfn-s3-bucket-lifecycleconfig
/// Required: False
/// UpdateType: Mutable
/// Type: LifecycleConfiguration
/// </summary>
public LifecycleConfiguration LifecycleConfiguration
{
get;
set;
}
/// <summary>
/// LoggingConfiguration
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket.html#cfn-s3-bucket-loggingconfig
/// Required: False
/// UpdateType: Mutable
/// Type: LoggingConfiguration
/// </summary>
public LoggingConfiguration LoggingConfiguration
{
get;
set;
}
/// <summary>
/// MetricsConfigurations
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket.html#cfn-s3-bucket-metricsconfigurations
/// Required: False
/// UpdateType: Mutable
/// Type: List
/// ItemType: MetricsConfiguration
/// </summary>
public List<MetricsConfiguration> MetricsConfigurations
{
get;
set;
}
/// <summary>
/// NotificationConfiguration
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket.html#cfn-s3-bucket-notification
/// Required: False
/// UpdateType: Mutable
/// Type: NotificationConfiguration
/// </summary>
public NotificationConfiguration NotificationConfiguration
{
get;
set;
}
/// <summary>
/// ObjectLockConfiguration
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket.html#cfn-s3-bucket-objectlockconfiguration
/// Required: False
/// UpdateType: Mutable
/// Type: ObjectLockConfiguration
/// </summary>
public ObjectLockConfiguration ObjectLockConfiguration
{
get;
set;
}
/// <summary>
/// ObjectLockEnabled
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket.html#cfn-s3-bucket-objectlockenabled
/// Required: False
/// UpdateType: Immutable
/// PrimitiveType: Boolean
/// </summary>
public dynamic ObjectLockEnabled
{
get;
set;
}
/// <summary>
/// PublicAccessBlockConfiguration
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket.html#cfn-s3-bucket-publicaccessblockconfiguration
/// Required: False
/// UpdateType: Mutable
/// Type: PublicAccessBlockConfiguration
/// </summary>
public PublicAccessBlockConfiguration PublicAccessBlockConfiguration
{
get;
set;
}
/// <summary>
/// ReplicationConfiguration
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket.html#cfn-s3-bucket-replicationconfiguration
/// Required: False
/// UpdateType: Mutable
/// Type: ReplicationConfiguration
/// </summary>
public ReplicationConfiguration ReplicationConfiguration
{
get;
set;
}
/// <summary>
/// Tags
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket.html#cfn-s3-bucket-tags
/// Required: False
/// UpdateType: Mutable
/// Type: List
/// ItemType: Tag
/// </summary>
public List<Tag> Tags
{
get;
set;
}
/// <summary>
/// VersioningConfiguration
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket.html#cfn-s3-bucket-versioning
/// Required: False
/// UpdateType: Mutable
/// Type: VersioningConfiguration
/// </summary>
public VersioningConfiguration VersioningConfiguration
{
get;
set;
}
/// <summary>
/// WebsiteConfiguration
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket.html#cfn-s3-bucket-websiteconfiguration
/// Required: False
/// UpdateType: Mutable
/// Type: WebsiteConfiguration
/// </summary>
public WebsiteConfiguration WebsiteConfiguration
{
get;
set;
}
}
namespace BucketTypes
{
public class BucketEncryption
{
/// <summary>
/// ServerSideEncryptionConfiguration
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-bucketencryption.html#cfn-s3-bucket-bucketencryption-serversideencryptionconfiguration
/// Required: True
/// UpdateType: Mutable
/// Type: List
/// ItemType: ServerSideEncryptionRule
/// </summary>
public List<ServerSideEncryptionRule> ServerSideEncryptionConfiguration
{
get;
set;
}
}
public class RoutingRuleCondition
{
/// <summary>
/// HttpErrorCodeReturnedEquals
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-websiteconfiguration-routingrules-routingrulecondition.html#cfn-s3-websiteconfiguration-routingrules-routingrulecondition-httperrorcodereturnedequals
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic HttpErrorCodeReturnedEquals
{
get;
set;
}
/// <summary>
/// KeyPrefixEquals
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-websiteconfiguration-routingrules-routingrulecondition.html#cfn-s3-websiteconfiguration-routingrules-routingrulecondition-keyprefixequals
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic KeyPrefixEquals
{
get;
set;
}
}
public class LifecycleConfiguration
{
/// <summary>
/// Rules
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-lifecycleconfig.html#cfn-s3-bucket-lifecycleconfig-rules
/// Required: True
/// UpdateType: Mutable
/// Type: List
/// ItemType: Rule
/// </summary>
public List<Rule> Rules
{
get;
set;
}
}
public class CorsRule
{
/// <summary>
/// AllowedHeaders
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-cors-corsrule.html#cfn-s3-bucket-cors-corsrule-allowedheaders
/// Required: False
/// UpdateType: Mutable
/// Type: List
/// PrimitiveItemType: String
/// </summary>
public dynamic AllowedHeaders
{
get;
set;
}
/// <summary>
/// AllowedMethods
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-cors-corsrule.html#cfn-s3-bucket-cors-corsrule-allowedmethods
/// Required: True
/// UpdateType: Mutable
/// Type: List
/// PrimitiveItemType: String
/// </summary>
public dynamic AllowedMethods
{
get;
set;
}
/// <summary>
/// AllowedOrigins
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-cors-corsrule.html#cfn-s3-bucket-cors-corsrule-allowedorigins
/// Required: True
/// UpdateType: Mutable
/// Type: List
/// PrimitiveItemType: String
/// </summary>
public dynamic AllowedOrigins
{
get;
set;
}
/// <summary>
/// ExposedHeaders
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-cors-corsrule.html#cfn-s3-bucket-cors-corsrule-exposedheaders
/// Required: False
/// UpdateType: Mutable
/// Type: List
/// PrimitiveItemType: String
/// </summary>
public dynamic ExposedHeaders
{
get;
set;
}
/// <summary>
/// Id
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-cors-corsrule.html#cfn-s3-bucket-cors-corsrule-id
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Id
{
get;
set;
}
/// <summary>
/// MaxAge
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-cors-corsrule.html#cfn-s3-bucket-cors-corsrule-maxage
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: Integer
/// </summary>
public dynamic MaxAge
{
get;
set;
}
}
public class Destination
{
/// <summary>
/// BucketAccountId
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-destination.html#cfn-s3-bucket-destination-bucketaccountid
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic BucketAccountId
{
get;
set;
}
/// <summary>
/// BucketArn
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-destination.html#cfn-s3-bucket-destination-bucketarn
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic BucketArn
{
get;
set;
}
/// <summary>
/// Format
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-destination.html#cfn-s3-bucket-destination-format
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Format
{
get;
set;
}
/// <summary>
/// Prefix
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-destination.html#cfn-s3-bucket-destination-prefix
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Prefix
{
get;
set;
}
}
public class AccessControlTranslation
{
/// <summary>
/// Owner
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-accesscontroltranslation.html#cfn-s3-bucket-accesscontroltranslation-owner
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Owner
{
get;
set;
}
}
public class VersioningConfiguration
{
/// <summary>
/// Status
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-versioningconfig.html#cfn-s3-bucket-versioningconfig-status
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Status
{
get;
set;
}
}
public class NotificationConfiguration
{
/// <summary>
/// LambdaConfigurations
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-notificationconfig.html#cfn-s3-bucket-notificationconfig-lambdaconfig
/// Required: False
/// UpdateType: Mutable
/// Type: List
/// ItemType: LambdaConfiguration
/// </summary>
public List<LambdaConfiguration> LambdaConfigurations
{
get;
set;
}
/// <summary>
/// QueueConfigurations
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-notificationconfig.html#cfn-s3-bucket-notificationconfig-queueconfig
/// Required: False
/// UpdateType: Mutable
/// Type: List
/// ItemType: QueueConfiguration
/// </summary>
public List<QueueConfiguration> QueueConfigurations
{
get;
set;
}
/// <summary>
/// TopicConfigurations
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-notificationconfig.html#cfn-s3-bucket-notificationconfig-topicconfig
/// Required: False
/// UpdateType: Mutable
/// Type: List
/// ItemType: TopicConfiguration
/// </summary>
public List<TopicConfiguration> TopicConfigurations
{
get;
set;
}
}
public class ServerSideEncryptionByDefault
{
/// <summary>
/// KMSMasterKeyID
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-serversideencryptionbydefault.html#cfn-s3-bucket-serversideencryptionbydefault-kmsmasterkeyid
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic KMSMasterKeyID
{
get;
set;
}
/// <summary>
/// SSEAlgorithm
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-serversideencryptionbydefault.html#cfn-s3-bucket-serversideencryptionbydefault-ssealgorithm
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic SSEAlgorithm
{
get;
set;
}
}
public class RedirectAllRequestsTo
{
/// <summary>
/// HostName
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-websiteconfiguration-redirectallrequeststo.html#cfn-s3-websiteconfiguration-redirectallrequeststo-hostname
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic HostName
{
get;
set;
}
/// <summary>
/// Protocol
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-websiteconfiguration-redirectallrequeststo.html#cfn-s3-websiteconfiguration-redirectallrequeststo-protocol
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Protocol
{
get;
set;
}
}
public class S3KeyFilter
{
/// <summary>
/// Rules
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-notificationconfiguration-config-filter-s3key.html#cfn-s3-bucket-notificationconfiguraiton-config-filter-s3key-rules
/// Required: True
/// UpdateType: Mutable
/// Type: List
/// ItemType: FilterRule
/// </summary>
public List<FilterRule> Rules
{
get;
set;
}
}
public class InventoryConfiguration
{
/// <summary>
/// Destination
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-inventoryconfiguration.html#cfn-s3-bucket-inventoryconfiguration-destination
/// Required: True
/// UpdateType: Mutable
/// Type: Destination
/// </summary>
public Destination Destination
{
get;
set;
}
/// <summary>
/// Enabled
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-inventoryconfiguration.html#cfn-s3-bucket-inventoryconfiguration-enabled
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: Boolean
/// </summary>
public dynamic Enabled
{
get;
set;
}
/// <summary>
/// Id
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-inventoryconfiguration.html#cfn-s3-bucket-inventoryconfiguration-id
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Id
{
get;
set;
}
/// <summary>
/// IncludedObjectVersions
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-inventoryconfiguration.html#cfn-s3-bucket-inventoryconfiguration-includedobjectversions
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic IncludedObjectVersions
{
get;
set;
}
/// <summary>
/// OptionalFields
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-inventoryconfiguration.html#cfn-s3-bucket-inventoryconfiguration-optionalfields
/// Required: False
/// UpdateType: Mutable
/// Type: List
/// PrimitiveItemType: String
/// </summary>
public dynamic OptionalFields
{
get;
set;
}
/// <summary>
/// Prefix
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-inventoryconfiguration.html#cfn-s3-bucket-inventoryconfiguration-prefix
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Prefix
{
get;
set;
}
/// <summary>
/// ScheduleFrequency
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-inventoryconfiguration.html#cfn-s3-bucket-inventoryconfiguration-schedulefrequency
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic ScheduleFrequency
{
get;
set;
}
}
public class ReplicationConfiguration
{
/// <summary>
/// Role
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-replicationconfiguration.html#cfn-s3-bucket-replicationconfiguration-role
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Role
{
get;
set;
}
/// <summary>
/// Rules
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-replicationconfiguration.html#cfn-s3-bucket-replicationconfiguration-rules
/// Required: True
/// UpdateType: Mutable
/// Type: List
/// ItemType: ReplicationRule
/// </summary>
public List<ReplicationRule> Rules
{
get;
set;
}
}
public class SseKmsEncryptedObjects
{
/// <summary>
/// Status
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-ssekmsencryptedobjects.html#cfn-s3-bucket-ssekmsencryptedobjects-status
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Status
{
get;
set;
}
}
public class QueueConfiguration
{
/// <summary>
/// Event
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-notificationconfig-queueconfig.html#cfn-s3-bucket-notificationconfig-queueconfig-event
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Event
{
get;
set;
}
/// <summary>
/// Filter
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-notificationconfig-queueconfig.html#cfn-s3-bucket-notificationconfig-queueconfig-filter
/// Required: False
/// UpdateType: Mutable
/// Type: NotificationFilter
/// </summary>
public NotificationFilter Filter
{
get;
set;
}
/// <summary>
/// Queue
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-notificationconfig-queueconfig.html#cfn-s3-bucket-notificationconfig-queueconfig-queue
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Queue
{
get;
set;
}
}
public class ObjectLockConfiguration
{
/// <summary>
/// ObjectLockEnabled
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-objectlockconfiguration.html#cfn-s3-bucket-objectlockconfiguration-objectlockenabled
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic ObjectLockEnabled
{
get;
set;
}
/// <summary>
/// Rule
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-objectlockconfiguration.html#cfn-s3-bucket-objectlockconfiguration-rule
/// Required: False
/// UpdateType: Mutable
/// Type: ObjectLockRule
/// </summary>
public ObjectLockRule Rule
{
get;
set;
}
}
public class CorsConfiguration
{
/// <summary>
/// CorsRules
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-cors.html#cfn-s3-bucket-cors-corsrule
/// Required: True
/// UpdateType: Mutable
/// Type: List
/// ItemType: CorsRule
/// </summary>
public List<CorsRule> CorsRules
{
get;
set;
}
}
public class ReplicationDestination
{
/// <summary>
/// AccessControlTranslation
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-replicationconfiguration-rules-destination.html#cfn-s3-bucket-replicationdestination-accesscontroltranslation
/// Required: False
/// UpdateType: Mutable
/// Type: AccessControlTranslation
/// </summary>
public AccessControlTranslation AccessControlTranslation
{
get;
set;
}
/// <summary>
/// Account
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-replicationconfiguration-rules-destination.html#cfn-s3-bucket-replicationdestination-account
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Account
{
get;
set;
}
/// <summary>
/// Bucket
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-replicationconfiguration-rules-destination.html#cfn-s3-bucket-replicationconfiguration-rules-destination-bucket
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Bucket
{
get;
set;
}
/// <summary>
/// EncryptionConfiguration
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-replicationconfiguration-rules-destination.html#cfn-s3-bucket-replicationdestination-encryptionconfiguration
/// Required: False
/// UpdateType: Mutable
/// Type: EncryptionConfiguration
/// </summary>
public EncryptionConfiguration EncryptionConfiguration
{
get;
set;
}
/// <summary>
/// StorageClass
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-replicationconfiguration-rules-destination.html#cfn-s3-bucket-replicationconfiguration-rules-destination-storageclass
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic StorageClass
{
get;
set;
}
}
public class AccelerateConfiguration
{
/// <summary>
/// AccelerationStatus
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-accelerateconfiguration.html#cfn-s3-bucket-accelerateconfiguration-accelerationstatus
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic AccelerationStatus
{
get;
set;
}
}
public class NoncurrentVersionTransition
{
/// <summary>
/// StorageClass
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-lifecycleconfig-rule-noncurrentversiontransition.html#cfn-s3-bucket-lifecycleconfig-rule-noncurrentversiontransition-storageclass
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic StorageClass
{
get;
set;
}
/// <summary>
/// TransitionInDays
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-lifecycleconfig-rule-noncurrentversiontransition.html#cfn-s3-bucket-lifecycleconfig-rule-noncurrentversiontransition-transitionindays
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: Integer
/// </summary>
public dynamic TransitionInDays
{
get;
set;
}
}
public class AbortIncompleteMultipartUpload
{
/// <summary>
/// DaysAfterInitiation
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-abortincompletemultipartupload.html#cfn-s3-bucket-abortincompletemultipartupload-daysafterinitiation
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: Integer
/// </summary>
public dynamic DaysAfterInitiation
{
get;
set;
}
}
public class PublicAccessBlockConfiguration
{
/// <summary>
/// BlockPublicAcls
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-publicaccessblockconfiguration.html#cfn-s3-bucket-publicaccessblockconfiguration-blockpublicacls
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: Boolean
/// </summary>
public dynamic BlockPublicAcls
{
get;
set;
}
/// <summary>
/// BlockPublicPolicy
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-publicaccessblockconfiguration.html#cfn-s3-bucket-publicaccessblockconfiguration-blockpublicpolicy
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: Boolean
/// </summary>
public dynamic BlockPublicPolicy
{
get;
set;
}
/// <summary>
/// IgnorePublicAcls
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-publicaccessblockconfiguration.html#cfn-s3-bucket-publicaccessblockconfiguration-ignorepublicacls
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: Boolean
/// </summary>
public dynamic IgnorePublicAcls
{
get;
set;
}
/// <summary>
/// RestrictPublicBuckets
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-publicaccessblockconfiguration.html#cfn-s3-bucket-publicaccessblockconfiguration-restrictpublicbuckets
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: Boolean
/// </summary>
public dynamic RestrictPublicBuckets
{
get;
set;
}
}
public class DefaultRetention
{
/// <summary>
/// Days
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-defaultretention.html#cfn-s3-bucket-defaultretention-days
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: Integer
/// </summary>
public dynamic Days
{
get;
set;
}
/// <summary>
/// Mode
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-defaultretention.html#cfn-s3-bucket-defaultretention-mode
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Mode
{
get;
set;
}
/// <summary>
/// Years
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-defaultretention.html#cfn-s3-bucket-defaultretention-years
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: Integer
/// </summary>
public dynamic Years
{
get;
set;
}
}
public class NotificationFilter
{
/// <summary>
/// S3Key
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-notificationconfiguration-config-filter.html#cfn-s3-bucket-notificationconfiguraiton-config-filter-s3key
/// Required: True
/// UpdateType: Mutable
/// Type: S3KeyFilter
/// </summary>
public S3KeyFilter S3Key
{
get;
set;
}
}
public class LambdaConfiguration
{
/// <summary>
/// Event
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-notificationconfig-lambdaconfig.html#cfn-s3-bucket-notificationconfig-lambdaconfig-event
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Event
{
get;
set;
}
/// <summary>
/// Filter
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-notificationconfig-lambdaconfig.html#cfn-s3-bucket-notificationconfig-lambdaconfig-filter
/// Required: False
/// UpdateType: Mutable
/// Type: NotificationFilter
/// </summary>
public NotificationFilter Filter
{
get;
set;
}
/// <summary>
/// Function
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-notificationconfig-lambdaconfig.html#cfn-s3-bucket-notificationconfig-lambdaconfig-function
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Function
{
get;
set;
}
}
public class ReplicationRule
{
/// <summary>
/// Destination
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-replicationconfiguration-rules.html#cfn-s3-bucket-replicationconfiguration-rules-destination
/// Required: True
/// UpdateType: Mutable
/// Type: ReplicationDestination
/// </summary>
public ReplicationDestination Destination
{
get;
set;
}
/// <summary>
/// Id
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-replicationconfiguration-rules.html#cfn-s3-bucket-replicationconfiguration-rules-id
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Id
{
get;
set;
}
/// <summary>
/// Prefix
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-replicationconfiguration-rules.html#cfn-s3-bucket-replicationconfiguration-rules-prefix
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Prefix
{
get;
set;
}
/// <summary>
/// SourceSelectionCriteria
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-replicationconfiguration-rules.html#cfn-s3-bucket-replicationrule-sourceselectioncriteria
/// Required: False
/// UpdateType: Mutable
/// Type: SourceSelectionCriteria
/// </summary>
public SourceSelectionCriteria SourceSelectionCriteria
{
get;
set;
}
/// <summary>
/// Status
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-replicationconfiguration-rules.html#cfn-s3-bucket-replicationconfiguration-rules-status
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Status
{
get;
set;
}
}
public class ServerSideEncryptionRule
{
/// <summary>
/// ServerSideEncryptionByDefault
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-serversideencryptionrule.html#cfn-s3-bucket-serversideencryptionrule-serversideencryptionbydefault
/// Required: False
/// UpdateType: Mutable
/// Type: ServerSideEncryptionByDefault
/// </summary>
public ServerSideEncryptionByDefault ServerSideEncryptionByDefault
{
get;
set;
}
}
public class AnalyticsConfiguration
{
/// <summary>
/// Id
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-analyticsconfiguration.html#cfn-s3-bucket-analyticsconfiguration-id
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Id
{
get;
set;
}
/// <summary>
/// Prefix
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-analyticsconfiguration.html#cfn-s3-bucket-analyticsconfiguration-prefix
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Prefix
{
get;
set;
}
/// <summary>
/// StorageClassAnalysis
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-analyticsconfiguration.html#cfn-s3-bucket-analyticsconfiguration-storageclassanalysis
/// Required: True
/// UpdateType: Mutable
/// Type: StorageClassAnalysis
/// </summary>
public StorageClassAnalysis StorageClassAnalysis
{
get;
set;
}
/// <summary>
/// TagFilters
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-analyticsconfiguration.html#cfn-s3-bucket-analyticsconfiguration-tagfilters
/// Required: False
/// UpdateType: Mutable
/// Type: List
/// ItemType: TagFilter
/// </summary>
public List<TagFilter> TagFilters
{
get;
set;
}
}
public class SourceSelectionCriteria
{
/// <summary>
/// SseKmsEncryptedObjects
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-sourceselectioncriteria.html#cfn-s3-bucket-sourceselectioncriteria-ssekmsencryptedobjects
/// Required: True
/// UpdateType: Mutable
/// Type: SseKmsEncryptedObjects
/// </summary>
public SseKmsEncryptedObjects SseKmsEncryptedObjects
{
get;
set;
}
}
public class LoggingConfiguration
{
/// <summary>
/// DestinationBucketName
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-loggingconfig.html#cfn-s3-bucket-loggingconfig-destinationbucketname
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic DestinationBucketName
{
get;
set;
}
/// <summary>
/// LogFilePrefix
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-loggingconfig.html#cfn-s3-bucket-loggingconfig-logfileprefix
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic LogFilePrefix
{
get;
set;
}
}
public class StorageClassAnalysis
{
/// <summary>
/// DataExport
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-storageclassanalysis.html#cfn-s3-bucket-storageclassanalysis-dataexport
/// Required: False
/// UpdateType: Mutable
/// Type: DataExport
/// </summary>
public DataExport DataExport
{
get;
set;
}
}
public class RoutingRule
{
/// <summary>
/// RedirectRule
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-websiteconfiguration-routingrules.html#cfn-s3-websiteconfiguration-routingrules-redirectrule
/// Required: True
/// UpdateType: Mutable
/// Type: RedirectRule
/// </summary>
public RedirectRule RedirectRule
{
get;
set;
}
/// <summary>
/// RoutingRuleCondition
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-websiteconfiguration-routingrules.html#cfn-s3-websiteconfiguration-routingrules-routingrulecondition
/// Required: False
/// UpdateType: Mutable
/// Type: RoutingRuleCondition
/// </summary>
public RoutingRuleCondition RoutingRuleCondition
{
get;
set;
}
}
public class EncryptionConfiguration
{
/// <summary>
/// ReplicaKmsKeyID
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-encryptionconfiguration.html#cfn-s3-bucket-encryptionconfiguration-replicakmskeyid
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic ReplicaKmsKeyID
{
get;
set;
}
}
public class RedirectRule
{
/// <summary>
/// HostName
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-websiteconfiguration-routingrules-redirectrule.html#cfn-s3-websiteconfiguration-redirectrule-hostname
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic HostName
{
get;
set;
}
/// <summary>
/// HttpRedirectCode
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-websiteconfiguration-routingrules-redirectrule.html#cfn-s3-websiteconfiguration-redirectrule-httpredirectcode
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic HttpRedirectCode
{
get;
set;
}
/// <summary>
/// Protocol
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-websiteconfiguration-routingrules-redirectrule.html#cfn-s3-websiteconfiguration-redirectrule-protocol
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Protocol
{
get;
set;
}
/// <summary>
/// ReplaceKeyPrefixWith
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-websiteconfiguration-routingrules-redirectrule.html#cfn-s3-websiteconfiguration-redirectrule-replacekeyprefixwith
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic ReplaceKeyPrefixWith
{
get;
set;
}
/// <summary>
/// ReplaceKeyWith
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-websiteconfiguration-routingrules-redirectrule.html#cfn-s3-websiteconfiguration-redirectrule-replacekeywith
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic ReplaceKeyWith
{
get;
set;
}
}
public class ObjectLockRule
{
/// <summary>
/// DefaultRetention
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-objectlockrule.html#cfn-s3-bucket-objectlockrule-defaultretention
/// Required: False
/// UpdateType: Mutable
/// Type: DefaultRetention
/// </summary>
public DefaultRetention DefaultRetention
{
get;
set;
}
}
public class WebsiteConfiguration
{
/// <summary>
/// ErrorDocument
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-websiteconfiguration.html#cfn-s3-websiteconfiguration-errordocument
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic ErrorDocument
{
get;
set;
}
/// <summary>
/// IndexDocument
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-websiteconfiguration.html#cfn-s3-websiteconfiguration-indexdocument
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic IndexDocument
{
get;
set;
}
/// <summary>
/// RedirectAllRequestsTo
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-websiteconfiguration.html#cfn-s3-websiteconfiguration-redirectallrequeststo
/// Required: False
/// UpdateType: Mutable
/// Type: RedirectAllRequestsTo
/// </summary>
public RedirectAllRequestsTo RedirectAllRequestsTo
{
get;
set;
}
/// <summary>
/// RoutingRules
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-websiteconfiguration.html#cfn-s3-websiteconfiguration-routingrules
/// Required: False
/// UpdateType: Mutable
/// Type: List
/// ItemType: RoutingRule
/// </summary>
public List<RoutingRule> RoutingRules
{
get;
set;
}
}
public class Rule
{
/// <summary>
/// AbortIncompleteMultipartUpload
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-lifecycleconfig-rule.html#cfn-s3-bucket-rule-abortincompletemultipartupload
/// Required: False
/// UpdateType: Mutable
/// Type: AbortIncompleteMultipartUpload
/// </summary>
public AbortIncompleteMultipartUpload AbortIncompleteMultipartUpload
{
get;
set;
}
/// <summary>
/// ExpirationDate
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-lifecycleconfig-rule.html#cfn-s3-bucket-lifecycleconfig-rule-expirationdate
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: Timestamp
/// </summary>
public dynamic ExpirationDate
{
get;
set;
}
/// <summary>
/// ExpirationInDays
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-lifecycleconfig-rule.html#cfn-s3-bucket-lifecycleconfig-rule-expirationindays
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: Integer
/// </summary>
public dynamic ExpirationInDays
{
get;
set;
}
/// <summary>
/// Id
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-lifecycleconfig-rule.html#cfn-s3-bucket-lifecycleconfig-rule-id
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Id
{
get;
set;
}
/// <summary>
/// NoncurrentVersionExpirationInDays
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-lifecycleconfig-rule.html#cfn-s3-bucket-lifecycleconfig-rule-noncurrentversionexpirationindays
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: Integer
/// </summary>
public dynamic NoncurrentVersionExpirationInDays
{
get;
set;
}
/// <summary>
/// NoncurrentVersionTransition
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-lifecycleconfig-rule.html#cfn-s3-bucket-lifecycleconfig-rule-noncurrentversiontransition
/// Required: False
/// UpdateType: Mutable
/// Type: NoncurrentVersionTransition
/// </summary>
public NoncurrentVersionTransition NoncurrentVersionTransition
{
get;
set;
}
/// <summary>
/// NoncurrentVersionTransitions
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-lifecycleconfig-rule.html#cfn-s3-bucket-lifecycleconfig-rule-noncurrentversiontransitions
/// Required: False
/// UpdateType: Mutable
/// Type: List
/// ItemType: NoncurrentVersionTransition
/// </summary>
public List<NoncurrentVersionTransition> NoncurrentVersionTransitions
{
get;
set;
}
/// <summary>
/// Prefix
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-lifecycleconfig-rule.html#cfn-s3-bucket-lifecycleconfig-rule-prefix
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Prefix
{
get;
set;
}
/// <summary>
/// Status
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-lifecycleconfig-rule.html#cfn-s3-bucket-lifecycleconfig-rule-status
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Status
{
get;
set;
}
/// <summary>
/// TagFilters
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-lifecycleconfig-rule.html#cfn-s3-bucket-rule-tagfilters
/// Required: False
/// UpdateType: Mutable
/// Type: List
/// ItemType: TagFilter
/// </summary>
public List<TagFilter> TagFilters
{
get;
set;
}
/// <summary>
/// Transition
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-lifecycleconfig-rule.html#cfn-s3-bucket-lifecycleconfig-rule-transition
/// Required: False
/// UpdateType: Mutable
/// Type: Transition
/// </summary>
public Transition Transition
{
get;
set;
}
/// <summary>
/// Transitions
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-lifecycleconfig-rule.html#cfn-s3-bucket-lifecycleconfig-rule-transitions
/// Required: False
/// UpdateType: Mutable
/// Type: List
/// ItemType: Transition
/// </summary>
public List<Transition> Transitions
{
get;
set;
}
}
public class TopicConfiguration
{
/// <summary>
/// Event
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-notificationconfig-topicconfig.html#cfn-s3-bucket-notificationconfig-topicconfig-event
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Event
{
get;
set;
}
/// <summary>
/// Filter
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-notificationconfig-topicconfig.html#cfn-s3-bucket-notificationconfig-topicconfig-filter
/// Required: False
/// UpdateType: Mutable
/// Type: NotificationFilter
/// </summary>
public NotificationFilter Filter
{
get;
set;
}
/// <summary>
/// Topic
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-notificationconfig-topicconfig.html#cfn-s3-bucket-notificationconfig-topicconfig-topic
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Topic
{
get;
set;
}
}
public class MetricsConfiguration
{
/// <summary>
/// Id
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-metricsconfiguration.html#cfn-s3-bucket-metricsconfiguration-id
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Id
{
get;
set;
}
/// <summary>
/// Prefix
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-metricsconfiguration.html#cfn-s3-bucket-metricsconfiguration-prefix
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Prefix
{
get;
set;
}
/// <summary>
/// TagFilters
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-metricsconfiguration.html#cfn-s3-bucket-metricsconfiguration-tagfilters
/// Required: False
/// UpdateType: Mutable
/// Type: List
/// ItemType: TagFilter
/// </summary>
public List<TagFilter> TagFilters
{
get;
set;
}
}
public class TagFilter
{
/// <summary>
/// Key
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-tagfilter.html#cfn-s3-bucket-tagfilter-key
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Key
{
get;
set;
}
/// <summary>
/// Value
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-tagfilter.html#cfn-s3-bucket-tagfilter-value
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Value
{
get;
set;
}
}
public class Transition
{
/// <summary>
/// StorageClass
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-lifecycleconfig-rule-transition.html#cfn-s3-bucket-lifecycleconfig-rule-transition-storageclass
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic StorageClass
{
get;
set;
}
/// <summary>
/// TransitionDate
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-lifecycleconfig-rule-transition.html#cfn-s3-bucket-lifecycleconfig-rule-transition-transitiondate
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: Timestamp
/// </summary>
public dynamic TransitionDate
{
get;
set;
}
/// <summary>
/// TransitionInDays
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-lifecycleconfig-rule-transition.html#cfn-s3-bucket-lifecycleconfig-rule-transition-transitionindays
/// Required: False
/// UpdateType: Mutable
/// PrimitiveType: Integer
/// </summary>
public dynamic TransitionInDays
{
get;
set;
}
}
public class DataExport
{
/// <summary>
/// Destination
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-dataexport.html#cfn-s3-bucket-dataexport-destination
/// Required: True
/// UpdateType: Mutable
/// Type: Destination
/// </summary>
public Destination Destination
{
get;
set;
}
/// <summary>
/// OutputSchemaVersion
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-dataexport.html#cfn-s3-bucket-dataexport-outputschemaversion
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic OutputSchemaVersion
{
get;
set;
}
}
public class FilterRule
{
/// <summary>
/// Name
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-notificationconfiguration-config-filter-s3key-rules.html#cfn-s3-bucket-notificationconfiguraiton-config-filter-s3key-rules-name
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Name
{
get;
set;
}
/// <summary>
/// Value
/// http://docs.aws.amazon.com/AWSCloudFormation/latest/UserGuide/aws-properties-s3-bucket-notificationconfiguration-config-filter-s3key-rules.html#cfn-s3-bucket-notificationconfiguraiton-config-filter-s3key-rules-value
/// Required: True
/// UpdateType: Mutable
/// PrimitiveType: String
/// </summary>
public dynamic Value
{
get;
set;
}
}
}
}
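// --- Illustrative usage sketch (not part of the generated source) ---
// Shows how the generated nested property types above might be composed for an
// "AWS::S3::Bucket" resource. The name of the enclosing resource class ("Bucket")
// and how the object is ultimately serialized into a template are assumptions
// made only for this example.
//
// var bucket = new Bucket
// {
//     BucketName = "example-logs-bucket",
//     VersioningConfiguration = new BucketTypes.VersioningConfiguration { Status = "Enabled" },
//     PublicAccessBlockConfiguration = new BucketTypes.PublicAccessBlockConfiguration
//     {
//         BlockPublicAcls = true,
//         BlockPublicPolicy = true,
//         IgnorePublicAcls = true,
//         RestrictPublicBuckets = true
//     }
// };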
| |
//
// Authors:
// Alan McGovern [email protected]
// Ben Motmans <[email protected]>
// Lucas Ontivero [email protected]
//
// Copyright (C) 2006 Alan McGovern
// Copyright (C) 2007 Ben Motmans
// Copyright (C) 2014 Lucas Ontivero
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Threading.Tasks;
namespace Open.Nat
{
/// <summary>
 /// Represents a NAT device and provides access to the operations that allow
 /// opening (forwarding) ports, closing ports and getting the external (visible) IP address.
/// </summary>
public abstract class NatDevice
{
private readonly HashSet<Mapping> _openedMapping = new HashSet<Mapping>();
protected DateTime LastSeen { get; private set; }
internal void Touch()
{
LastSeen = DateTime.Now;
}
/// <summary>
 /// Creates the port map asynchronously.
/// </summary>
/// <param name="mapping">The <see cref="Mapping">Mapping</see> entry.</param>
/// <example>
/// device.CreatePortMapAsync(new Mapping(Protocol.Tcp, 1700, 1600));
/// </example>
/// <exception cref="MappingException">MappingException</exception>
public abstract Task CreatePortMapAsync(Mapping mapping);
/// <summary>
 /// Deletes a mapped port asynchronously.
/// </summary>
/// <param name="mapping">The <see cref="Mapping">Mapping</see> entry.</param>
/// <example>
/// device.DeletePortMapAsync(new Mapping(Protocol.Tcp, 1700, 1600));
/// </example>
 /// <exception cref="MappingException">MappingException</exception>
public abstract Task DeletePortMapAsync(Mapping mapping);
/// <summary>
 /// Gets all mappings asynchronously.
/// </summary>
/// <returns>
/// The list of all forwarded ports
/// </returns>
/// <example>
/// var mappings = await device.GetAllMappingsAsync();
/// foreach(var mapping in mappings)
/// {
 /// Console.WriteLine(mapping);
/// }
/// </example>
/// <exception cref="MappingException">MappingException</exception>
public abstract Task<IEnumerable<Mapping>> GetAllMappingsAsync();
/// <summary>
 /// Gets the external (visible) IP address asynchronously. This is the NAT device's IP address.
/// </summary>
/// <returns>
 /// The public IP address
/// </returns>
/// <example>
/// Console.WriteLine("My public IP is: {0}", await device.GetExternalIPAsync());
/// </example>
/// <exception cref="MappingException">MappingException</exception>
public abstract Task<IPAddress> GetExternalIPAsync();
/// <summary>
 /// Gets the specified mapping asynchronously.
/// </summary>
/// <param name="protocol">The protocol.</param>
/// <param name="port">The port.</param>
/// <returns>
/// The matching mapping
/// </returns>
public abstract Task<Mapping> GetSpecificMappingAsync(Protocol protocol, int port);
protected void RegisterMapping(Mapping mapping)
{
_openedMapping.Remove(mapping);
_openedMapping.Add(mapping);
}
protected void UnregisterMapping(Mapping mapping)
{
_openedMapping.RemoveWhere(x => x.Equals(mapping));
}
 internal void ReleaseMapping(IEnumerable<Mapping> mappings)
 {
 var maparr = mappings.ToArray();
 var mapCount = maparr.Length;
 NatDiscoverer.TraceSource.LogInfo("{0} ports to close", mapCount);
 for (var i = 0; i < mapCount; i++)
 {
 // Close the mapping that was passed in; indexing _openedMapping here would be
 // wrong when only a filtered subset (e.g. session mappings) is being released.
 var mapping = maparr[i];
 try
 {
 DeletePortMapAsync(mapping);
 NatDiscoverer.TraceSource.LogInfo(mapping + " port successfully closed");
 }
 catch (Exception)
 {
 NatDiscoverer.TraceSource.LogError(mapping + " port couldn't be closed");
 }
 }
 }
internal void ReleaseAll()
{
ReleaseMapping(_openedMapping);
}
internal void ReleaseSessionMappings()
{
var mappings = from m in _openedMapping
where m.LifetimeType == MappingLifetime.Session
select m;
ReleaseMapping(mappings);
}
#if NET35
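 // NET35 build: async/await is unavailable, so each renewal is chained onto the
 // previous task with ContinueWith(...).Unwrap(), preserving the same
 // one-at-a-time renewal order as the async version below.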
internal Task RenewMappings()
{
Task task = null;
var mappings = _openedMapping.Where(x => x.ShoundRenew());
foreach (var mapping in mappings.ToArray())
{
var m = mapping;
task = task == null ? RenewMapping(m) : task.ContinueWith(t => RenewMapping(m)).Unwrap();
}
return task;
}
#else
internal async Task RenewMappings()
{
var mappings = _openedMapping.Where(x => x.ShoundRenew());
foreach (var mapping in mappings.ToArray())
{
var m = mapping;
await RenewMapping(m);
}
}
#endif
#if NET35
private Task RenewMapping(Mapping mapping)
{
var renewMapping = new Mapping(mapping);
renewMapping.Expiration = DateTime.UtcNow.AddSeconds(mapping.Lifetime);
NatDiscoverer.TraceSource.LogInfo("Renewing mapping {0}", renewMapping);
return CreatePortMapAsync(renewMapping)
.ContinueWith(task =>
{
if (task.IsFaulted)
{
NatDiscoverer.TraceSource.LogWarn("Renew {0} failed", mapping);
}
else
{
NatDiscoverer.TraceSource.LogInfo("Next renew scheduled at: {0}",
renewMapping.Expiration.ToLocalTime().TimeOfDay);
}
});
}
#else
private async Task RenewMapping(Mapping mapping)
{
var renewMapping = new Mapping(mapping);
try
{
renewMapping.Expiration = DateTime.UtcNow.AddSeconds(mapping.Lifetime);
NatDiscoverer.TraceSource.LogInfo("Renewing mapping {0}", renewMapping);
await CreatePortMapAsync(renewMapping);
NatDiscoverer.TraceSource.LogInfo("Next renew scheduled at: {0}",
renewMapping.Expiration.ToLocalTime().TimeOfDay);
}
catch (Exception)
{
NatDiscoverer.TraceSource.LogWarn("Renew {0} failed", mapping);
}
}
#endif
}
}
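// --- Illustrative usage sketch (not part of the library source) ---
// A minimal, hedged example of how a NatDevice obtained from Open.Nat's discovery
// API might be used. The discoverer call and the timeout value are assumptions
// about the surrounding API and may vary between library versions.
//
// var discoverer = new NatDiscoverer();
// var cts = new CancellationTokenSource(10000);
// var device = await discoverer.DiscoverDeviceAsync(PortMapper.Upnp, cts);
// Console.WriteLine("External IP: {0}", await device.GetExternalIPAsync());
// await device.CreatePortMapAsync(new Mapping(Protocol.Tcp, 1600, 1700, "demo mapping"));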
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Security.AccessControl;
using System.Text;
using AshMind.IO.Abstractions.Security;
namespace AshMind.IO.Abstractions.Bases {
public abstract class FileBase : FileSystemInfoBase, IFile {
public virtual IFileSecurity GetAccessControl() {
throw new NotImplementedException();
}
public virtual IFileSecurity GetAccessControl(AccessControlSections includeSections) {
throw new NotImplementedException();
}
public virtual void SetAccessControl(IFileSecurity fileSecurity) {
throw new NotImplementedException();
}
public virtual StreamReader OpenText() {
throw new NotImplementedException();
}
public virtual StreamWriter CreateText() {
throw new NotImplementedException();
}
public virtual StreamWriter AppendText() {
throw new NotImplementedException();
}
public virtual IFile CopyTo(string destFileName) {
return CopyTo(destFileName, false);
}
public virtual IFile CopyTo(string destFileName, bool overwrite) {
throw new NotImplementedException();
}
public virtual void Decrypt() {
throw new NotImplementedException();
}
public virtual void Encrypt() {
throw new NotImplementedException();
}
public virtual Stream Create() {
return Open(FileMode.Create, FileAccess.ReadWrite, FileShare.None);
}
public virtual Stream Open(FileMode mode) {
return Open(mode, FileAccess.ReadWrite, FileShare.None);
}
public virtual Stream Open(FileMode mode, FileAccess access) {
return Open(mode, access, FileShare.None);
}
public virtual Stream Open(FileMode mode, FileAccess access, FileShare share) {
throw new NotImplementedException();
}
public virtual Stream OpenRead() {
return Open(FileMode.Open, FileAccess.Read, FileShare.Read);
}
public virtual Stream OpenWrite() {
return Open(FileMode.OpenOrCreate, FileAccess.Write, FileShare.None);
}
public virtual void MoveTo(string destFileName) {
throw new NotImplementedException();
}
public virtual IFile Replace(string destinationFileName, string destinationBackupFileName) {
return Replace(destinationFileName, destinationBackupFileName, false);
}
public virtual IFile Replace(string destinationFileName, string destinationBackupFileName, bool ignoreMetadataErrors) {
throw new NotImplementedException();
}
public virtual long Length {
get { throw new NotImplementedException(); }
}
public virtual string DirectoryName {
get {
if (Directory == null)
return null;
return Directory.FullName;
}
}
public virtual IDirectory Directory {
get { throw new NotImplementedException(); }
}
public virtual bool IsReadOnly {
get { return (Attributes & FileAttributes.ReadOnly) == FileAttributes.ReadOnly; }
set { Attributes = (value ? Attributes | FileAttributes.ReadOnly : Attributes & ~FileAttributes.ReadOnly); }
}
public virtual string ReadAllText() {
return ReadAllText(Encoding.UTF8);
}
public virtual string ReadAllText(Encoding encoding) {
using (var stream = OpenRead())
using (var reader = new StreamReader(stream, encoding)) {
return reader.ReadToEnd();
}
}
public virtual string[] ReadAllLines() {
return ReadAllLines(Encoding.UTF8);
}
public virtual string[] ReadAllLines(Encoding encoding) {
using (var stream = OpenRead())
using (var reader = new StreamReader(stream, encoding)) {
var lines = new List<string>();
var line = reader.ReadLine();
while (line != null) {
lines.Add(line);
line = reader.ReadLine();
}
return lines.ToArray();
}
}
public virtual byte[] ReadAllBytes() {
using (var stream = OpenRead()) {
var length = stream.Length;
if (length > int.MaxValue)
throw new IOException("Cannot read stream longer than " + int.MaxValue + " bytes.");
 var buffer = new byte[length];
 // Stream.Read may return fewer bytes than requested, so read in a loop
 // until the whole buffer is filled.
 var offset = 0;
 while (offset < buffer.Length) {
 var read = stream.Read(buffer, offset, buffer.Length - offset);
 if (read == 0)
 throw new IOException("Unexpected end of stream.");
 offset += read;
 }
 return buffer;
}
}
public virtual void WriteAllText(string contents) {
WriteAllText(contents, Encoding.UTF8);
}
public virtual void WriteAllText(string contents, Encoding encoding) {
using (var stream = OpenWrite())
using (var writer = new StreamWriter(stream, encoding)) {
writer.Write(contents);
}
}
public virtual void WriteAllLines(string[] contents) {
WriteAllLines((IEnumerable<string>)contents, Encoding.UTF8);
}
 public virtual void WriteAllLines(string[] contents, Encoding encoding) {
 // Forward the caller's encoding instead of always defaulting to UTF-8.
 WriteAllLines((IEnumerable<string>)contents, encoding);
 }
public virtual void WriteAllLines(IEnumerable<string> contents) {
WriteAllLines(contents, Encoding.UTF8);
}
public virtual void WriteAllLines(IEnumerable<string> contents, Encoding encoding) {
using (var stream = OpenWrite())
using (var writer = new StreamWriter(stream, encoding)) {
 foreach (var line in contents) {
 // Terminate each entry so the output actually contains separate lines.
 writer.WriteLine(line);
 }
}
}
public virtual void WriteAllBytes(byte[] bytes) {
using (var stream = OpenWrite()) {
stream.Write(bytes, 0, bytes.Length);
}
}
public virtual void AppendAllText(string contents) {
AppendAllText(contents, Encoding.UTF8);
}
public virtual void AppendAllText(string contents, Encoding encoding) {
using (var stream = Open(FileMode.Append, FileAccess.Write, FileShare.None))
using (var writer = new StreamWriter(stream, encoding)) {
writer.Write(contents);
}
}
public virtual void AppendAllLines(string[] contents) {
AppendAllLines((IEnumerable<string>)contents, Encoding.UTF8);
}
 public virtual void AppendAllLines(string[] contents, Encoding encoding) {
 // Forward the caller's encoding instead of always defaulting to UTF-8.
 AppendAllLines((IEnumerable<string>)contents, encoding);
 }
public virtual void AppendAllLines(IEnumerable<string> contents) {
AppendAllLines(contents, Encoding.UTF8);
}
public virtual void AppendAllLines(IEnumerable<string> contents, Encoding encoding) {
using (var stream = Open(FileMode.Append, FileAccess.Write, FileShare.None))
using (var writer = new StreamWriter(stream, encoding)) {
 foreach (var line in contents) {
 // Terminate each entry so the output actually contains separate lines.
 writer.WriteLine(line);
 }
}
}
}
}
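// --- Illustrative note (not part of the library source) ---
// The read/write helpers above (ReadAllText, WriteAllBytes, AppendAllLines, ...) are
// all layered on Open(FileMode, FileAccess, FileShare), so a concrete implementation
// mainly needs to override that overload (plus the inherited metadata members) to get
// the whole convenience surface. A hedged sketch of such an override:
//
// public class LocalFile : FileBase {
//     private readonly string _path;
//     public LocalFile(string path) { _path = path; }
//     public override Stream Open(FileMode mode, FileAccess access, FileShare share) {
//         return new FileStream(_path, mode, access, share);
//     }
// }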
| |
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.Collections.Generic;
using Newtonsoft.Json.Serialization;
#if !NETFX_CORE
using NUnit.Framework;
#else
using Microsoft.VisualStudio.TestPlatform.UnitTestFramework;
using TestFixture = Microsoft.VisualStudio.TestPlatform.UnitTestFramework.TestClassAttribute;
using Test = Microsoft.VisualStudio.TestPlatform.UnitTestFramework.TestMethodAttribute;
#endif
using Newtonsoft.Json.Tests.TestObjects;
using Newtonsoft.Json.Linq;
using System.Reflection;
using Newtonsoft.Json.Utilities;
namespace Newtonsoft.Json.Tests.Serialization
{
[TestFixture]
public class CamelCasePropertyNamesContractResolverTests : TestFixtureBase
{
[Test]
public void JsonConvertSerializerSettings()
{
Person person = new Person();
person.BirthDate = new DateTime(2000, 11, 20, 23, 55, 44, DateTimeKind.Utc);
person.LastModified = new DateTime(2000, 11, 20, 23, 55, 44, DateTimeKind.Utc);
person.Name = "Name!";
string json = JsonConvert.SerializeObject(person, Formatting.Indented, new JsonSerializerSettings
{
ContractResolver = new CamelCasePropertyNamesContractResolver()
});
Assert.AreEqual(@"{
""name"": ""Name!"",
""birthDate"": ""2000-11-20T23:55:44Z"",
""lastModified"": ""2000-11-20T23:55:44Z""
}", json);
Person deserializedPerson = JsonConvert.DeserializeObject<Person>(json, new JsonSerializerSettings
{
ContractResolver = new CamelCasePropertyNamesContractResolver()
});
Assert.AreEqual(person.BirthDate, deserializedPerson.BirthDate);
Assert.AreEqual(person.LastModified, deserializedPerson.LastModified);
Assert.AreEqual(person.Name, deserializedPerson.Name);
json = JsonConvert.SerializeObject(person, Formatting.Indented);
Assert.AreEqual(@"{
""Name"": ""Name!"",
""BirthDate"": ""2000-11-20T23:55:44Z"",
""LastModified"": ""2000-11-20T23:55:44Z""
}", json);
}
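// Hedged sketch (not part of the original fixture): camel casing can also be applied
// process-wide via JsonConvert.DefaultSettings (available in Json.NET 5.0+). The test
// below is illustrative only and resets the global settings afterwards.
[Test]
public void GlobalCamelCaseDefaultSettingsSketch()
{
JsonConvert.DefaultSettings = () => new JsonSerializerSettings
{
ContractResolver = new CamelCasePropertyNamesContractResolver()
};
try
{
string json = JsonConvert.SerializeObject(new { FirstName = "Jane" });
Assert.AreEqual(@"{""firstName"":""Jane""}", json);
}
finally
{
// Reset so other tests in the fixture are unaffected.
JsonConvert.DefaultSettings = null;
}
}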
[Test]
public void JTokenWriter()
{
JsonIgnoreAttributeOnClassTestClass ignoreAttributeOnClassTestClass = new JsonIgnoreAttributeOnClassTestClass();
ignoreAttributeOnClassTestClass.Field = int.MinValue;
JsonSerializer serializer = new JsonSerializer();
serializer.ContractResolver = new CamelCasePropertyNamesContractResolver();
JTokenWriter writer = new JTokenWriter();
serializer.Serialize(writer, ignoreAttributeOnClassTestClass);
JObject o = (JObject)writer.Token;
JProperty p = o.Property("theField");
Assert.IsNotNull(p);
Assert.AreEqual(int.MinValue, (int)p.Value);
string json = o.ToString();
}
#if !(NETFX_CORE || PORTABLE || PORTABLE40)
#pragma warning disable 618
[Test]
public void MemberSearchFlags()
{
PrivateMembersClass privateMembersClass = new PrivateMembersClass("PrivateString!", "InternalString!");
string json = JsonConvert.SerializeObject(privateMembersClass, Formatting.Indented, new JsonSerializerSettings
{
ContractResolver = new CamelCasePropertyNamesContractResolver { DefaultMembersSearchFlags = BindingFlags.NonPublic | BindingFlags.Instance }
});
Assert.AreEqual(@"{
""_privateString"": ""PrivateString!"",
""i"": 0,
""_internalString"": ""InternalString!""
}", json);
PrivateMembersClass deserializedPrivateMembersClass = JsonConvert.DeserializeObject<PrivateMembersClass>(@"{
""_privateString"": ""Private!"",
""i"": -2,
""_internalString"": ""Internal!""
}", new JsonSerializerSettings
{
ContractResolver = new CamelCasePropertyNamesContractResolver { DefaultMembersSearchFlags = BindingFlags.NonPublic | BindingFlags.Instance }
});
Assert.AreEqual("Private!", ReflectionUtils.GetMemberValue(typeof(PrivateMembersClass).GetField("_privateString", BindingFlags.Instance | BindingFlags.NonPublic), deserializedPrivateMembersClass));
Assert.AreEqual("Internal!", ReflectionUtils.GetMemberValue(typeof(PrivateMembersClass).GetField("_internalString", BindingFlags.Instance | BindingFlags.NonPublic), deserializedPrivateMembersClass));
// readonly
Assert.AreEqual(0, ReflectionUtils.GetMemberValue(typeof(PrivateMembersClass).GetField("i", BindingFlags.Instance | BindingFlags.NonPublic), deserializedPrivateMembersClass));
}
#pragma warning restore 618
#endif
[Test]
public void BlogPostExample()
{
Product product = new Product
{
ExpiryDate = new DateTime(2010, 12, 20, 18, 1, 0, DateTimeKind.Utc),
Name = "Widget",
Price = 9.99m,
Sizes = new[] { "Small", "Medium", "Large" }
};
string json =
JsonConvert.SerializeObject(
product,
Formatting.Indented,
new JsonSerializerSettings { ContractResolver = new CamelCasePropertyNamesContractResolver() }
);
//{
// "name": "Widget",
// "expiryDate": "\/Date(1292868060000)\/",
// "price": 9.99,
// "sizes": [
// "Small",
// "Medium",
// "Large"
// ]
//}
Assert.AreEqual(@"{
""name"": ""Widget"",
""expiryDate"": ""2010-12-20T18:01:00Z"",
""price"": 9.99,
""sizes"": [
""Small"",
""Medium"",
""Large""
]
}", json);
}
#if !(NET35 || NET20 || PORTABLE40)
[Test]
public void DynamicCamelCasePropertyNames()
{
dynamic o = new TestDynamicObject();
o.Text = "Text!";
o.Integer = int.MaxValue;
string json = JsonConvert.SerializeObject(o, Formatting.Indented,
new JsonSerializerSettings
{
ContractResolver = new CamelCasePropertyNamesContractResolver()
});
Assert.AreEqual(@"{
""explicit"": false,
""text"": ""Text!"",
""integer"": 2147483647,
""int"": 0,
""childObject"": null
}", json);
}
#endif
[Test]
public void DictionaryCamelCasePropertyNames()
{
Dictionary<string, string> values = new Dictionary<string, string>
{
{ "First", "Value1!" },
{ "Second", "Value2!" }
};
string json = JsonConvert.SerializeObject(values, Formatting.Indented,
new JsonSerializerSettings
{
ContractResolver = new CamelCasePropertyNamesContractResolver()
});
Assert.AreEqual(@"{
""first"": ""Value1!"",
""second"": ""Value2!""
}", json);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;
namespace Governance.TimerJobs.RemediationUx.Areas.HelpPage
{
/// <summary>
/// This class will generate the samples for the help page.
/// </summary>
public class HelpPageSampleGenerator
{
/// <summary>
/// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
/// </summary>
public HelpPageSampleGenerator()
{
ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
ActionSamples = new Dictionary<HelpPageSampleKey, object>();
SampleObjects = new Dictionary<Type, object>();
SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
{
DefaultSampleObjectFactory,
};
}
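// Hedged usage sketch (illustrative only; the controller/action names below are assumptions):
// consumers typically seed the sample dictionaries during help-page configuration, e.g.
//   var generator = new HelpPageSampleGenerator();
//   generator.SampleObjects[typeof(string)] = "sample string";
//   generator.ActionSamples[new HelpPageSampleKey(
//       new MediaTypeHeaderValue("application/json"),
//       SampleDirection.Response, "Values", "Get", new[] { "*" })] = "{ \"id\": 1 }";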
/// <summary>
/// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
/// </summary>
public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }
/// <summary>
/// Gets the objects that are used directly as samples for certain actions.
/// </summary>
public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }
/// <summary>
/// Gets the objects that are serialized as samples by the supported formatters.
/// </summary>
public IDictionary<Type, object> SampleObjects { get; internal set; }
/// <summary>
/// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
/// stopping when the factory successfully returns a non-<see langword="null"/> object.
/// </summary>
/// <remarks>
/// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
/// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
/// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
[SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
Justification = "This is an appropriate nesting of generic types")]
public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }
/// <summary>
/// Gets the request body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
{
return GetSample(api, SampleDirection.Request);
}
/// <summary>
/// Gets the response body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
{
return GetSample(api, SampleDirection.Response);
}
/// <summary>
/// Gets the request or response body samples.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The samples keyed by media type.</returns>
public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
{
if (api == null)
{
throw new ArgumentNullException("api");
}
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
var samples = new Dictionary<MediaTypeHeaderValue, object>();
// Use the samples provided directly for actions
var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
foreach (var actionSample in actionSamples)
{
samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
}
// Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
// Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
{
object sampleObject = GetSampleObject(type);
foreach (var formatter in formatters)
{
foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
{
if (!samples.ContainsKey(mediaType))
{
object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);
// If no sample found, try generate sample using formatter and sample object
if (sample == null && sampleObject != null)
{
sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
}
samples.Add(mediaType, WrapSampleIfString(sample));
}
}
}
}
return samples;
}
/// <summary>
/// Search for samples that are provided directly through <see cref="ActionSamples"/>.
/// </summary>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="type">The CLR type.</param>
/// <param name="formatter">The formatter.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The sample that matches the parameters.</returns>
public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
{
object sample;
// First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
// If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
// If still not found, try to get the sample provided for the specified mediaType and type.
// Finally, try to get the sample provided for the specified mediaType.
if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
{
return sample;
}
return null;
}
/// <summary>
/// Gets the sample object that will be serialized by the formatters.
/// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
/// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
/// factories in <see cref="SampleObjectFactories"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>The sample object.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
public virtual object GetSampleObject(Type type)
{
object sampleObject;
if (!SampleObjects.TryGetValue(type, out sampleObject))
{
// No specific object available, try our factories.
foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
{
if (factory == null)
{
continue;
}
try
{
sampleObject = factory(this, type);
if (sampleObject != null)
{
break;
}
}
catch
{
// Ignore any problems encountered in the factory; go on to the next one (if any).
}
}
}
return sampleObject;
}
/// <summary>
/// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The type.</returns>
public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
{
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
}
/// <summary>
/// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
/// <param name="formatters">The formatters.</param>
[SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
{
if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
{
throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
}
if (api == null)
{
throw new ArgumentNullException("api");
}
Type type;
if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
{
// Re-compute the supported formatters based on type
Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
{
if (IsFormatSupported(sampleDirection, formatter, type))
{
newFormatters.Add(formatter);
}
}
formatters = newFormatters;
}
else
{
switch (sampleDirection)
{
case SampleDirection.Request:
ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
formatters = api.SupportedRequestBodyFormatters;
break;
case SampleDirection.Response:
default:
type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
formatters = api.SupportedResponseFormatters;
break;
}
}
return type;
}
/// <summary>
/// Writes the sample object using formatter.
/// </summary>
/// <param name="formatter">The formatter.</param>
/// <param name="value">The value.</param>
/// <param name="type">The type.</param>
/// <param name="mediaType">Type of the media.</param>
/// <returns></returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
{
if (formatter == null)
{
throw new ArgumentNullException("formatter");
}
if (mediaType == null)
{
throw new ArgumentNullException("mediaType");
}
object sample = String.Empty;
MemoryStream ms = null;
HttpContent content = null;
try
{
if (formatter.CanWriteType(type))
{
ms = new MemoryStream();
content = new ObjectContent(type, value, formatter, mediaType);
formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
ms.Position = 0;
StreamReader reader = new StreamReader(ms);
string serializedSampleString = reader.ReadToEnd();
if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
{
serializedSampleString = TryFormatXml(serializedSampleString);
}
else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
{
serializedSampleString = TryFormatJson(serializedSampleString);
}
sample = new TextSample(serializedSampleString);
}
else
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
mediaType,
formatter.GetType().Name,
type.Name));
}
}
catch (Exception e)
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
formatter.GetType().Name,
mediaType.MediaType,
UnwrapException(e).Message));
}
finally
{
if (ms != null)
{
ms.Dispose();
}
if (content != null)
{
content.Dispose();
}
}
return sample;
}
internal static Exception UnwrapException(Exception exception)
{
AggregateException aggregateException = exception as AggregateException;
if (aggregateException != null)
{
return aggregateException.Flatten().InnerException;
}
return exception;
}
// Default factory for sample objects
private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
{
// Try to create a default sample object
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type);
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatJson(string str)
{
try
{
object parsedJson = JsonConvert.DeserializeObject(str);
return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
}
catch
{
// can't parse JSON, return the original string
return str;
}
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatXml(string str)
{
try
{
XDocument xml = XDocument.Parse(str);
return xml.ToString();
}
catch
{
// can't parse XML, return the original string
return str;
}
}
private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
{
switch (sampleDirection)
{
case SampleDirection.Request:
return formatter.CanReadType(type);
case SampleDirection.Response:
return formatter.CanWriteType(type);
}
return false;
}
private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
{
HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
foreach (var sample in ActionSamples)
{
HelpPageSampleKey sampleKey = sample.Key;
if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
(sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
sampleDirection == sampleKey.SampleDirection)
{
yield return sample;
}
}
}
private static object WrapSampleIfString(object sample)
{
string stringSample = sample as string;
if (stringSample != null)
{
return new TextSample(stringSample);
}
return sample;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
private static void BitFieldExtractUInt323Op()
{
var test = new ScalarTernOpTest__BitFieldExtractUInt32();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.ReadUnaligned
test.RunBasicScenario_UnsafeRead();
// Validates calling via reflection works, using Unsafe.ReadUnaligned
test.RunReflectionScenario_UnsafeRead();
// Validates passing a static member works
test.RunClsVarScenario();
// Validates passing a local works, using Unsafe.ReadUnaligned
test.RunLclVarScenario_UnsafeRead();
// Validates passing the field of a local class works
test.RunClassLclFldScenario();
// Validates passing an instance member of a class works
test.RunClassFldScenario();
// Validates passing the field of a local struct works
test.RunStructLclFldScenario();
// Validates passing an instance member of a struct works
test.RunStructFldScenario();
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class ScalarTernOpTest__BitFieldExtractUInt32
{
private struct TestStruct
{
public UInt32 _fld1;
public Byte _fld2;
public Byte _fld3;
public static TestStruct Create()
{
var testStruct = new TestStruct();
testStruct._fld1 = 0x1E000000;
testStruct._fld2 = 25;
testStruct._fld3 = 4;
return testStruct;
}
public void RunStructFldScenario(ScalarTernOpTest__BitFieldExtractUInt32 testClass)
{
var result = Bmi1.BitFieldExtract(_fld1, _fld2, _fld3);
testClass.ValidateResult(_fld1, _fld2, _fld3, result);
}
}
private static UInt32 _data1;
private static Byte _data2;
private static Byte _data3;
private static UInt32 _clsVar1;
private static Byte _clsVar2;
private static Byte _clsVar3;
private UInt32 _fld1;
private Byte _fld2;
private Byte _fld3;
static ScalarTernOpTest__BitFieldExtractUInt32()
{
_clsVar1 = 0x1E000000;
_clsVar2 = 25;
_clsVar3 = 4;
}
public ScalarTernOpTest__BitFieldExtractUInt32()
{
Succeeded = true;
_fld1 = 0x1E000000;
_fld2 = 25;
_fld3 = 4;
_data1 = 0x1E000000;
_data2 = 25;
_data3 = 4;
}
public bool IsSupported => Bmi1.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunBasicScenario_UnsafeRead));
var result = Bmi1.BitFieldExtract(
Unsafe.ReadUnaligned<UInt32>(ref Unsafe.As<UInt32, byte>(ref _data1)),
Unsafe.ReadUnaligned<Byte>(ref Unsafe.As<Byte, byte>(ref _data2)),
Unsafe.ReadUnaligned<Byte>(ref Unsafe.As<Byte, byte>(ref _data3))
);
ValidateResult(_data1, _data2, _data3, result);
}
public void RunReflectionScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunReflectionScenario_UnsafeRead));
var result = typeof(Bmi1).GetMethod(nameof(Bmi1.BitFieldExtract), new Type[] { typeof(UInt32), typeof(Byte), typeof(Byte) })
.Invoke(null, new object[] {
Unsafe.ReadUnaligned<UInt32>(ref Unsafe.As<UInt32, byte>(ref _data1)),
Unsafe.ReadUnaligned<Byte>(ref Unsafe.As<Byte, byte>(ref _data2)),
Unsafe.ReadUnaligned<Byte>(ref Unsafe.As<Byte, byte>(ref _data3))
});
ValidateResult(_data1, _data2, _data3, (UInt32)result);
}
public void RunClsVarScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClsVarScenario));
var result = Bmi1.BitFieldExtract(
_clsVar1,
_clsVar2,
_clsVar3
);
ValidateResult(_clsVar1, _clsVar2, _clsVar3, result);
}
public void RunLclVarScenario_UnsafeRead()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunLclVarScenario_UnsafeRead));
var data1 = Unsafe.ReadUnaligned<UInt32>(ref Unsafe.As<UInt32, byte>(ref _data1));
var data2 = Unsafe.ReadUnaligned<Byte>(ref Unsafe.As<Byte, byte>(ref _data2));
var data3 = Unsafe.ReadUnaligned<Byte>(ref Unsafe.As<Byte, byte>(ref _data3));
var result = Bmi1.BitFieldExtract(data1, data2, data3);
ValidateResult(data1, data2, data3, result);
}
public void RunClassLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassLclFldScenario));
var test = new ScalarTernOpTest__BitFieldExtractUInt32();
var result = Bmi1.BitFieldExtract(test._fld1, test._fld2, test._fld3);
ValidateResult(test._fld1, test._fld2, test._fld3, result);
}
public void RunClassFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunClassFldScenario));
var result = Bmi1.BitFieldExtract(_fld1, _fld2, _fld3);
ValidateResult(_fld1, _fld2, _fld3, result);
}
public void RunStructLclFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructLclFldScenario));
var test = TestStruct.Create();
var result = Bmi1.BitFieldExtract(test._fld1, test._fld2, test._fld3);
ValidateResult(test._fld1, test._fld2, test._fld3, result);
}
public void RunStructFldScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunStructFldScenario));
var test = TestStruct.Create();
test.RunStructFldScenario(this);
}
public void RunUnsupportedScenario()
{
TestLibrary.TestFramework.BeginScenario(nameof(RunUnsupportedScenario));
bool succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
succeeded = true;
}
if (!succeeded)
{
Succeeded = false;
}
}
private void ValidateResult(UInt32 op1, Byte op2, Byte op3, UInt32 result, [CallerMemberName] string method = "")
{
var isUnexpectedResult = false;
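// All scenarios use the same inputs: value 0x1E000000, start 25, length 4. Bits 25..28 of
// 0x1E000000 are 0b1111, so BitFieldExtract is expected to return 15.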
uint expectedResult = 15;
isUnexpectedResult = (expectedResult != result);
if (isUnexpectedResult)
{
TestLibrary.TestFramework.LogInformation($"{nameof(Bmi1)}.{nameof(Bmi1.BitFieldExtract)}<UInt32>(UInt32, Byte, Byte): BitFieldExtract failed:");
TestLibrary.TestFramework.LogInformation($" op1: {op1}");
TestLibrary.TestFramework.LogInformation($" op2: {op2}");
TestLibrary.TestFramework.LogInformation($" op3: {op3}");
TestLibrary.TestFramework.LogInformation($" result: {result}");
TestLibrary.TestFramework.LogInformation(string.Empty);
Succeeded = false;
}
}
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.13.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.Network
{
using System;
using System.Collections.Generic;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
/// <summary>
/// VirtualNetworkGatewayConnectionsOperations operations.
/// </summary>
public partial interface IVirtualNetworkGatewayConnectionsOperations
{
/// <summary>
/// The Put VirtualNetworkGatewayConnection operation creates/updates
/// a virtual network gateway connection in the specified resource
/// group through Network resource provider.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayConnectionName'>
/// The name of the virtual network gateway connection.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to the Begin Create or update Virtual Network
/// Gateway connection operation through Network resource provider.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<VirtualNetworkGatewayConnection>> CreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayConnectionName, VirtualNetworkGatewayConnection parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// The Put VirtualNetworkGatewayConnection operation creates/updates
/// a virtual network gateway connection in the specified resource
/// group through Network resource provider.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayConnectionName'>
/// The name of the virtual network gateway connection.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to the Begin Create or update Virtual Network
/// Gateway connection operation through Network resource provider.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<VirtualNetworkGatewayConnection>> BeginCreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayConnectionName, VirtualNetworkGatewayConnection parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// The Get VirtualNetworkGatewayConnection operation retrieves
/// information about the specified virtual network gateway
/// connection through Network resource provider.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayConnectionName'>
/// The name of the virtual network gateway connection.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<VirtualNetworkGatewayConnection>> GetWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayConnectionName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// The Delete VirtualNetworkGatewayConnection operation deletes the
/// specified virtual network gateway connection through Network
/// resource provider.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayConnectionName'>
/// The name of the virtual network gateway connection.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayConnectionName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// The Delete VirtualNetworkGatewayConnection operation deletes the
/// specified virtual network gateway connection through Network
/// resource provider.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayConnectionName'>
/// The name of the virtual network gateway connection.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse> BeginDeleteWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayConnectionName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// The Get VirtualNetworkGatewayConnectionSharedKey operation
/// retrieves information about the specified virtual network gateway
/// connection shared key through Network resource provider.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='connectionSharedKeyName'>
/// The virtual network gateway connection shared key name.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<ConnectionSharedKeyResult>> GetSharedKeyWithHttpMessagesAsync(string resourceGroupName, string connectionSharedKeyName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// The List VirtualNetworkGatewayConnections operation retrieves all
/// the virtual network gateway connections created.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IPage<VirtualNetworkGatewayConnection>>> ListWithHttpMessagesAsync(string resourceGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// The VirtualNetworkGatewayConnectionResetSharedKey operation resets
/// the virtual network gateway connection shared key for passed
/// virtual network gateway connection in the specified resource
/// group through Network resource provider.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayConnectionName'>
/// The virtual network gateway connection reset shared key Name.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to the Begin Reset Virtual Network Gateway
/// connection shared key operation through Network resource provider.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<ConnectionResetSharedKey>> ResetSharedKeyWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayConnectionName, ConnectionResetSharedKey parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// The VirtualNetworkGatewayConnectionResetSharedKey operation resets
/// the virtual network gateway connection shared key for passed
/// virtual network gateway connection in the specified resource
/// group through Network resource provider.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayConnectionName'>
/// The virtual network gateway connection reset shared key Name.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to the Begin Reset Virtual Network Gateway
/// connection shared key operation through Network resource provider.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<ConnectionResetSharedKey>> BeginResetSharedKeyWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayConnectionName, ConnectionResetSharedKey parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// The Put VirtualNetworkGatewayConnectionSharedKey operation sets
/// the virtual network gateway connection shared key for passed
/// virtual network gateway connection in the specified resource
/// group through Network resource provider.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayConnectionName'>
/// The virtual network gateway connection name.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to the Begin Set Virtual Network Gateway
/// connection shared key operation through Network resource provider.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<ConnectionSharedKey>> SetSharedKeyWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayConnectionName, ConnectionSharedKey parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// The Put VirtualNetworkGatewayConnectionSharedKey operation sets
/// the virtual network gateway connection shared key for passed
/// virtual network gateway connection in the specified resource
/// group through Network resource provider.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='virtualNetworkGatewayConnectionName'>
/// The virtual network gateway connection name.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to the Begin Set Virtual Network Gateway
/// connection shared key operation through Network resource provider.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<ConnectionSharedKey>> BeginSetSharedKeyWithHttpMessagesAsync(string resourceGroupName, string virtualNetworkGatewayConnectionName, ConnectionSharedKey parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// The List VirtualNetworkGatewayConnections operation retrieves all
/// the virtual network gateway connections created.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
Task<AzureOperationResponse<IPage<VirtualNetworkGatewayConnection>>> ListNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
}
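// Hedged usage sketch (illustrative only; the client variable and resource names are assumptions):
//   INetworkManagementClient client = ...;
//   AzureOperationResponse<VirtualNetworkGatewayConnection> response =
//       await client.VirtualNetworkGatewayConnections
//           .GetWithHttpMessagesAsync("myResourceGroup", "myConnection");
//   VirtualNetworkGatewayConnection connection = response.Body;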
}
| |
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.Collections.Generic;
using System.IO;
#if !(NET20 || NET35 || PORTABLE40 || PORTABLE)
using System.Numerics;
#endif
using Newtonsoft.Json.Utilities;
using System.Globalization;
#if NET20
using Newtonsoft.Json.Utilities.LinqBridge;
#else
using System.Linq;
#endif
namespace Newtonsoft.Json
{
/// <summary>
/// Represents a writer that provides a fast, non-cached, forward-only way of generating JSON data.
/// </summary>
public abstract class JsonWriter : IDisposable
{
internal enum State
{
Start = 0,
Property = 1,
ObjectStart = 2,
Object = 3,
ArrayStart = 4,
Array = 5,
ConstructorStart = 6,
Constructor = 7,
Closed = 8,
Error = 9
}
// array that gives a new state based on the current state and the token being written
private static readonly State[][] StateArray;
internal static readonly State[][] StateArrayTempate = new[]
{
// Start PropertyName ObjectStart Object ArrayStart Array ConstructorStart Constructor Closed Error
//
/* None */new[] { State.Error, State.Error, State.Error, State.Error, State.Error, State.Error, State.Error, State.Error, State.Error, State.Error },
/* StartObject */new[] { State.ObjectStart, State.ObjectStart, State.Error, State.Error, State.ObjectStart, State.ObjectStart, State.ObjectStart, State.ObjectStart, State.Error, State.Error },
/* StartArray */new[] { State.ArrayStart, State.ArrayStart, State.Error, State.Error, State.ArrayStart, State.ArrayStart, State.ArrayStart, State.ArrayStart, State.Error, State.Error },
/* StartConstructor */new[] { State.ConstructorStart, State.ConstructorStart, State.Error, State.Error, State.ConstructorStart, State.ConstructorStart, State.ConstructorStart, State.ConstructorStart, State.Error, State.Error },
/* Property */new[] { State.Property, State.Error, State.Property, State.Property, State.Error, State.Error, State.Error, State.Error, State.Error, State.Error },
/* Comment */new[] { State.Start, State.Property, State.ObjectStart, State.Object, State.ArrayStart, State.Array, State.Constructor, State.Constructor, State.Error, State.Error },
/* Raw */new[] { State.Start, State.Property, State.ObjectStart, State.Object, State.ArrayStart, State.Array, State.Constructor, State.Constructor, State.Error, State.Error },
/* Value (this will be copied) */new[] { State.Start, State.Object, State.Error, State.Error, State.Array, State.Array, State.Constructor, State.Constructor, State.Error, State.Error }
};
internal static State[][] BuildStateArray()
{
var allStates = StateArrayTempate.ToList();
var errorStates = StateArrayTempate[0];
var valueStates = StateArrayTempate[7];
foreach (JsonToken valueToken in EnumUtils.GetValues(typeof(JsonToken)))
{
if (allStates.Count <= (int)valueToken)
{
switch (valueToken)
{
case JsonToken.Integer:
case JsonToken.Float:
case JsonToken.String:
case JsonToken.Boolean:
case JsonToken.Null:
case JsonToken.Undefined:
case JsonToken.Date:
case JsonToken.Bytes:
allStates.Add(valueStates);
break;
default:
allStates.Add(errorStates);
break;
}
}
}
return allStates.ToArray();
}
static JsonWriter()
{
StateArray = BuildStateArray();
}
private readonly List<JsonPosition> _stack;
private JsonPosition _currentPosition;
private State _currentState;
private Formatting _formatting;
/// <summary>
/// Gets or sets a value indicating whether the underlying stream or
/// <see cref="TextWriter"/> should be closed when the writer is closed.
/// </summary>
/// <value>
/// true to close the underlying stream or <see cref="TextWriter"/> when
/// the writer is closed; otherwise false. The default is true.
/// </value>
public bool CloseOutput { get; set; }
/// <summary>
/// Gets the depth of the writer's current position, i.e. the number of open containers.
/// </summary>
/// <value>The current depth.</value>
protected internal int Top
{
get
{
int depth = _stack.Count;
if (Peek() != JsonContainerType.None)
depth++;
return depth;
}
}
/// <summary>
/// Gets the state of the writer.
/// </summary>
public WriteState WriteState
{
get
{
switch (_currentState)
{
case State.Error:
return WriteState.Error;
case State.Closed:
return WriteState.Closed;
case State.Object:
case State.ObjectStart:
return WriteState.Object;
case State.Array:
case State.ArrayStart:
return WriteState.Array;
case State.Constructor:
case State.ConstructorStart:
return WriteState.Constructor;
case State.Property:
return WriteState.Property;
case State.Start:
return WriteState.Start;
default:
throw JsonWriterException.Create(this, "Invalid state: " + _currentState, null);
}
}
}
internal string ContainerPath
{
get
{
if (_currentPosition.Type == JsonContainerType.None)
return string.Empty;
return JsonPosition.BuildPath(_stack);
}
}
/// <summary>
/// Gets the path of the writer.
/// </summary>
public string Path
{
get
{
if (_currentPosition.Type == JsonContainerType.None)
return string.Empty;
bool insideContainer = (_currentState != State.ArrayStart
&& _currentState != State.ConstructorStart
&& _currentState != State.ObjectStart);
IEnumerable<JsonPosition> positions = (!insideContainer)
? _stack
: _stack.Concat(new[] { _currentPosition });
return JsonPosition.BuildPath(positions);
}
}
private DateFormatHandling _dateFormatHandling;
private DateTimeZoneHandling _dateTimeZoneHandling;
private StringEscapeHandling _stringEscapeHandling;
private FloatFormatHandling _floatFormatHandling;
private string _dateFormatString;
private CultureInfo _culture;
/// <summary>
/// Indicates how JSON text output is formatted.
/// </summary>
public Formatting Formatting
{
get { return _formatting; }
set { _formatting = value; }
}
/// <summary>
/// Get or set how dates are written to JSON text.
/// </summary>
public DateFormatHandling DateFormatHandling
{
get { return _dateFormatHandling; }
set { _dateFormatHandling = value; }
}
/// <summary>
/// Get or set how <see cref="DateTime"/> time zones are handled when writing JSON text.
/// </summary>
public DateTimeZoneHandling DateTimeZoneHandling
{
get { return _dateTimeZoneHandling; }
set { _dateTimeZoneHandling = value; }
}
/// <summary>
/// Get or set how strings are escaped when writing JSON text.
/// </summary>
public StringEscapeHandling StringEscapeHandling
{
get { return _stringEscapeHandling; }
set
{
_stringEscapeHandling = value;
OnStringEscapeHandlingChanged();
}
}
internal virtual void OnStringEscapeHandlingChanged()
{
// hacky but there is a calculated value that relies on StringEscapeHandling
}
/// <summary>
/// Get or set how special floating point numbers, e.g. <see cref="F:System.Double.NaN"/>,
/// <see cref="F:System.Double.PositiveInfinity"/> and <see cref="F:System.Double.NegativeInfinity"/>,
/// are written to JSON text.
/// </summary>
public FloatFormatHandling FloatFormatHandling
{
get { return _floatFormatHandling; }
set { _floatFormatHandling = value; }
}
/// <summary>
/// Get or set how <see cref="DateTime"/> and <see cref="DateTimeOffset"/> values are formatted when writing JSON text.
/// </summary>
public string DateFormatString
{
get { return _dateFormatString; }
set { _dateFormatString = value; }
}
/// <summary>
/// Gets or sets the culture used when writing JSON. Defaults to <see cref="CultureInfo.InvariantCulture"/>.
/// </summary>
public CultureInfo Culture
{
get { return _culture ?? CultureInfo.InvariantCulture; }
set { _culture = value; }
}
/// <summary>
/// Creates an instance of the <c>JsonWriter</c> class.
/// </summary>
protected JsonWriter()
{
_stack = new List<JsonPosition>(4);
_currentState = State.Start;
_formatting = Formatting.None;
_dateTimeZoneHandling = DateTimeZoneHandling.RoundtripKind;
CloseOutput = true;
}
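// Hedged usage sketch (illustrative only): JsonTextWriter is the concrete implementation
// normally used to produce JSON text; the snippet below is not part of JsonWriter itself.
//   StringBuilder sb = new StringBuilder();
//   using (JsonWriter writer = new JsonTextWriter(new StringWriter(sb)))
//   {
//       writer.Formatting = Formatting.Indented;
//       writer.WriteStartObject();
//       writer.WritePropertyName("name");
//       writer.WriteValue("Widget");
//       writer.WriteEndObject();
//   }
//   // sb now holds the indented JSON object { "name": "Widget" }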
internal void UpdateScopeWithFinishedValue()
{
if (_currentPosition.HasIndex)
_currentPosition.Position++;
}
private void Push(JsonContainerType value)
{
if (_currentPosition.Type != JsonContainerType.None)
_stack.Add(_currentPosition);
_currentPosition = new JsonPosition(value);
}
private JsonContainerType Pop()
{
JsonPosition oldPosition = _currentPosition;
if (_stack.Count > 0)
{
_currentPosition = _stack[_stack.Count - 1];
_stack.RemoveAt(_stack.Count - 1);
}
else
{
_currentPosition = new JsonPosition();
}
return oldPosition.Type;
}
private JsonContainerType Peek()
{
return _currentPosition.Type;
}
/// <summary>
/// Flushes whatever is in the buffer to the underlying stream and also flushes the underlying stream.
/// </summary>
public abstract void Flush();
/// <summary>
/// Closes this stream and the underlying stream.
/// </summary>
public virtual void Close()
{
AutoCompleteAll();
}
/// <summary>
/// Writes the beginning of a JSON object.
/// </summary>
public virtual void WriteStartObject()
{
InternalWriteStart(JsonToken.StartObject, JsonContainerType.Object);
}
/// <summary>
/// Writes the end of a JSON object.
/// </summary>
public virtual void WriteEndObject()
{
InternalWriteEnd(JsonContainerType.Object);
}
/// <summary>
/// Writes the beginning of a JSON array.
/// </summary>
public virtual void WriteStartArray()
{
InternalWriteStart(JsonToken.StartArray, JsonContainerType.Array);
}
/// <summary>
/// Writes the end of an array.
/// </summary>
public virtual void WriteEndArray()
{
InternalWriteEnd(JsonContainerType.Array);
}
/// <summary>
/// Writes the start of a constructor with the given name.
/// </summary>
/// <param name="name">The name of the constructor.</param>
public virtual void WriteStartConstructor(string name)
{
InternalWriteStart(JsonToken.StartConstructor, JsonContainerType.Constructor);
}
/// <summary>
/// Writes the end constructor.
/// </summary>
public virtual void WriteEndConstructor()
{
InternalWriteEnd(JsonContainerType.Constructor);
}
/// <summary>
/// Writes the property name of a name/value pair on a JSON object.
/// </summary>
/// <param name="name">The name of the property.</param>
public virtual void WritePropertyName(string name)
{
InternalWritePropertyName(name);
}
/// <summary>
/// Writes the property name of a name/value pair on a JSON object.
/// </summary>
/// <param name="name">The name of the property.</param>
/// <param name="escape">A flag to indicate whether the text should be escaped when it is written as a JSON property name.</param>
public virtual void WritePropertyName(string name, bool escape)
{
WritePropertyName(name);
}
/// <summary>
/// Writes the end of the current JSON object or array.
/// </summary>
public virtual void WriteEnd()
{
WriteEnd(Peek());
}
/// <summary>
/// Writes the current <see cref="JsonReader"/> token and its children.
/// </summary>
/// <param name="reader">The <see cref="JsonReader"/> to read the token from.</param>
public void WriteToken(JsonReader reader)
{
WriteToken(reader, true, true, true);
}
/// <summary>
/// Writes the current <see cref="JsonReader"/> token.
/// </summary>
/// <param name="reader">The <see cref="JsonReader"/> to read the token from.</param>
/// <param name="writeChildren">A flag indicating whether the current token's children should be written.</param>
public void WriteToken(JsonReader reader, bool writeChildren)
{
ValidationUtils.ArgumentNotNull(reader, "reader");
WriteToken(reader, writeChildren, true, true);
}
/// <summary>
/// Writes the <see cref="JsonToken"/> token and its value.
/// </summary>
/// <param name="token">The <see cref="JsonToken"/> to write.</param>
/// <param name="value">
/// The value to write.
/// A value is only required for tokens that have an associated value, e.g. the <see cref="String"/> property name for <see cref="JsonToken.PropertyName"/>.
/// A null value can be passed to the method for tokens that don't have a value, e.g. <see cref="JsonToken.StartObject"/>.</param>
public void WriteToken(JsonToken token, object value)
{
WriteTokenInternal(token, value);
}
/// <summary>
/// Writes the <see cref="JsonToken"/> token.
/// </summary>
/// <param name="token">The <see cref="JsonToken"/> to write.</param>
public void WriteToken(JsonToken token)
{
WriteTokenInternal(token, null);
}
internal void WriteToken(JsonReader reader, bool writeChildren, bool writeDateConstructorAsDate, bool writeComments)
{
int initialDepth;
if (reader.TokenType == JsonToken.None)
initialDepth = -1;
else if (!JsonTokenUtils.IsStartToken(reader.TokenType))
initialDepth = reader.Depth + 1;
else
initialDepth = reader.Depth;
WriteToken(reader, initialDepth, writeChildren, writeDateConstructorAsDate, writeComments);
}
internal void WriteToken(JsonReader reader, int initialDepth, bool writeChildren, bool writeDateConstructorAsDate, bool writeComments)
{
do
{
// write a JValue date when the constructor is for a date
if (writeDateConstructorAsDate && reader.TokenType == JsonToken.StartConstructor && string.Equals(reader.Value.ToString(), "Date", StringComparison.Ordinal))
{
WriteConstructorDate(reader);
}
else
{
if (reader.TokenType != JsonToken.Comment || writeComments)
{
WriteTokenInternal(reader.TokenType, reader.Value);
}
}
} while (
// stop if we have reached the end of the token being read
initialDepth - 1 < reader.Depth - (JsonTokenUtils.IsEndToken(reader.TokenType) ? 1 : 0)
&& writeChildren
&& reader.Read());
}
private void WriteTokenInternal(JsonToken tokenType, object value)
{
switch (tokenType)
{
case JsonToken.None:
// read to next
break;
case JsonToken.StartObject:
WriteStartObject();
break;
case JsonToken.StartArray:
WriteStartArray();
break;
case JsonToken.StartConstructor:
ValidationUtils.ArgumentNotNull(value, "value");
WriteStartConstructor(value.ToString());
break;
case JsonToken.PropertyName:
ValidationUtils.ArgumentNotNull(value, "value");
WritePropertyName(value.ToString());
break;
case JsonToken.Comment:
WriteComment((value != null) ? value.ToString() : null);
break;
case JsonToken.Integer:
ValidationUtils.ArgumentNotNull(value, "value");
#if !(NET20 || NET35 || PORTABLE || PORTABLE40)
if (value is BigInteger)
{
WriteValue((BigInteger)value);
}
else
#endif
{
WriteValue(Convert.ToInt64(value, CultureInfo.InvariantCulture));
}
break;
case JsonToken.Float:
ValidationUtils.ArgumentNotNull(value, "value");
if (value is decimal)
WriteValue((decimal)value);
else if (value is double)
WriteValue((double)value);
else if (value is float)
WriteValue((float)value);
else
WriteValue(Convert.ToDouble(value, CultureInfo.InvariantCulture));
break;
case JsonToken.String:
ValidationUtils.ArgumentNotNull(value, "value");
WriteValue(value.ToString());
break;
case JsonToken.Boolean:
ValidationUtils.ArgumentNotNull(value, "value");
WriteValue(Convert.ToBoolean(value, CultureInfo.InvariantCulture));
break;
case JsonToken.Null:
WriteNull();
break;
case JsonToken.Undefined:
WriteUndefined();
break;
case JsonToken.EndObject:
WriteEndObject();
break;
case JsonToken.EndArray:
WriteEndArray();
break;
case JsonToken.EndConstructor:
WriteEndConstructor();
break;
case JsonToken.Date:
ValidationUtils.ArgumentNotNull(value, "value");
#if !NET20
if (value is DateTimeOffset)
WriteValue((DateTimeOffset)value);
else
#endif
WriteValue(Convert.ToDateTime(value, CultureInfo.InvariantCulture));
break;
case JsonToken.Raw:
WriteRawValue((value != null) ? value.ToString() : null);
break;
case JsonToken.Bytes:
ValidationUtils.ArgumentNotNull(value, "value");
if (value is Guid)
WriteValue((Guid)value);
else
WriteValue((byte[])value);
break;
default:
throw MiscellaneousUtils.CreateArgumentOutOfRangeException("TokenType", tokenType, "Unexpected token type.");
}
}
private void WriteConstructorDate(JsonReader reader)
{
if (!reader.Read())
throw JsonWriterException.Create(this, "Unexpected end when reading date constructor.", null);
if (reader.TokenType != JsonToken.Integer)
throw JsonWriterException.Create(this, "Unexpected token when reading date constructor. Expected Integer, got " + reader.TokenType, null);
long ticks = (long)reader.Value;
DateTime date = DateTimeUtils.ConvertJavaScriptTicksToDateTime(ticks);
if (!reader.Read())
throw JsonWriterException.Create(this, "Unexpected end when reading date constructor.", null);
if (reader.TokenType != JsonToken.EndConstructor)
throw JsonWriterException.Create(this, "Unexpected token when reading date constructor. Expected EndConstructor, got " + reader.TokenType, null);
WriteValue(date);
}
private void WriteEnd(JsonContainerType type)
{
switch (type)
{
case JsonContainerType.Object:
WriteEndObject();
break;
case JsonContainerType.Array:
WriteEndArray();
break;
case JsonContainerType.Constructor:
WriteEndConstructor();
break;
default:
throw JsonWriterException.Create(this, "Unexpected type when writing end: " + type, null);
}
}
private void AutoCompleteAll()
{
while (Top > 0)
{
WriteEnd();
}
}
private JsonToken GetCloseTokenForType(JsonContainerType type)
{
switch (type)
{
case JsonContainerType.Object:
return JsonToken.EndObject;
case JsonContainerType.Array:
return JsonToken.EndArray;
case JsonContainerType.Constructor:
return JsonToken.EndConstructor;
default:
throw JsonWriterException.Create(this, "No close token for type: " + type, null);
}
}
private void AutoCompleteClose(JsonContainerType type)
{
// write closing symbol and calculate new state
int levelsToComplete = 0;
if (_currentPosition.Type == type)
{
levelsToComplete = 1;
}
else
{
int top = Top - 2;
for (int i = top; i >= 0; i--)
{
int currentLevel = top - i;
if (_stack[currentLevel].Type == type)
{
levelsToComplete = i + 2;
break;
}
}
}
if (levelsToComplete == 0)
throw JsonWriterException.Create(this, "No token to close.", null);
for (int i = 0; i < levelsToComplete; i++)
{
JsonToken token = GetCloseTokenForType(Pop());
if (_currentState == State.Property)
WriteNull();
if (_formatting == Formatting.Indented)
{
if (_currentState != State.ObjectStart && _currentState != State.ArrayStart)
WriteIndent();
}
WriteEnd(token);
JsonContainerType currentLevelType = Peek();
switch (currentLevelType)
{
case JsonContainerType.Object:
_currentState = State.Object;
break;
case JsonContainerType.Array:
_currentState = State.Array;
break;
case JsonContainerType.Constructor:
_currentState = State.Array;
break;
case JsonContainerType.None:
_currentState = State.Start;
break;
default:
throw JsonWriterException.Create(this, "Unknown JsonType: " + currentLevelType, null);
}
}
}
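// Illustrative sketch (not part of the original source): because AutoCompleteClose searches the
// container stack, an end token also closes any deeper open containers, so the WriteEndObject call
// below closes the open array first and then the object.
private static string AutoCompleteCloseExample()
{
    var sb = new System.Text.StringBuilder();
    using (var writer = new JsonTextWriter(new System.IO.StringWriter(sb)))
    {
        writer.WriteStartObject();
        writer.WritePropertyName("a");
        writer.WriteStartArray();
        writer.WriteValue(1);
        writer.WriteEndObject(); // closes the open array, then the object
    }
    return sb.ToString(); // {"a":[1]}
}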
/// <summary>
/// Writes the specified end token.
/// </summary>
/// <param name="token">The end token to write.</param>
protected virtual void WriteEnd(JsonToken token)
{
}
/// <summary>
/// Writes indent characters.
/// </summary>
protected virtual void WriteIndent()
{
}
/// <summary>
/// Writes the JSON value delimiter.
/// </summary>
protected virtual void WriteValueDelimiter()
{
}
/// <summary>
/// Writes an indent space.
/// </summary>
protected virtual void WriteIndentSpace()
{
}
internal void AutoComplete(JsonToken tokenBeingWritten)
{
// gets new state based on the current state and what is being written
State newState = StateArray[(int)tokenBeingWritten][(int)_currentState];
if (newState == State.Error)
throw JsonWriterException.Create(this, "Token {0} in state {1} would result in an invalid JSON object.".FormatWith(CultureInfo.InvariantCulture, tokenBeingWritten.ToString(), _currentState.ToString()), null);
if ((_currentState == State.Object || _currentState == State.Array || _currentState == State.Constructor) && tokenBeingWritten != JsonToken.Comment)
{
WriteValueDelimiter();
}
if (_formatting == Formatting.Indented)
{
if (_currentState == State.Property)
WriteIndentSpace();
// don't indent a property when it is the first token to be written (i.e. at the start)
if ((_currentState == State.Array || _currentState == State.ArrayStart || _currentState == State.Constructor || _currentState == State.ConstructorStart)
|| (tokenBeingWritten == JsonToken.PropertyName && _currentState != State.Start))
WriteIndent();
}
_currentState = newState;
}
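// Illustrative sketch (not part of the original source): the state table consulted above rejects
// token sequences that cannot form valid JSON. Writing a value directly inside an object, without
// a property name first, throws a JsonWriterException.
private static void AutoCompleteStateExample()
{
    using (var writer = new JsonTextWriter(new System.IO.StringWriter(new System.Text.StringBuilder())))
    {
        writer.WriteStartObject();
        // writer.WriteValue(1); // throws: "Token Integer in state ObjectStart would result in an invalid JSON object."
        writer.WritePropertyName("a");
        writer.WriteValue(1);    // valid: property name, then a value
        writer.WriteEndObject();
    }
}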
#region WriteValue methods
/// <summary>
/// Writes a null value.
/// </summary>
public virtual void WriteNull()
{
InternalWriteValue(JsonToken.Null);
}
/// <summary>
/// Writes an undefined value.
/// </summary>
public virtual void WriteUndefined()
{
InternalWriteValue(JsonToken.Undefined);
}
/// <summary>
/// Writes raw JSON without changing the writer's state.
/// </summary>
/// <param name="json">The raw JSON to write.</param>
public virtual void WriteRaw(string json)
{
InternalWriteRaw();
}
/// <summary>
/// Writes raw JSON where a value is expected and updates the writer's state.
/// </summary>
/// <param name="json">The raw JSON to write.</param>
public virtual void WriteRawValue(string json)
{
// hack. want writer to change state as if a value had been written
UpdateScopeWithFinishedValue();
AutoComplete(JsonToken.Undefined);
WriteRaw(json);
}
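// Illustrative sketch (not part of the original source): unlike WriteRaw, WriteRawValue goes
// through AutoComplete, so raw fragments written inside an array get the usual delimiters.
private static string WriteRawValueExample()
{
    var sb = new System.Text.StringBuilder();
    using (var writer = new JsonTextWriter(new System.IO.StringWriter(sb)))
    {
        writer.WriteStartArray();
        writer.WriteRawValue("{\"x\":1}"); // AutoComplete inserts the ',' before the next value
        writer.WriteRawValue("{\"y\":2}");
        writer.WriteEndArray();
    }
    return sb.ToString(); // [{"x":1},{"y":2}]
}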
/// <summary>
/// Writes a <see cref="String"/> value.
/// </summary>
/// <param name="value">The <see cref="String"/> value to write.</param>
public virtual void WriteValue(string value)
{
InternalWriteValue(JsonToken.String);
}
/// <summary>
/// Writes a <see cref="Int32"/> value.
/// </summary>
/// <param name="value">The <see cref="Int32"/> value to write.</param>
public virtual void WriteValue(int value)
{
InternalWriteValue(JsonToken.Integer);
}
/// <summary>
/// Writes a <see cref="UInt32"/> value.
/// </summary>
/// <param name="value">The <see cref="UInt32"/> value to write.</param>
[CLSCompliant(false)]
public virtual void WriteValue(uint value)
{
InternalWriteValue(JsonToken.Integer);
}
/// <summary>
/// Writes a <see cref="Int64"/> value.
/// </summary>
/// <param name="value">The <see cref="Int64"/> value to write.</param>
public virtual void WriteValue(long value)
{
InternalWriteValue(JsonToken.Integer);
}
/// <summary>
/// Writes a <see cref="UInt64"/> value.
/// </summary>
/// <param name="value">The <see cref="UInt64"/> value to write.</param>
[CLSCompliant(false)]
public virtual void WriteValue(ulong value)
{
InternalWriteValue(JsonToken.Integer);
}
/// <summary>
/// Writes a <see cref="Single"/> value.
/// </summary>
/// <param name="value">The <see cref="Single"/> value to write.</param>
public virtual void WriteValue(float value)
{
InternalWriteValue(JsonToken.Float);
}
/// <summary>
/// Writes a <see cref="Double"/> value.
/// </summary>
/// <param name="value">The <see cref="Double"/> value to write.</param>
public virtual void WriteValue(double value)
{
InternalWriteValue(JsonToken.Float);
}
/// <summary>
/// Writes a <see cref="Boolean"/> value.
/// </summary>
/// <param name="value">The <see cref="Boolean"/> value to write.</param>
public virtual void WriteValue(bool value)
{
InternalWriteValue(JsonToken.Boolean);
}
/// <summary>
/// Writes a <see cref="Int16"/> value.
/// </summary>
/// <param name="value">The <see cref="Int16"/> value to write.</param>
public virtual void WriteValue(short value)
{
InternalWriteValue(JsonToken.Integer);
}
/// <summary>
/// Writes a <see cref="UInt16"/> value.
/// </summary>
/// <param name="value">The <see cref="UInt16"/> value to write.</param>
[CLSCompliant(false)]
public virtual void WriteValue(ushort value)
{
InternalWriteValue(JsonToken.Integer);
}
/// <summary>
/// Writes a <see cref="Char"/> value.
/// </summary>
/// <param name="value">The <see cref="Char"/> value to write.</param>
public virtual void WriteValue(char value)
{
InternalWriteValue(JsonToken.String);
}
/// <summary>
/// Writes a <see cref="Byte"/> value.
/// </summary>
/// <param name="value">The <see cref="Byte"/> value to write.</param>
public virtual void WriteValue(byte value)
{
InternalWriteValue(JsonToken.Integer);
}
/// <summary>
/// Writes a <see cref="SByte"/> value.
/// </summary>
/// <param name="value">The <see cref="SByte"/> value to write.</param>
[CLSCompliant(false)]
public virtual void WriteValue(sbyte value)
{
InternalWriteValue(JsonToken.Integer);
}
/// <summary>
/// Writes a <see cref="Decimal"/> value.
/// </summary>
/// <param name="value">The <see cref="Decimal"/> value to write.</param>
public virtual void WriteValue(decimal value)
{
InternalWriteValue(JsonToken.Float);
}
/// <summary>
/// Writes a <see cref="DateTime"/> value.
/// </summary>
/// <param name="value">The <see cref="DateTime"/> value to write.</param>
public virtual void WriteValue(DateTime value)
{
InternalWriteValue(JsonToken.Date);
}
#if !NET20
/// <summary>
/// Writes a <see cref="DateTimeOffset"/> value.
/// </summary>
/// <param name="value">The <see cref="DateTimeOffset"/> value to write.</param>
public virtual void WriteValue(DateTimeOffset value)
{
InternalWriteValue(JsonToken.Date);
}
#endif
/// <summary>
/// Writes a <see cref="Guid"/> value.
/// </summary>
/// <param name="value">The <see cref="Guid"/> value to write.</param>
public virtual void WriteValue(Guid value)
{
InternalWriteValue(JsonToken.String);
}
/// <summary>
/// Writes a <see cref="TimeSpan"/> value.
/// </summary>
/// <param name="value">The <see cref="TimeSpan"/> value to write.</param>
public virtual void WriteValue(TimeSpan value)
{
InternalWriteValue(JsonToken.String);
}
/// <summary>
/// Writes a <see cref="Nullable{Int32}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{Int32}"/> value to write.</param>
public virtual void WriteValue(int? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{UInt32}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{UInt32}"/> value to write.</param>
[CLSCompliant(false)]
public virtual void WriteValue(uint? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{Int64}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{Int64}"/> value to write.</param>
public virtual void WriteValue(long? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{UInt64}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{UInt64}"/> value to write.</param>
[CLSCompliant(false)]
public virtual void WriteValue(ulong? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{Single}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{Single}"/> value to write.</param>
public virtual void WriteValue(float? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{Double}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{Double}"/> value to write.</param>
public virtual void WriteValue(double? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{Boolean}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{Boolean}"/> value to write.</param>
public virtual void WriteValue(bool? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{Int16}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{Int16}"/> value to write.</param>
public virtual void WriteValue(short? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{UInt16}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{UInt16}"/> value to write.</param>
[CLSCompliant(false)]
public virtual void WriteValue(ushort? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{Char}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{Char}"/> value to write.</param>
public virtual void WriteValue(char? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{Byte}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{Byte}"/> value to write.</param>
public virtual void WriteValue(byte? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{SByte}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{SByte}"/> value to write.</param>
[CLSCompliant(false)]
public virtual void WriteValue(sbyte? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{Decimal}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{Decimal}"/> value to write.</param>
public virtual void WriteValue(decimal? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{DateTime}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{DateTime}"/> value to write.</param>
public virtual void WriteValue(DateTime? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
#if !NET20
/// <summary>
/// Writes a <see cref="Nullable{DateTimeOffset}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{DateTimeOffset}"/> value to write.</param>
public virtual void WriteValue(DateTimeOffset? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
#endif
/// <summary>
/// Writes a <see cref="Nullable{Guid}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{Guid}"/> value to write.</param>
public virtual void WriteValue(Guid? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Nullable{TimeSpan}"/> value.
/// </summary>
/// <param name="value">The <see cref="Nullable{TimeSpan}"/> value to write.</param>
public virtual void WriteValue(TimeSpan? value)
{
if (value == null)
WriteNull();
else
WriteValue(value.Value);
}
/// <summary>
/// Writes a <see cref="Byte"/>[] value.
/// </summary>
/// <param name="value">The <see cref="Byte"/>[] value to write.</param>
public virtual void WriteValue(byte[] value)
{
if (value == null)
WriteNull();
else
InternalWriteValue(JsonToken.Bytes);
}
/// <summary>
/// Writes a <see cref="Uri"/> value.
/// </summary>
/// <param name="value">The <see cref="Uri"/> value to write.</param>
public virtual void WriteValue(Uri value)
{
if (value == null)
WriteNull();
else
InternalWriteValue(JsonToken.String);
}
/// <summary>
/// Writes a <see cref="Object"/> value.
/// An error will be raised if the value cannot be written as a single JSON token.
/// </summary>
/// <param name="value">The <see cref="Object"/> value to write.</param>
public virtual void WriteValue(object value)
{
if (value == null)
{
WriteNull();
}
else
{
#if !(NET20 || NET35 || PORTABLE || PORTABLE40)
// this is here because adding a WriteValue(BigInteger) to JsonWriter will
// mean the user has to add a reference to System.Numerics.dll
if (value is BigInteger)
throw CreateUnsupportedTypeException(this, value);
#endif
WriteValue(this, ConvertUtils.GetTypeCode(value.GetType()), value);
}
}
#endregion
/// <summary>
/// Writes out a comment <code>/*...*/</code> containing the specified text.
/// </summary>
/// <param name="text">Text to place inside the comment.</param>
public virtual void WriteComment(string text)
{
InternalWriteComment();
}
/// <summary>
/// Writes out the given white space.
/// </summary>
/// <param name="ws">The string of white space characters.</param>
public virtual void WriteWhitespace(string ws)
{
InternalWriteWhitespace(ws);
}
void IDisposable.Dispose()
{
Dispose(true);
}
private void Dispose(bool disposing)
{
if (_currentState != State.Closed)
Close();
}
internal static void WriteValue(JsonWriter writer, PrimitiveTypeCode typeCode, object value)
{
switch (typeCode)
{
case PrimitiveTypeCode.Char:
writer.WriteValue((char)value);
break;
case PrimitiveTypeCode.CharNullable:
writer.WriteValue((value == null) ? (char?)null : (char)value);
break;
case PrimitiveTypeCode.Boolean:
writer.WriteValue((bool)value);
break;
case PrimitiveTypeCode.BooleanNullable:
writer.WriteValue((value == null) ? (bool?)null : (bool)value);
break;
case PrimitiveTypeCode.SByte:
writer.WriteValue((sbyte)value);
break;
case PrimitiveTypeCode.SByteNullable:
writer.WriteValue((value == null) ? (sbyte?)null : (sbyte)value);
break;
case PrimitiveTypeCode.Int16:
writer.WriteValue((short)value);
break;
case PrimitiveTypeCode.Int16Nullable:
writer.WriteValue((value == null) ? (short?)null : (short)value);
break;
case PrimitiveTypeCode.UInt16:
writer.WriteValue((ushort)value);
break;
case PrimitiveTypeCode.UInt16Nullable:
writer.WriteValue((value == null) ? (ushort?)null : (ushort)value);
break;
case PrimitiveTypeCode.Int32:
writer.WriteValue((int)value);
break;
case PrimitiveTypeCode.Int32Nullable:
writer.WriteValue((value == null) ? (int?)null : (int)value);
break;
case PrimitiveTypeCode.Byte:
writer.WriteValue((byte)value);
break;
case PrimitiveTypeCode.ByteNullable:
writer.WriteValue((value == null) ? (byte?)null : (byte)value);
break;
case PrimitiveTypeCode.UInt32:
writer.WriteValue((uint)value);
break;
case PrimitiveTypeCode.UInt32Nullable:
writer.WriteValue((value == null) ? (uint?)null : (uint)value);
break;
case PrimitiveTypeCode.Int64:
writer.WriteValue((long)value);
break;
case PrimitiveTypeCode.Int64Nullable:
writer.WriteValue((value == null) ? (long?)null : (long)value);
break;
case PrimitiveTypeCode.UInt64:
writer.WriteValue((ulong)value);
break;
case PrimitiveTypeCode.UInt64Nullable:
writer.WriteValue((value == null) ? (ulong?)null : (ulong)value);
break;
case PrimitiveTypeCode.Single:
writer.WriteValue((float)value);
break;
case PrimitiveTypeCode.SingleNullable:
writer.WriteValue((value == null) ? (float?)null : (float)value);
break;
case PrimitiveTypeCode.Double:
writer.WriteValue((double)value);
break;
case PrimitiveTypeCode.DoubleNullable:
writer.WriteValue((value == null) ? (double?)null : (double)value);
break;
case PrimitiveTypeCode.DateTime:
writer.WriteValue((DateTime)value);
break;
case PrimitiveTypeCode.DateTimeNullable:
writer.WriteValue((value == null) ? (DateTime?)null : (DateTime)value);
break;
#if !NET20
case PrimitiveTypeCode.DateTimeOffset:
writer.WriteValue((DateTimeOffset)value);
break;
case PrimitiveTypeCode.DateTimeOffsetNullable:
writer.WriteValue((value == null) ? (DateTimeOffset?)null : (DateTimeOffset)value);
break;
#endif
case PrimitiveTypeCode.Decimal:
writer.WriteValue((decimal)value);
break;
case PrimitiveTypeCode.DecimalNullable:
writer.WriteValue((value == null) ? (decimal?)null : (decimal)value);
break;
case PrimitiveTypeCode.Guid:
writer.WriteValue((Guid)value);
break;
case PrimitiveTypeCode.GuidNullable:
writer.WriteValue((value == null) ? (Guid?)null : (Guid)value);
break;
case PrimitiveTypeCode.TimeSpan:
writer.WriteValue((TimeSpan)value);
break;
case PrimitiveTypeCode.TimeSpanNullable:
writer.WriteValue((value == null) ? (TimeSpan?)null : (TimeSpan)value);
break;
#if !(PORTABLE || PORTABLE40 || NET35 || NET20)
case PrimitiveTypeCode.BigInteger:
// this will call the WriteValue(object) overload
writer.WriteValue((BigInteger)value);
break;
case PrimitiveTypeCode.BigIntegerNullable:
// this will call the WriteValue(object) overload
writer.WriteValue((value == null) ? (BigInteger?)null : (BigInteger)value);
break;
#endif
case PrimitiveTypeCode.Uri:
writer.WriteValue((Uri)value);
break;
case PrimitiveTypeCode.String:
writer.WriteValue((string)value);
break;
case PrimitiveTypeCode.Bytes:
writer.WriteValue((byte[])value);
break;
#if !(PORTABLE || DOTNET)
case PrimitiveTypeCode.DBNull:
writer.WriteNull();
break;
#endif
default:
#if !PORTABLE
if (value is IConvertible)
{
// the value is a non-standard IConvertible
// convert to the underlying value and retry
IConvertible convertable = (IConvertible)value;
TypeInformation typeInformation = ConvertUtils.GetTypeInformation(convertable);
// if convertable has an underlying typecode of Object then attempt to convert it to a string
PrimitiveTypeCode resolvedTypeCode = (typeInformation.TypeCode == PrimitiveTypeCode.Object) ? PrimitiveTypeCode.String : typeInformation.TypeCode;
Type resolvedType = (typeInformation.TypeCode == PrimitiveTypeCode.Object) ? typeof(string) : typeInformation.Type;
object convertedValue = convertable.ToType(resolvedType, CultureInfo.InvariantCulture);
WriteValue(writer, resolvedTypeCode, convertedValue);
break;
}
else
#endif
{
throw CreateUnsupportedTypeException(writer, value);
}
}
}
private static JsonWriterException CreateUnsupportedTypeException(JsonWriter writer, object value)
{
return JsonWriterException.Create(writer, "Unsupported type: {0}. Use the JsonSerializer class to get the object's JSON representation.".FormatWith(CultureInfo.InvariantCulture, value.GetType()), null);
}
/// <summary>
/// Sets the state of the JsonWriter.
/// </summary>
/// <param name="token">The JsonToken being written.</param>
/// <param name="value">The value being written.</param>
protected void SetWriteState(JsonToken token, object value)
{
switch (token)
{
case JsonToken.StartObject:
InternalWriteStart(token, JsonContainerType.Object);
break;
case JsonToken.StartArray:
InternalWriteStart(token, JsonContainerType.Array);
break;
case JsonToken.StartConstructor:
InternalWriteStart(token, JsonContainerType.Constructor);
break;
case JsonToken.PropertyName:
if (!(value is string))
throw new ArgumentException("A name is required when setting property name state.", "value");
InternalWritePropertyName((string)value);
break;
case JsonToken.Comment:
InternalWriteComment();
break;
case JsonToken.Raw:
InternalWriteRaw();
break;
case JsonToken.Integer:
case JsonToken.Float:
case JsonToken.String:
case JsonToken.Boolean:
case JsonToken.Date:
case JsonToken.Bytes:
case JsonToken.Null:
case JsonToken.Undefined:
InternalWriteValue(token);
break;
case JsonToken.EndObject:
InternalWriteEnd(JsonContainerType.Object);
break;
case JsonToken.EndArray:
InternalWriteEnd(JsonContainerType.Array);
break;
case JsonToken.EndConstructor:
InternalWriteEnd(JsonContainerType.Constructor);
break;
default:
throw new ArgumentOutOfRangeException("token");
}
}
internal void InternalWriteEnd(JsonContainerType container)
{
AutoCompleteClose(container);
}
internal void InternalWritePropertyName(string name)
{
_currentPosition.PropertyName = name;
AutoComplete(JsonToken.PropertyName);
}
internal void InternalWriteRaw()
{
}
internal void InternalWriteStart(JsonToken token, JsonContainerType container)
{
UpdateScopeWithFinishedValue();
AutoComplete(token);
Push(container);
}
internal void InternalWriteValue(JsonToken token)
{
UpdateScopeWithFinishedValue();
AutoComplete(token);
}
internal void InternalWriteWhitespace(string ws)
{
if (ws != null)
{
if (!StringUtils.IsWhiteSpace(ws))
throw JsonWriterException.Create(this, "Only white space characters should be used.", null);
}
}
internal void InternalWriteComment()
{
AutoComplete(JsonToken.Comment);
}
}
}
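// ---------------------------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source): the WriteToken switch above is
// what runs when tokens are copied from a JsonReader to a JsonWriter, for example to re-indent a
// JSON document. Only the public Newtonsoft.Json API is assumed.
// ---------------------------------------------------------------------------------------------
using System.IO;
using Newtonsoft.Json;
namespace JsonWriterUsageSketch
{
    internal static class ReformatExample
    {
        internal static string Reindent(string json)
        {
            using (var reader = new JsonTextReader(new StringReader(json)))
            using (var stringWriter = new StringWriter())
            using (var writer = new JsonTextWriter(stringWriter) { Formatting = Formatting.Indented })
            {
                // WriteToken(JsonReader) walks the reader and dispatches every token through the
                // token/value overload shown above.
                writer.WriteToken(reader);
                return stringWriter.ToString();
            }
        }
    }
}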
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Collections.Generic;
using System.ComponentModel.Composition;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.CodeAnalysis.CodeActions;
using Microsoft.CodeAnalysis.Editor.Shared.Utilities;
using Microsoft.CodeAnalysis.Editor.Undo;
using Microsoft.CodeAnalysis.Navigation;
using Microsoft.CodeAnalysis.Notification;
using Microsoft.CodeAnalysis.Shared.Extensions;
using Microsoft.CodeAnalysis.Shared.Utilities;
using Microsoft.CodeAnalysis.Text;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.Editor.Implementation.CodeActions
{
[Export(typeof(ICodeActionEditHandlerService))]
internal class CodeActionEditHandlerService : ForegroundThreadAffinitizedObject, ICodeActionEditHandlerService
{
private readonly IPreviewFactoryService _previewService;
private readonly IInlineRenameService _renameService;
private readonly ITextBufferAssociatedViewService _associatedViewService;
[ImportingConstructor]
public CodeActionEditHandlerService(
IPreviewFactoryService previewService,
IInlineRenameService renameService,
ITextBufferAssociatedViewService associatedViewService)
{
_previewService = previewService;
_renameService = renameService;
_associatedViewService = associatedViewService;
}
public ITextBufferAssociatedViewService AssociatedViewService
{
get { return _associatedViewService; }
}
public SolutionPreviewResult GetPreviews(
Workspace workspace, IEnumerable<CodeActionOperation> operations, CancellationToken cancellationToken)
{
if (operations == null)
{
return null;
}
SolutionPreviewResult currentResult = null;
foreach (var op in operations)
{
cancellationToken.ThrowIfCancellationRequested();
var applyChanges = op as ApplyChangesOperation;
if (applyChanges != null)
{
var oldSolution = workspace.CurrentSolution;
var newSolution = applyChanges.ChangedSolution.WithMergedLinkedFileChangesAsync(oldSolution, cancellationToken: cancellationToken).WaitAndGetResult(cancellationToken);
var preview = _previewService.GetSolutionPreviews(
oldSolution, newSolution, cancellationToken);
if (preview != null && !preview.IsEmpty)
{
currentResult = SolutionPreviewResult.Merge(currentResult, preview);
continue;
}
}
var previewOp = op as PreviewOperation;
if (previewOp != null)
{
currentResult = SolutionPreviewResult.Merge(currentResult,
new SolutionPreviewResult(new SolutionPreviewItem(
projectId: null, documentId: null,
lazyPreview: c => previewOp.GetPreviewAsync(c))));
continue;
}
var title = op.Title;
if (title != null)
{
currentResult = SolutionPreviewResult.Merge(currentResult,
new SolutionPreviewResult(new SolutionPreviewItem(
projectId: null, documentId: null, text: title)));
continue;
}
}
return currentResult;
}
public async Task ApplyAsync(
Workspace workspace, Document fromDocument,
IEnumerable<CodeActionOperation> operations,
string title, IProgressTracker progressTracker,
CancellationToken cancellationToken)
{
this.AssertIsForeground();
if (_renameService.ActiveSession != null)
{
workspace.Services.GetService<INotificationService>()?.SendNotification(
EditorFeaturesResources.Cannot_apply_operation_while_a_rename_session_is_active,
severity: NotificationSeverity.Error);
return;
}
#if DEBUG
var documentErrorLookup = new HashSet<DocumentId>();
foreach (var project in workspace.CurrentSolution.Projects)
{
foreach (var document in project.Documents)
{
// ConfigureAwait(true) so we come back to the same thread as
// we do all application on the UI thread.
if (!await document.HasAnyErrorsAsync(cancellationToken).ConfigureAwait(true))
{
documentErrorLookup.Add(document.Id);
}
}
}
#endif
var oldSolution = workspace.CurrentSolution;
Solution updatedSolution = oldSolution;
var operationsList = operations.ToList();
if (operationsList.Count > 1)
{
// Make a linked undo to wrap all these operations. This way we should
// be able to undo them all with one user action.
//
// Note: we only wrap things with an undo action if:
//
// 1. We have multiple operations (this code here).
// 2. We have a SolutionChangedAction and we're making changes to multiple
// documents. (Below in ProcessOperations).
//
// Or, in other words, if we know we're only editing a single file, then we
// don't wrap things with a global undo action.
//
// The reason for this is a global undo forces all files to save. And that's
// rather a heavyweight and unexpected experience for users (for the common
// case where a single file got edited).
//
// When we have multiple operations we assume that this is going to be
// more heavyweight. (After all, a full Roslyn solution change can be represented
// with a single operation). As such, we wrap with an undo so all the operations
// can be undone at once.
using (var transaction = workspace.OpenGlobalUndoTransaction(title))
{
// ConfigureAwait(true) so we come back to the same thread as
// we do all application on the UI thread.
updatedSolution = await ProcessOperationsAsync(
workspace, fromDocument, title, oldSolution,
updatedSolution, operationsList, progressTracker,
cancellationToken).ConfigureAwait(true);
// link current file in the global undo transaction
if (fromDocument != null)
{
transaction.AddDocument(fromDocument.Id);
}
transaction.Commit();
}
}
else
{
// ConfigureAwait(true) so we come back to the same thread as
// we do all application on the UI thread.
updatedSolution = await ProcessOperationsAsync(
workspace, fromDocument, title, oldSolution, updatedSolution, operationsList,
progressTracker, cancellationToken).ConfigureAwait(true);
}
#if DEBUG
foreach (var project in workspace.CurrentSolution.Projects)
{
foreach (var document in project.Documents)
{
if (documentErrorLookup.Contains(document.Id))
{
document.VerifyNoErrorsAsync("CodeAction introduced error in error-free code", cancellationToken).Wait(cancellationToken);
}
}
}
#endif
TryStartRenameSession(workspace, oldSolution, updatedSolution, cancellationToken);
}
private static async Task<Solution> ProcessOperationsAsync(
Workspace workspace, Document fromDocument, string title, Solution oldSolution, Solution updatedSolution, List<CodeActionOperation> operationsList,
IProgressTracker progressTracker, CancellationToken cancellationToken)
{
foreach (var operation in operationsList)
{
var applyChanges = operation as ApplyChangesOperation;
if (applyChanges == null)
{
operation.Apply(workspace, cancellationToken);
continue;
}
// There must be only one ApplyChangesOperation; any others are ignored.
if (updatedSolution == oldSolution)
{
updatedSolution = applyChanges.ChangedSolution;
var projectChanges = updatedSolution.GetChanges(oldSolution).GetProjectChanges();
var changedDocuments = projectChanges.SelectMany(pd => pd.GetChangedDocuments());
var changedAdditionalDocuments = projectChanges.SelectMany(pd => pd.GetChangedAdditionalDocuments());
var changedFiles = changedDocuments.Concat(changedAdditionalDocuments).ToList();
// 0 file changes
if (changedFiles.Count == 0)
{
operation.Apply(workspace, cancellationToken);
continue;
}
// 1 file change
SourceText text = null;
if (changedFiles.Count == 1)
{
if (changedDocuments.Any())
{
// ConfigureAwait(true) so we come back to the same thread as
// we do all application on the UI thread.
text = await oldSolution.GetDocument(changedDocuments.Single()).GetTextAsync(cancellationToken).ConfigureAwait(true);
}
else if (changedAdditionalDocuments.Any())
{
// ConfigureAwait(true) so we come back to the same thread as
// we do all application on the UI thread.
text = await oldSolution.GetAdditionalDocument(changedAdditionalDocuments.Single()).GetTextAsync(cancellationToken).ConfigureAwait(true);
}
}
if (text != null)
{
using (workspace.Services.GetService<ISourceTextUndoService>().RegisterUndoTransaction(text, title))
{
operation.Apply(workspace, cancellationToken);
continue;
}
}
// multiple file changes
using (var undoTransaction = workspace.OpenGlobalUndoTransaction(title))
{
operation.Apply(workspace, progressTracker, cancellationToken);
// link current file in the global undo transaction
if (fromDocument != null)
{
undoTransaction.AddDocument(fromDocument.Id);
}
undoTransaction.Commit();
}
continue;
}
}
return updatedSolution;
}
private void TryStartRenameSession(Workspace workspace, Solution oldSolution, Solution newSolution, CancellationToken cancellationToken)
{
var changedDocuments = newSolution.GetChangedDocuments(oldSolution);
foreach (var documentId in changedDocuments)
{
var document = newSolution.GetDocument(documentId);
if (!document.SupportsSyntaxTree)
{
continue;
}
var root = document.GetSyntaxRootSynchronously(cancellationToken);
var renameTokenOpt = root.GetAnnotatedNodesAndTokens(RenameAnnotation.Kind)
.Where(s => s.IsToken)
.Select(s => s.AsToken())
.FirstOrNullable();
if (renameTokenOpt.HasValue)
{
// It's possible that the workspace's current solution is not the same as
// newSolution. This can happen if the workspace host performs other edits
// during ApplyChanges, such as in the Venus scenario where indentation and
// formatting can happen. To work around this, we create a SyntaxPath to the
// rename token in the newSolution and resolve it to the current solution.
var pathToRenameToken = new SyntaxPath(renameTokenOpt.Value);
var latestDocument = workspace.CurrentSolution.GetDocument(documentId);
var latestRoot = latestDocument.GetSyntaxRootSynchronously(cancellationToken);
SyntaxNodeOrToken resolvedRenameToken;
if (pathToRenameToken.TryResolve(latestRoot, out resolvedRenameToken) &&
resolvedRenameToken.IsToken)
{
var editorWorkspace = workspace;
var navigationService = editorWorkspace.Services.GetService<IDocumentNavigationService>();
if (navigationService.TryNavigateToSpan(editorWorkspace, documentId, resolvedRenameToken.Span))
{
var openDocument = workspace.CurrentSolution.GetDocument(documentId);
var openRoot = openDocument.GetSyntaxRootSynchronously(cancellationToken);
// NOTE: We need to resolve the syntax path again in case VB line commit kicked in
// due to the navigation.
// TODO(DustinCa): We still have a potential problem here with VB line commit,
// because it can insert tokens and all sorts of other business, which could
// wind up with us not being able to resolve the token.
if (pathToRenameToken.TryResolve(openRoot, out resolvedRenameToken) &&
resolvedRenameToken.IsToken)
{
var snapshot = openDocument.GetTextAsync(cancellationToken).WaitAndGetResult(cancellationToken).FindCorrespondingEditorTextSnapshot();
if (snapshot != null)
{
_renameService.StartInlineSession(openDocument, resolvedRenameToken.AsToken().Span, cancellationToken);
}
}
}
}
return;
}
}
}
}
}
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Claims;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.DataProtection;
using Microsoft.AspNetCore.Identity;
using Microsoft.Extensions.Logging;
using OrchardCore.Modules;
using OrchardCore.Security.Services;
using OrchardCore.Users.Handlers;
using OrchardCore.Users.Indexes;
using OrchardCore.Users.Models;
using YesSql;
namespace OrchardCore.Users.Services
{
public class UserStore :
IUserClaimStore<IUser>,
IUserRoleStore<IUser>,
IUserPasswordStore<IUser>,
IUserEmailStore<IUser>,
IUserSecurityStampStore<IUser>,
IUserLoginStore<IUser>,
IUserLockoutStore<IUser>,
IUserAuthenticationTokenStore<IUser>
{
private const string TokenProtector = "OrchardCore.UserStore.Token";
private readonly ISession _session;
private readonly IRoleService _roleService;
private readonly ILookupNormalizer _keyNormalizer;
private readonly IUserIdGenerator _userIdGenerator;
private readonly ILogger _logger;
private readonly IDataProtectionProvider _dataProtectionProvider;
public UserStore(ISession session,
IRoleService roleService,
ILookupNormalizer keyNormalizer,
IUserIdGenerator userIdGenerator,
ILogger<UserStore> logger,
IEnumerable<IUserEventHandler> handlers,
IDataProtectionProvider dataProtectionProvider)
{
_session = session;
_roleService = roleService;
_keyNormalizer = keyNormalizer;
_userIdGenerator = userIdGenerator;
_logger = logger;
_dataProtectionProvider = dataProtectionProvider;
Handlers = handlers;
}
public IEnumerable<IUserEventHandler> Handlers { get; private set; }
public void Dispose()
{
}
public string NormalizeKey(string key)
{
return _keyNormalizer == null ? key : _keyNormalizer.NormalizeName(key);
}
#region IUserStore<IUser>
public async Task<IdentityResult> CreateAsync(IUser user, CancellationToken cancellationToken = default(CancellationToken))
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
if (!(user is User newUser))
{
throw new ArgumentException("Expected a User instance.", nameof(user));
}
var newUserId = newUser.UserId;
if (String.IsNullOrEmpty(newUserId))
{
// Due to database collation we normalize the userId to lower invariant.
newUserId = _userIdGenerator.GenerateUniqueId(user).ToLowerInvariant();
}
try
{
var attempts = 10;
while (await _session.QueryIndex<UserIndex>(x => x.UserId == newUserId).CountAsync() != 0)
{
if (attempts-- == 0)
{
throw new ApplicationException("Couldn't generate a unique user id. Too many attempts.");
}
newUserId = _userIdGenerator.GenerateUniqueId(user).ToLowerInvariant();
}
newUser.UserId = newUserId;
var context = new UserCreateContext(user);
await Handlers.InvokeAsync((handler, context) => handler.CreatingAsync(context), context, _logger);
if (context.Cancel)
{
return IdentityResult.Failed();
}
_session.Save(user);
await _session.SaveChangesAsync();
await Handlers.InvokeAsync((handler, context) => handler.CreatedAsync(context), context, _logger);
}
catch (Exception e)
{
_logger.LogError(e, "Unexpected error while creating a new user.");
return IdentityResult.Failed();
}
return IdentityResult.Success;
}
public async Task<IdentityResult> DeleteAsync(IUser user, CancellationToken cancellationToken = default(CancellationToken))
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
try
{
var context = new UserDeleteContext(user);
await Handlers.InvokeAsync((handler, context) => handler.DeletingAsync(context), context, _logger);
if (context.Cancel)
{
return IdentityResult.Failed();
}
_session.Delete(user);
await _session.SaveChangesAsync();
await Handlers.InvokeAsync((handler, context) => handler.DeletedAsync(context), context, _logger);
}
catch (Exception e)
{
_logger.LogError(e, "Unexpected error while deleting a user.");
return IdentityResult.Failed();
}
return IdentityResult.Success;
}
public async Task<IUser> FindByIdAsync(string userId, CancellationToken cancellationToken = default(CancellationToken))
{
return await _session.Query<User, UserIndex>(u => u.UserId == userId).FirstOrDefaultAsync();
}
public async Task<IUser> FindByNameAsync(string normalizedUserName, CancellationToken cancellationToken = default(CancellationToken))
{
return await _session.Query<User, UserIndex>(u => u.NormalizedUserName == normalizedUserName).FirstOrDefaultAsync();
}
public Task<string> GetNormalizedUserNameAsync(IUser user, CancellationToken cancellationToken = default(CancellationToken))
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
return Task.FromResult(((User)user).NormalizedUserName);
}
public Task<string> GetUserIdAsync(IUser user, CancellationToken cancellationToken = default(CancellationToken))
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
return Task.FromResult(((User)user).UserId);
}
public Task<string> GetUserNameAsync(IUser user, CancellationToken cancellationToken = default(CancellationToken))
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
return Task.FromResult(((User)user).UserName);
}
public Task SetNormalizedUserNameAsync(IUser user, string normalizedName, CancellationToken cancellationToken = default(CancellationToken))
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
((User)user).NormalizedUserName = normalizedName;
return Task.CompletedTask;
}
public Task SetUserNameAsync(IUser user, string userName, CancellationToken cancellationToken = default(CancellationToken))
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
((User)user).UserName = userName;
return Task.CompletedTask;
}
public async Task<IdentityResult> UpdateAsync(IUser user, CancellationToken cancellationToken = default(CancellationToken))
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
try
{
var context = new UserUpdateContext(user);
await Handlers.InvokeAsync((handler, context) => handler.UpdatingAsync(context), context, _logger);
if (context.Cancel)
{
return IdentityResult.Failed();
}
_session.Save(user);
await _session.SaveChangesAsync();
await Handlers.InvokeAsync((handler, context) => handler.UpdatedAsync(context), context, _logger);
}
catch (Exception e)
{
_logger.LogError(e, "Unexpected error while updating a user.");
return IdentityResult.Failed();
}
return IdentityResult.Success;
}
#endregion IUserStore<IUser>
#region IUserPasswordStore<IUser>
public Task<string> GetPasswordHashAsync(IUser user, CancellationToken cancellationToken = default(CancellationToken))
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
return Task.FromResult(((User)user).PasswordHash);
}
public Task SetPasswordHashAsync(IUser user, string passwordHash, CancellationToken cancellationToken = default(CancellationToken))
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
((User)user).PasswordHash = passwordHash;
return Task.CompletedTask;
}
public Task<bool> HasPasswordAsync(IUser user, CancellationToken cancellationToken = default(CancellationToken))
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
return Task.FromResult(((User)user).PasswordHash != null);
}
#endregion IUserPasswordStore<IUser>
#region ISecurityStampValidator<IUser>
public Task SetSecurityStampAsync(IUser user, string stamp, CancellationToken cancellationToken = default(CancellationToken))
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
((User)user).SecurityStamp = stamp;
return Task.CompletedTask;
}
public Task<string> GetSecurityStampAsync(IUser user, CancellationToken cancellationToken = default(CancellationToken))
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
return Task.FromResult(((User)user).SecurityStamp);
}
#endregion ISecurityStampValidator<IUser>
#region IUserEmailStore<IUser>
public Task SetEmailAsync(IUser user, string email, CancellationToken cancellationToken)
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
((User)user).Email = email;
return Task.CompletedTask;
}
public Task<string> GetEmailAsync(IUser user, CancellationToken cancellationToken)
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
return Task.FromResult(((User)user).Email);
}
public Task<bool> GetEmailConfirmedAsync(IUser user, CancellationToken cancellationToken)
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
return Task.FromResult(((User)user).EmailConfirmed);
}
public Task SetEmailConfirmedAsync(IUser user, bool confirmed, CancellationToken cancellationToken)
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
((User)user).EmailConfirmed = confirmed;
return Task.CompletedTask;
}
public async Task<IUser> FindByEmailAsync(string normalizedEmail, CancellationToken cancellationToken)
{
return await _session.Query<User, UserIndex>(u => u.NormalizedEmail == normalizedEmail).FirstOrDefaultAsync();
}
public Task<string> GetNormalizedEmailAsync(IUser user, CancellationToken cancellationToken)
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
return Task.FromResult(((User)user).NormalizedEmail);
}
public Task SetNormalizedEmailAsync(IUser user, string normalizedEmail, CancellationToken cancellationToken)
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
((User)user).NormalizedEmail = normalizedEmail;
return Task.CompletedTask;
}
#endregion IUserEmailStore<IUser>
#region IUserRoleStore<IUser>
public async Task AddToRoleAsync(IUser user, string normalizedRoleName, CancellationToken cancellationToken)
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
var roleNames = await _roleService.GetRoleNamesAsync();
var roleName = roleNames?.FirstOrDefault(r => NormalizeKey(r) == normalizedRoleName);
if (string.IsNullOrWhiteSpace(roleName))
{
throw new InvalidOperationException($"Role {normalizedRoleName} does not exist.");
}
((User)user).RoleNames.Add(roleName);
}
public async Task RemoveFromRoleAsync(IUser user, string normalizedRoleName, CancellationToken cancellationToken)
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
var roleNames = await _roleService.GetRoleNamesAsync();
var roleName = roleNames?.FirstOrDefault(r => NormalizeKey(r) == normalizedRoleName);
if (string.IsNullOrWhiteSpace(roleName))
{
throw new InvalidOperationException($"Role {normalizedRoleName} does not exist.");
}
((User)user).RoleNames.Remove(roleName);
}
public Task<IList<string>> GetRolesAsync(IUser user, CancellationToken cancellationToken)
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
return Task.FromResult<IList<string>>(((User)user).RoleNames);
}
public Task<bool> IsInRoleAsync(IUser user, string normalizedRoleName, CancellationToken cancellationToken)
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
if (string.IsNullOrWhiteSpace(normalizedRoleName))
{
throw new ArgumentException("Value cannot be null or empty.", nameof(normalizedRoleName));
}
return Task.FromResult(((User)user).RoleNames.Contains(normalizedRoleName, StringComparer.OrdinalIgnoreCase));
}
public async Task<IList<IUser>> GetUsersInRoleAsync(string normalizedRoleName, CancellationToken cancellationToken)
{
if (string.IsNullOrEmpty(normalizedRoleName))
{
throw new ArgumentNullException(nameof(normalizedRoleName));
}
var users = await _session.Query<User, UserByRoleNameIndex>(u => u.RoleName == normalizedRoleName).ListAsync();
return users == null ? new List<IUser>() : users.ToList<IUser>();
}
#endregion IUserRoleStore<IUser>
#region IUserLoginStore<IUser>
public Task AddLoginAsync(IUser user, UserLoginInfo login, CancellationToken cancellationToken)
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
if (login == null)
{
throw new ArgumentNullException(nameof(login));
}
if (((User)user).LoginInfos.Any(i => i.LoginProvider == login.LoginProvider))
throw new InvalidOperationException($"Provider {login.LoginProvider} is already linked for {user.UserName}");
((User)user).LoginInfos.Add(login);
return Task.CompletedTask;
}
public async Task<IUser> FindByLoginAsync(string loginProvider, string providerKey, CancellationToken cancellationToken)
{
return await _session.Query<User, UserByLoginInfoIndex>(u => u.LoginProvider == loginProvider && u.ProviderKey == providerKey).FirstOrDefaultAsync();
}
public Task<IList<UserLoginInfo>> GetLoginsAsync(IUser user, CancellationToken cancellationToken)
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
return Task.FromResult<IList<UserLoginInfo>>(((User)user).LoginInfos);
}
public Task RemoveLoginAsync(IUser user, string loginProvider, string providerKey, CancellationToken cancellationToken)
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
var externalLogins = ((User)user).LoginInfos;
if (externalLogins != null)
{
var item = externalLogins.FirstOrDefault(c => c.LoginProvider == loginProvider && c.ProviderKey == providerKey);
if (item != null)
{
externalLogins.Remove(item);
}
}
return Task.CompletedTask;
}
#endregion IUserLoginStore<IUser>
#region IUserClaimStore<IUser>
public Task<IList<Claim>> GetClaimsAsync(IUser user, CancellationToken cancellationToken)
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
return Task.FromResult<IList<Claim>>(((User)user).UserClaims.Select(x => x.ToClaim()).ToList());
}
public Task AddClaimsAsync(IUser user, IEnumerable<Claim> claims, CancellationToken cancellationToken)
{
if (user == null)
throw new ArgumentNullException(nameof(user));
if (claims == null)
throw new ArgumentNullException(nameof(claims));
foreach (var claim in claims)
{
((User)user).UserClaims.Add(new UserClaim { ClaimType = claim.Type, ClaimValue = claim.Value });
}
return Task.CompletedTask;
}
public Task ReplaceClaimAsync(IUser user, Claim claim, Claim newClaim, CancellationToken cancellationToken)
{
if (user == null)
throw new ArgumentNullException(nameof(user));
if (claim == null)
throw new ArgumentNullException(nameof(claim));
if (newClaim == null)
throw new ArgumentNullException(nameof(newClaim));
foreach (var userClaim in ((User)user).UserClaims.Where(uc => uc.ClaimValue == claim.Value && uc.ClaimType == claim.Type))
{
userClaim.ClaimValue = newClaim.Value;
userClaim.ClaimType = newClaim.Type;
}
return Task.CompletedTask;
}
public Task RemoveClaimsAsync(IUser user, IEnumerable<Claim> claims, CancellationToken cancellationToken)
{
if (user == null)
throw new ArgumentNullException(nameof(user));
if (claims == null)
throw new ArgumentNullException(nameof(claims));
foreach (var claim in claims)
{
foreach (var userClaim in ((User)user).UserClaims.Where(uc => uc.ClaimValue == claim.Value && uc.ClaimType == claim.Type).ToList())
((User)user).UserClaims.Remove(userClaim);
}
return Task.CompletedTask;
}
public async Task<IList<IUser>> GetUsersForClaimAsync(Claim claim, CancellationToken cancellationToken)
{
if (claim == null)
throw new ArgumentNullException(nameof(claim));
var users = await _session.Query<User, UserByClaimIndex>(uc => uc.ClaimType == claim.Type && uc.ClaimValue == claim.Value).ListAsync();
return users.Cast<IUser>().ToList();
}
#endregion IUserClaimStore<IUser>
#region IUserAuthenticationTokenStore
public Task<string> GetTokenAsync(IUser user, string loginProvider, string name, CancellationToken cancellationToken = default(CancellationToken))
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
if (string.IsNullOrEmpty(loginProvider))
{
throw new ArgumentException("The login provider cannot be null or empty.", nameof(loginProvider));
}
if (string.IsNullOrEmpty(name))
{
throw new ArgumentException("The name cannot be null or empty.", nameof(name));
}
string tokenValue = null;
var userToken = GetUserToken(user, loginProvider, name);
if (userToken != null)
{
tokenValue = _dataProtectionProvider.CreateProtector(TokenProtector).Unprotect(userToken.Value);
}
return Task.FromResult(tokenValue);
}
public Task RemoveTokenAsync(IUser user, string loginProvider, string name, CancellationToken cancellationToken = default(CancellationToken))
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
if (string.IsNullOrEmpty(loginProvider))
{
throw new ArgumentException("The login provider cannot be null or empty.", nameof(loginProvider));
}
if (string.IsNullOrEmpty(name))
{
throw new ArgumentException("The name cannot be null or empty.", nameof(name));
}
var userToken = GetUserToken(user, loginProvider, name);
if (userToken != null)
{
((User)user).UserTokens.Remove(userToken);
}
return Task.CompletedTask;
}
public Task SetTokenAsync(IUser user, string loginProvider, string name, string value, CancellationToken cancellationToken = default(CancellationToken))
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
if (string.IsNullOrEmpty(loginProvider))
{
throw new ArgumentException("The login provider cannot be null or empty.", nameof(loginProvider));
}
if (string.IsNullOrEmpty(name))
{
throw new ArgumentException("The name cannot be null or empty.", nameof(name));
}
if (string.IsNullOrEmpty(value))
{
throw new ArgumentException("The value cannot be null or empty.", nameof(value));
}
var userToken = GetUserToken(user, loginProvider, name);
if (userToken == null)
{
userToken = new UserToken
{
LoginProvider = loginProvider,
Name = name
};
((User)user).UserTokens.Add(userToken);
}
// Encrypt the token
userToken.Value = _dataProtectionProvider.CreateProtector(TokenProtector).Protect(value);
return Task.CompletedTask;
}
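// Illustrative sketch (not part of the original source): the protect/unprotect round trip that
// SetTokenAsync and GetTokenAsync perform on UserToken.Value. The purpose string passed to
// CreateProtector must match on both sides, which is why TokenProtector is a constant.
private static string ProtectRoundTripExample(IDataProtectionProvider provider, string secret)
{
    var protector = provider.CreateProtector(TokenProtector);
    var cipherText = protector.Protect(secret); // the value stored in UserToken.Value
    return protector.Unprotect(cipherText);     // the value GetTokenAsync hands back
}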
private static UserToken GetUserToken(IUser user, string loginProvider, string name)
{
return ((User)user).UserTokens.FirstOrDefault(ut => ut.LoginProvider == loginProvider &&
ut.Name == name);
}
#endregion
#region IUserLockoutStore<IUser>
public Task<int> GetAccessFailedCountAsync(IUser user, CancellationToken cancellationToken)
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
return Task.FromResult(((User)user).AccessFailedCount);
}
public Task<bool> GetLockoutEnabledAsync(IUser user, CancellationToken cancellationToken)
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
return Task.FromResult(((User)user).IsLockoutEnabled);
}
public Task<DateTimeOffset?> GetLockoutEndDateAsync(IUser user, CancellationToken cancellationToken)
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
if (((User)user).LockoutEndUtc.HasValue)
{
return Task.FromResult<DateTimeOffset?>(((User)user).LockoutEndUtc.Value.ToUniversalTime());
}
else
{
return Task.FromResult<DateTimeOffset?>(null);
}
}
public Task<int> IncrementAccessFailedCountAsync(IUser user, CancellationToken cancellationToken)
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
((User)user).AccessFailedCount++;
return Task.FromResult(((User)user).AccessFailedCount);
}
public Task ResetAccessFailedCountAsync(IUser user, CancellationToken cancellationToken)
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
((User)user).AccessFailedCount = 0;
return Task.CompletedTask;
}
public Task SetLockoutEnabledAsync(IUser user, bool enabled, CancellationToken cancellationToken)
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
((User)user).IsLockoutEnabled = enabled;
return Task.CompletedTask;
}
public Task SetLockoutEndDateAsync(IUser user, DateTimeOffset? lockoutEnd, CancellationToken cancellationToken)
{
if (user == null)
{
throw new ArgumentNullException(nameof(user));
}
if (lockoutEnd.HasValue)
{
((User)user).LockoutEndUtc = lockoutEnd.Value.UtcDateTime;
}
else
{
((User)user).LockoutEndUtc = null;
}
return Task.CompletedTask;
}
#endregion IUserLockoutStore<IUser>
}
}
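// ---------------------------------------------------------------------------------------------
// Illustrative registration sketch (not part of the original source): one way a store like the
// one above can be surfaced to ASP.NET Core Identity. OrchardCore's real wiring differs; the
// point is that a single scoped UserStore instance backs every store interface it implements, so
// UserManager<IUser> sees one consistent session. Namespaces for IUser and UserStore are assumed
// from the file above.
// ---------------------------------------------------------------------------------------------
using Microsoft.AspNetCore.Identity;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.DependencyInjection.Extensions;
using OrchardCore.Users;
using OrchardCore.Users.Services;
namespace UserStoreRegistrationSketch
{
    internal static class UserStoreRegistrationExample
    {
        internal static void AddUserStoreSketch(IServiceCollection services)
        {
            services.TryAddScoped<UserStore>();
            services.TryAddScoped<IUserStore<IUser>>(sp => sp.GetRequiredService<UserStore>());
            services.TryAddScoped<IUserRoleStore<IUser>>(sp => sp.GetRequiredService<UserStore>());
            services.TryAddScoped<IUserEmailStore<IUser>>(sp => sp.GetRequiredService<UserStore>());
        }
    }
}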
/*
* REST API Documentation for Schoolbus
*
* API Sample
*
* OpenAPI spec version: v1
*
*
*/
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.IO;
using System.Linq;
using System.Net;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Mvc;
using Microsoft.EntityFrameworkCore;
using Newtonsoft.Json;
using SchoolBusAPI.Models;
using SchoolBusAPI.ViewModels;
using SchoolBusAPI.Mappings;
namespace SchoolBusAPI.Services.Impl
{
/// <summary>
///
/// </summary>
public class RoleApiService : IRoleApiService
{
private readonly DbAppContext _context;
/// <summary>
/// Create a service and set the database context
/// </summary>
public RoleApiService(DbAppContext context)
{
_context = context;
}
/// <summary>
///
/// </summary>
/// <remarks>Bulk load of role permissions</remarks>
/// <param name="items"></param>
/// <response code="201">Role permissions created</response>
public IActionResult RolepermissionsBulkPostAsync(RolePermission[] items)
{
if (items == null)
{
return new BadRequestResult();
}
foreach (RolePermission item in items)
{
// adjust the role
if (item.Role != null)
{
int role_id = item.Role.Id;
bool role_exists = _context.Roles.Any(a => a.Id == role_id);
if (role_exists)
{
Role role = _context.Roles.First(a => a.Id == role_id);
item.Role = role;
}
}
// adjust the permission
if (item.Permission != null)
{
int permission_id = item.Permission.Id;
bool permission_exists = _context.Permissions.Any(a => a.Id == permission_id);
if (permission_exists)
{
Permission permission = _context.Permissions.First(a => a.Id == permission_id);
item.Permission = permission;
}
}
var exists = _context.RolePermissions.Any(a => a.Id == item.Id);
if (exists)
{
_context.RolePermissions.Update(item);
}
else
{
_context.RolePermissions.Add(item);
}
}
// Save the changes
_context.SaveChanges();
return new NoContentResult();
}
/// <summary>
///
/// </summary>
/// <param name="items"></param>
/// <response code="201">Roles created</response>
public IActionResult RolesBulkPostAsync(Role[] items)
{
if (items == null)
{
return new BadRequestResult();
}
foreach (Role item in items)
{
var exists = _context.Roles.Any(a => a.Id == item.Id);
if (exists)
{
_context.Roles.Update(item);
}
else
{
_context.Roles.Add(item);
}
}
// Save the changes
_context.SaveChanges();
return new NoContentResult();
}
/// <summary>
///
/// </summary>
/// <remarks>Returns a collection of roles</remarks>
/// <response code="200">OK</response>
public virtual IActionResult RolesGetAsync()
{
var result = _context.Roles.Select(x => x.ToViewModel()).ToList();
return new ObjectResult(result);
}
/// <summary>
///
/// </summary>
/// <param name="id">id of Role to delete</param>
/// <response code="200">OK</response>
/// <response code="404">Role not found</response>
public virtual IActionResult RolesIdDeletePostAsync(int id)
{
var role = _context.Roles.FirstOrDefault(x => x.Id == id);
if (role == null)
{
// Not Found
return new StatusCodeResult(404);
}
// remove associated role permission records
var itemsToRemove = _context.RolePermissions.Where(x => x.Role.Id == role.Id);
foreach (var item in itemsToRemove)
{
_context.RolePermissions.Remove(item);
}
_context.Roles.Remove(role);
_context.SaveChanges();
return new ObjectResult(role.ToViewModel());
}
/// <summary>
///
/// </summary>
/// <param name="id">id of Role to fetch</param>
/// <response code="200">OK</response>
/// <response code="404">Role not found</response>
public virtual IActionResult RolesIdGetAsync(int id)
{
var role = _context.Roles.FirstOrDefault(x => x.Id == id);
if (role == null)
{
// Not Found
return new StatusCodeResult(404);
}
return new ObjectResult(role.ToViewModel());
}
/// <summary>
///
/// </summary>
/// <remarks>Get all the permissions for a role</remarks>
/// <param name="id">id of Role to fetch</param>
/// <response code="200">OK</response>
public virtual IActionResult RolesIdPermissionsGetAsync(int id)
{
// Eager loading of related data
var role = _context.Roles
.Where(x => x.Id == id)
.Include(x => x.RolePermissions)
.ThenInclude(rp => rp.Permission)
.FirstOrDefault();
if (role == null)
{
// Not Found
return new StatusCodeResult(404);
}
var dbPermissions = role.RolePermissions.Select(x => x.Permission);
// Create DTO with serializable response
var result = dbPermissions.Select(x => x.ToViewModel()).ToList();
return new ObjectResult(result);
}
/// <summary>
///
/// </summary>
/// <remarks>Updates the permissions for a role</remarks>
/// <param name="id">id of Role to update</param>
/// <param name="items"></param>
/// <response code="200">OK</response>
/// <response code="404">Role not found</response>
public virtual IActionResult RolesIdPermissionsPutAsync(int id, Permission[] items)
{
using (var txn = _context.BeginTransaction())
{
// Eager loading of related data
var role = _context.Roles
.Where(x => x.Id == id)
.Include(x => x.RolePermissions)
.ThenInclude(rolePerm => rolePerm.Permission)
.FirstOrDefault();
if (role == null)
{
// Not Found
return new StatusCodeResult(404);
}
var allPermissions = _context.Permissions.ToList();
var permissionCodes = items.Select(x => x.Code).ToList();
var existingPermissionCodes = role.RolePermissions.Select(x => x.Permission.Code).ToList();
var permissionCodesToAdd = permissionCodes.Where(x => !existingPermissionCodes.Contains(x)).ToList();
// Permissions to add
foreach (var code in permissionCodesToAdd)
{
var permToAdd = allPermissions.FirstOrDefault(x => x.Code == code);
if (permToAdd == null)
{
// TODO throw new BusinessLayerException(string.Format("Invalid Permission Code {0}", code));
// Skip unknown codes for now so a null permission is never added to the role.
continue;
}
role.AddPermission(permToAdd);
}
// Permissions to remove
List<RolePermission> permissionsToRemove = role.RolePermissions.Where(x => !permissionCodes.Contains(x.Permission.Code)).ToList();
foreach (RolePermission perm in permissionsToRemove)
{
role.RemovePermission(perm.Permission);
_context.RolePermissions.Remove(perm);
}
_context.Roles.Update(role);
_context.SaveChanges();
txn.Commit();
List<RolePermission> dbPermissions = _context.RolePermissions.ToList();
// Create DTO with serializable response
var result = dbPermissions.Select(x => x.ToViewModel()).ToList();
return new ObjectResult(result);
}
}
/// <summary>
///
/// </summary>
/// <remarks>Adds permissions to a role</remarks>
/// <param name="id">id of Role to update</param>
/// <param name="items"></param>
/// <response code="200">OK</response>
/// <response code="404">Role not found</response>
public virtual IActionResult RolesIdPermissionsPostAsync(int id, Permission[] items)
{
using (var txn = _context.BeginTransaction())
{
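// Additive update: assign any permissions from the supplied list that the role does not
// already have; existing assignments are left untouched.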
// Eager loading of related data
var role = _context.Roles
.Where(x => x.Id == id)
.Include(x => x.RolePermissions)
.ThenInclude(rolePerm => rolePerm.Permission)
.FirstOrDefault();
if (role == null)
{
// Not Found
return new StatusCodeResult(404);
}
var allPermissions = _context.Permissions.ToList();
var permissionCodes = items.Select(x => x.Code).ToList();
var existingPermissionCodes = role.RolePermissions.Select(x => x.Permission.Code).ToList();
var permissionCodesToAdd = permissionCodes.Where(x => !existingPermissionCodes.Contains(x)).ToList();
// Permissions to add
foreach (var code in permissionCodesToAdd)
{
var permToAdd = allPermissions.FirstOrDefault(x => x.Code == code);
if (permToAdd == null)
{
// TODO throw new BusinessLayerException(string.Format("Invalid Permission Code {0}", code));
// Skip unknown codes for now so a null permission is never added to the role.
continue;
}
role.AddPermission(permToAdd);
}
_context.Roles.Update(role);
_context.SaveChanges();
txn.Commit();
List<RolePermission> dbPermissions = _context.RolePermissions.ToList();
// Create DTO with serializable response
var result = dbPermissions.Select(x => x.ToViewModel()).ToList();
return new ObjectResult(result);
}
}
/// <summary>
/// Updates a role's name and description.
/// </summary>
/// <param name="id">id of Role to update</param>
/// <param name="item"></param>
/// <response code="200">OK</response>
/// <response code="404">Role not found</response>
public virtual IActionResult RolesIdPutAsync(int id, RoleViewModel item)
{
var role = _context.Roles.FirstOrDefault(x => x.Id == id);
if (role == null)
{
// Not Found
return new StatusCodeResult(404);
}
role.Name = item.Name;
role.Description = item.Description;
_context.Roles.Update(role);
// Save changes
_context.SaveChanges();
return new ObjectResult(role.ToViewModel());
}
/// <summary>
///
/// </summary>
/// <remarks>Gets all the users for a role</remarks>
/// <param name="id">id of Role to fetch</param>
/// <response code="200">OK</response>
public virtual IActionResult RolesIdUsersGetAsync(int id)
{
// Eager loading of related data
var role = _context.Roles
.Where(x => x.Id == id)
.Include(x => x.UserRoles)
.ThenInclude(userRole => userRole.User)
.FirstOrDefault();
if (role == null)
{
// Not Found
return new StatusCodeResult(404);
}
var usersWithRole = role.UserRoles;
// Create DTO with serializable response
var result = usersWithRole.Select(x => x.ToViewModel()).ToList();
return new ObjectResult(result);
}
/// <summary>
///
/// </summary>
/// <remarks>Updates the users for a role</remarks>
/// <param name="id">id of Role to update</param>
/// <param name="items"></param>
/// <response code="200">OK</response>
/// <response code="404">Role not found</response>
public virtual IActionResult RolesIdUsersPutAsync(int id, UserRoleViewModel[] items)
{
using (var txn = _context.BeginTransaction())
{
// Eager loading of related data
var role = _context.Roles
.Where(x => x.Id == id)
.Include(x => x.UserRoles)
.ThenInclude(userRole => userRole.User)
.FirstOrDefault();
if (role == null)
{
// Not Found
return new StatusCodeResult(404);
}
var userIds = items.Select(x => x.UserId).ToList();
var allUsers = _context.Users.Where(x => userIds.Contains(x.Id)).ToList();
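// Update effective/expiry dates on existing user-role entries and create new entries
// for users being added to the role.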
foreach (var userRoleDto in items)
{
if (userRoleDto.Id.HasValue)
{
var existingUserRole = role.UserRoles.FirstOrDefault(x => x.Id == userRoleDto.Id.Value);
if (existingUserRole == null)
{
// TODO throw new ResourceNotFoundException(string.Format("Cannot find user role with id {0} on role {1}", userRoleDto.Id.Value, id));
}
else
{
// TODO Check serialization of Dates
existingUserRole.EffectiveDate = userRoleDto.EffectiveDate;
existingUserRole.ExpiryDate = userRoleDto.ExpiryDate;
}
}
else
{
var dbUserRole = new UserRole();
dbUserRole.Role = role;
dbUserRole.User = allUsers.FirstOrDefault(x => x.Id == userRoleDto.UserId);
_context.UserRoles.Add(dbUserRole);
role.UserRoles.Add(dbUserRole);
}
}
// Users to remove
var toRemove = role.UserRoles.Where(x => !userIds.Contains(x.User.Id)).ToList();
toRemove.ForEach(x => role.RemoveUser(x.User));
_context.UserRoles.RemoveRange(toRemove);
_context.Roles.Update(role);
// Save changes
_context.SaveChanges();
txn.Commit();
var result = role.UserRoles.ToList();
return new ObjectResult(result);
}
}
/// <summary>
/// Creates a new role.
/// </summary>
/// <param name="item"></param>
/// <response code="201">Role created</response>
public virtual IActionResult RolesPostAsync(RoleViewModel item)
{
var role = new Role();
role.Description = item.Description;
role.Name = item.Name;
// Save changes
_context.Roles.Add(role);
_context.SaveChanges();
return new ObjectResult(role.ToViewModel());
}
}
}
| |
namespace fyiReporting.RdlDesign
{
/// <summary>
/// Summary description for DataSetsCtl.
/// </summary>
partial class DataSetsCtl
{
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.Container components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
protected override void Dispose(bool disposing)
{
if (disposing)
{
if (components != null)
{
components.Dispose();
}
}
base.Dispose(disposing);
}
#region Component Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(DataSetsCtl));
System.Windows.Forms.DataGridViewCellStyle dataGridViewCellStyle1 = new System.Windows.Forms.DataGridViewCellStyle();
this.splitContainer1 = new System.Windows.Forms.SplitContainer();
this.panel2 = new System.Windows.Forms.Panel();
this.panel4 = new System.Windows.Forms.Panel();
this.scintillaSQL = new ScintillaNET.Scintilla();
this.panel3 = new System.Windows.Forms.Panel();
this.bRefresh = new System.Windows.Forms.Button();
this.bEditSQL = new System.Windows.Forms.Button();
this.panel1 = new System.Windows.Forms.Panel();
this.lDataSetName = new System.Windows.Forms.Label();
this.tbDSName = new System.Windows.Forms.TextBox();
this.tbTimeout = new System.Windows.Forms.NumericUpDown();
this.label3 = new System.Windows.Forms.Label();
this.label1 = new System.Windows.Forms.Label();
this.cbDataSource = new System.Windows.Forms.ComboBox();
this.lDataSource = new System.Windows.Forms.Label();
this.bDeleteField = new System.Windows.Forms.Button();
this.dgFields = new System.Windows.Forms.DataGridView();
this.dgtbName = new System.Windows.Forms.DataGridViewTextBoxColumn();
this.dgtbQueryName = new System.Windows.Forms.DataGridViewTextBoxColumn();
this.dgtbValue = new System.Windows.Forms.DataGridViewTextBoxColumn();
this.dgtbTypeName = new System.Windows.Forms.DataGridViewComboBoxColumn();
this.label2 = new System.Windows.Forms.Label();
this.dataGridTableStyle1 = new System.Windows.Forms.DataGridTableStyle();
((System.ComponentModel.ISupportInitialize)(this.splitContainer1)).BeginInit();
this.splitContainer1.Panel1.SuspendLayout();
this.splitContainer1.Panel2.SuspendLayout();
this.splitContainer1.SuspendLayout();
this.panel2.SuspendLayout();
this.panel4.SuspendLayout();
this.panel3.SuspendLayout();
this.panel1.SuspendLayout();
((System.ComponentModel.ISupportInitialize)(this.tbTimeout)).BeginInit();
((System.ComponentModel.ISupportInitialize)(this.dgFields)).BeginInit();
this.SuspendLayout();
//
// splitContainer1
//
resources.ApplyResources(this.splitContainer1, "splitContainer1");
this.splitContainer1.Name = "splitContainer1";
//
// splitContainer1.Panel1
//
this.splitContainer1.Panel1.Controls.Add(this.panel2);
this.splitContainer1.Panel1.Controls.Add(this.panel1);
//
// splitContainer1.Panel2
//
this.splitContainer1.Panel2.Controls.Add(this.bDeleteField);
this.splitContainer1.Panel2.Controls.Add(this.dgFields);
this.splitContainer1.Panel2.Controls.Add(this.label2);
//
// panel2
//
this.panel2.Controls.Add(this.panel4);
this.panel2.Controls.Add(this.panel3);
resources.ApplyResources(this.panel2, "panel2");
this.panel2.Name = "panel2";
//
// panel4
//
this.panel4.Controls.Add(this.scintillaSQL);
resources.ApplyResources(this.panel4, "panel4");
this.panel4.Name = "panel4";
//
// scintillaSQL
//
resources.ApplyResources(this.scintillaSQL, "scintillaSQL");
this.scintillaSQL.Lexer = ScintillaNET.Lexer.Sql;
this.scintillaSQL.Name = "scintillaSQL";
this.scintillaSQL.UseTabs = false;
this.scintillaSQL.TextChanged += new System.EventHandler(this.tbSQL_TextChanged);
//
// panel3
//
this.panel3.Controls.Add(this.bRefresh);
this.panel3.Controls.Add(this.bEditSQL);
resources.ApplyResources(this.panel3, "panel3");
this.panel3.Name = "panel3";
//
// bRefresh
//
resources.ApplyResources(this.bRefresh, "bRefresh");
this.bRefresh.Name = "bRefresh";
this.bRefresh.Click += new System.EventHandler(this.bRefresh_Click);
//
// bEditSQL
//
resources.ApplyResources(this.bEditSQL, "bEditSQL");
this.bEditSQL.Name = "bEditSQL";
this.bEditSQL.Click += new System.EventHandler(this.bEditSQL_Click);
//
// panel1
//
this.panel1.Controls.Add(this.lDataSetName);
this.panel1.Controls.Add(this.tbDSName);
this.panel1.Controls.Add(this.tbTimeout);
this.panel1.Controls.Add(this.label3);
this.panel1.Controls.Add(this.label1);
this.panel1.Controls.Add(this.cbDataSource);
this.panel1.Controls.Add(this.lDataSource);
resources.ApplyResources(this.panel1, "panel1");
this.panel1.Name = "panel1";
//
// lDataSetName
//
resources.ApplyResources(this.lDataSetName, "lDataSetName");
this.lDataSetName.Name = "lDataSetName";
//
// tbDSName
//
resources.ApplyResources(this.tbDSName, "tbDSName");
this.tbDSName.Name = "tbDSName";
this.tbDSName.TextChanged += new System.EventHandler(this.tbDSName_TextChanged);
//
// tbTimeout
//
resources.ApplyResources(this.tbTimeout, "tbTimeout");
this.tbTimeout.Maximum = new decimal(new int[] {
2147483647,
0,
0,
0});
this.tbTimeout.Name = "tbTimeout";
this.tbTimeout.ValueChanged += new System.EventHandler(this.tbTimeout_ValueChanged);
//
// label3
//
resources.ApplyResources(this.label3, "label3");
this.label3.Name = "label3";
//
// label1
//
resources.ApplyResources(this.label1, "label1");
this.label1.Name = "label1";
//
// cbDataSource
//
resources.ApplyResources(this.cbDataSource, "cbDataSource");
this.cbDataSource.DropDownStyle = System.Windows.Forms.ComboBoxStyle.DropDownList;
this.cbDataSource.Name = "cbDataSource";
this.cbDataSource.SelectedIndexChanged += new System.EventHandler(this.cbDataSource_SelectedIndexChanged);
//
// lDataSource
//
resources.ApplyResources(this.lDataSource, "lDataSource");
this.lDataSource.Name = "lDataSource";
//
// bDeleteField
//
resources.ApplyResources(this.bDeleteField, "bDeleteField");
this.bDeleteField.Name = "bDeleteField";
this.bDeleteField.Click += new System.EventHandler(this.bDeleteField_Click);
//
// dgFields
//
resources.ApplyResources(this.dgFields, "dgFields");
this.dgFields.AutoSizeColumnsMode = System.Windows.Forms.DataGridViewAutoSizeColumnsMode.Fill;
dataGridViewCellStyle1.Alignment = System.Windows.Forms.DataGridViewContentAlignment.MiddleLeft;
dataGridViewCellStyle1.BackColor = System.Drawing.SystemColors.Control;
dataGridViewCellStyle1.Font = new System.Drawing.Font("Microsoft Sans Serif", 8.25F, System.Drawing.FontStyle.Regular, System.Drawing.GraphicsUnit.Point, ((byte)(204)));
dataGridViewCellStyle1.ForeColor = System.Drawing.SystemColors.WindowText;
dataGridViewCellStyle1.SelectionBackColor = System.Drawing.SystemColors.Highlight;
dataGridViewCellStyle1.SelectionForeColor = System.Drawing.SystemColors.HighlightText;
dataGridViewCellStyle1.WrapMode = System.Windows.Forms.DataGridViewTriState.True;
this.dgFields.ColumnHeadersDefaultCellStyle = dataGridViewCellStyle1;
this.dgFields.Columns.AddRange(new System.Windows.Forms.DataGridViewColumn[] {
this.dgtbName,
this.dgtbQueryName,
this.dgtbValue,
this.dgtbTypeName});
this.dgFields.Name = "dgFields";
//
// dgtbName
//
this.dgtbName.DataPropertyName = "Name";
resources.ApplyResources(this.dgtbName, "dgtbName");
this.dgtbName.Name = "dgtbName";
//
// dgtbQueryName
//
this.dgtbQueryName.DataPropertyName = "QueryName";
resources.ApplyResources(this.dgtbQueryName, "dgtbQueryName");
this.dgtbQueryName.Name = "dgtbQueryName";
//
// dgtbValue
//
this.dgtbValue.DataPropertyName = "Value";
resources.ApplyResources(this.dgtbValue, "dgtbValue");
this.dgtbValue.Name = "dgtbValue";
//
// dgtbTypeName
//
this.dgtbTypeName.DataPropertyName = "TypeName";
resources.ApplyResources(this.dgtbTypeName, "dgtbTypeName");
this.dgtbTypeName.Items.AddRange(new object[] {
"System.String",
"System.Int16",
"System.Int32",
"System.Int64",
"System.UInt16",
"System.UInt32",
"System.UInt64",
"System.Single",
"System.Double",
"System.Decimal",
"System.DateTime",
"System.Char",
"System.Boolean",
"System.Byte"});
this.dgtbTypeName.Name = "dgtbTypeName";
this.dgtbTypeName.Resizable = System.Windows.Forms.DataGridViewTriState.True;
this.dgtbTypeName.SortMode = System.Windows.Forms.DataGridViewColumnSortMode.Automatic;
//
// label2
//
resources.ApplyResources(this.label2, "label2");
this.label2.Name = "label2";
//
// dataGridTableStyle1
//
this.dataGridTableStyle1.DataGrid = null;
this.dataGridTableStyle1.HeaderForeColor = System.Drawing.SystemColors.ControlText;
//
// DataSetsCtl
//
this.Controls.Add(this.splitContainer1);
resources.ApplyResources(this, "$this");
this.Name = "DataSetsCtl";
this.splitContainer1.Panel1.ResumeLayout(false);
this.splitContainer1.Panel2.ResumeLayout(false);
((System.ComponentModel.ISupportInitialize)(this.splitContainer1)).EndInit();
this.splitContainer1.ResumeLayout(false);
this.panel2.ResumeLayout(false);
this.panel4.ResumeLayout(false);
this.panel3.ResumeLayout(false);
this.panel1.ResumeLayout(false);
this.panel1.PerformLayout();
((System.ComponentModel.ISupportInitialize)(this.tbTimeout)).EndInit();
((System.ComponentModel.ISupportInitialize)(this.dgFields)).EndInit();
this.ResumeLayout(false);
}
#endregion
private System.Windows.Forms.DataGridTableStyle dataGridTableStyle1;
private System.Windows.Forms.SplitContainer splitContainer1;
private System.Windows.Forms.NumericUpDown tbTimeout;
private System.Windows.Forms.Label label3;
private System.Windows.Forms.Button bRefresh;
private System.Windows.Forms.Button bEditSQL;
private System.Windows.Forms.Label label1;
private System.Windows.Forms.ComboBox cbDataSource;
private System.Windows.Forms.Label lDataSource;
private System.Windows.Forms.TextBox tbDSName;
private System.Windows.Forms.Label lDataSetName;
private System.Windows.Forms.Button bDeleteField;
private System.Windows.Forms.DataGridView dgFields;
private System.Windows.Forms.Label label2;
private System.Windows.Forms.DataGridViewTextBoxColumn dgtbName;
private System.Windows.Forms.DataGridViewTextBoxColumn dgtbQueryName;
private System.Windows.Forms.DataGridViewTextBoxColumn dgtbValue;
private System.Windows.Forms.DataGridViewComboBoxColumn dgtbTypeName;
private System.Windows.Forms.Panel panel2;
private System.Windows.Forms.Panel panel4;
private ScintillaNET.Scintilla scintillaSQL;
private System.Windows.Forms.Panel panel3;
private System.Windows.Forms.Panel panel1;
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.ComponentModel;
using System.Data.Common;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Runtime.InteropServices;
using System.Text;
using SysTx = System.Transactions;
namespace System.Data.Odbc
{
public sealed partial class OdbcConnection : DbConnection, ICloneable
{
private int _connectionTimeout = ADP.DefaultConnectionTimeout;
private OdbcInfoMessageEventHandler _infoMessageEventHandler;
private WeakReference _weakTransaction;
private OdbcConnectionHandle _connectionHandle;
private ConnectionState _extraState = default(ConnectionState); // extras, like Executing and Fetching, that we add to the State.
public OdbcConnection(string connectionString) : this()
{
ConnectionString = connectionString;
}
private OdbcConnection(OdbcConnection connection) : this()
{ // Clone
CopyFrom(connection);
_connectionTimeout = connection._connectionTimeout;
}
internal OdbcConnectionHandle ConnectionHandle
{
get
{
return _connectionHandle;
}
set
{
Debug.Assert(null == _connectionHandle, "reopening a connection?");
_connectionHandle = value;
}
}
public override string ConnectionString
{
get
{
return ConnectionString_Get();
}
set
{
ConnectionString_Set(value);
}
}
[
DefaultValue(ADP.DefaultConnectionTimeout),
DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden),
]
public new int ConnectionTimeout
{
get
{
return _connectionTimeout;
}
set
{
if (value < 0)
throw ODBC.NegativeArgument();
if (IsOpen)
throw ODBC.CantSetPropertyOnOpenConnection();
_connectionTimeout = value;
}
}
[
DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden),
]
public override string Database
{
get
{
if (IsOpen && !ProviderInfo.NoCurrentCatalog)
{
//Note: CURRENT_CATALOG may not be supported by the current driver, in which
//case we ignore any error (without throwing) and just return string.Empty,
//since we really don't want people to need a try/catch around simple properties.
return GetConnectAttrString(ODBC32.SQL_ATTR.CURRENT_CATALOG);
}
//Database is not available before open, and it's not worth parsing the
//connection string for it.
return string.Empty;
}
}
[
Browsable(false),
DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden),
]
public override string DataSource
{
get
{
if (IsOpen)
{
// note: This will return an empty string if the driver keyword was used to connect
// see ODBC3.0 Programmers Reference, SQLGetInfo
//
return GetInfoStringUnhandled(ODBC32.SQL_INFO.SERVER_NAME, true);
}
return string.Empty;
}
}
[
Browsable(false),
DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden),
]
public override string ServerVersion
{
get
{
return InnerConnection.ServerVersion;
}
}
[
Browsable(false),
DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden),
]
public override ConnectionState State
{
get
{
return InnerConnection.State;
}
}
internal OdbcConnectionPoolGroupProviderInfo ProviderInfo
{
get
{
Debug.Assert(null != this.PoolGroup, "PoolGroup must never be null when accessing ProviderInfo");
return (OdbcConnectionPoolGroupProviderInfo)this.PoolGroup.ProviderInfo;
}
}
internal ConnectionState InternalState
{
get
{
return (this.State | _extraState);
}
}
internal bool IsOpen
{
get
{
return (InnerConnection is OdbcConnectionOpen);
}
}
internal OdbcTransaction LocalTransaction
{
get
{
OdbcTransaction result = null;
if (null != _weakTransaction)
{
result = ((OdbcTransaction)_weakTransaction.Target);
}
return result;
}
set
{
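// Track the local transaction through a WeakReference so an abandoned transaction
// (never committed or rolled back) can still be collected; RollbackDeadTransaction
// cleans up the connection handle afterwards.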
_weakTransaction = null;
if (null != value)
{
_weakTransaction = new WeakReference((OdbcTransaction)value);
}
}
}
[
Browsable(false),
DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden),
]
public string Driver
{
get
{
if (IsOpen)
{
if (ProviderInfo.DriverName == null)
{
ProviderInfo.DriverName = GetInfoStringUnhandled(ODBC32.SQL_INFO.DRIVER_NAME);
}
return ProviderInfo.DriverName;
}
return ADP.StrEmpty;
}
}
internal bool IsV3Driver
{
get
{
if (ProviderInfo.DriverVersion == null)
{
ProviderInfo.DriverVersion = GetInfoStringUnhandled(ODBC32.SQL_INFO.DRIVER_ODBC_VER);
// protect against null and index out of range; the version number cannot be bigger than 99
if (ProviderInfo.DriverVersion != null && ProviderInfo.DriverVersion.Length >= 2)
{
try
{ // mdac 89269: driver may return a malformed string
ProviderInfo.IsV3Driver = (int.Parse(ProviderInfo.DriverVersion.Substring(0, 2), CultureInfo.InvariantCulture) >= 3);
}
catch (System.FormatException e)
{
ProviderInfo.IsV3Driver = false;
ADP.TraceExceptionWithoutRethrow(e);
}
}
else
{
ProviderInfo.DriverVersion = "";
}
}
return ProviderInfo.IsV3Driver;
}
}
public event OdbcInfoMessageEventHandler InfoMessage
{
add
{
_infoMessageEventHandler += value;
}
remove
{
_infoMessageEventHandler -= value;
}
}
internal char EscapeChar(string method)
{
CheckState(method);
if (!ProviderInfo.HasEscapeChar)
{
string escapeCharString;
escapeCharString = GetInfoStringUnhandled(ODBC32.SQL_INFO.SEARCH_PATTERN_ESCAPE);
Debug.Assert((escapeCharString.Length <= 1), "Can't handle multichar quotes");
ProviderInfo.EscapeChar = (escapeCharString.Length == 1) ? escapeCharString[0] : QuoteChar(method)[0];
}
return ProviderInfo.EscapeChar;
}
internal string QuoteChar(string method)
{
CheckState(method);
if (!ProviderInfo.HasQuoteChar)
{
string quoteCharString;
quoteCharString = GetInfoStringUnhandled(ODBC32.SQL_INFO.IDENTIFIER_QUOTE_CHAR);
Debug.Assert((quoteCharString.Length <= 1), "Can't handle multichar quotes");
ProviderInfo.QuoteChar = (1 == quoteCharString.Length) ? quoteCharString : "\0";
}
return ProviderInfo.QuoteChar;
}
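// Typical usage sketch (illustrative only; the connection string, table and SQL below are placeholders):
//
//   using (var connection = new OdbcConnection("DSN=SampleDsn"))
//   {
//       connection.Open();
//       using (OdbcTransaction transaction = connection.BeginTransaction(IsolationLevel.ReadCommitted))
//       using (OdbcCommand command = connection.CreateCommand())
//       {
//           command.Transaction = transaction;
//           command.CommandText = "UPDATE Accounts SET Balance = Balance - 10 WHERE Id = 1";
//           command.ExecuteNonQuery();
//           transaction.Commit();
//       }
//   }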
public new OdbcTransaction BeginTransaction()
{
return BeginTransaction(IsolationLevel.Unspecified);
}
public new OdbcTransaction BeginTransaction(IsolationLevel isolevel)
{
return (OdbcTransaction)InnerConnection.BeginTransaction(isolevel);
}
private void RollbackDeadTransaction()
{
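// If the transaction object was collected without Commit or Rollback, the weak reference
// is dead; roll back the pending work directly on the connection handle.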
WeakReference weak = _weakTransaction;
if ((null != weak) && !weak.IsAlive)
{
_weakTransaction = null;
ConnectionHandle.CompleteTransaction(ODBC32.SQL_ROLLBACK);
}
}
public override void ChangeDatabase(string value)
{
InnerConnection.ChangeDatabase(value);
}
internal void CheckState(string method)
{
ConnectionState state = InternalState;
if (ConnectionState.Open != state)
{
throw ADP.OpenConnectionRequired(method, state); // MDAC 68323
}
}
object ICloneable.Clone()
{
OdbcConnection clone = new OdbcConnection(this);
return clone;
}
internal bool ConnectionIsAlive(Exception innerException)
{
if (IsOpen)
{
if (!ProviderInfo.NoConnectionDead)
{
int isDead = GetConnectAttr(ODBC32.SQL_ATTR.CONNECTION_DEAD, ODBC32.HANDLER.IGNORE);
if (ODBC32.SQL_CD_TRUE == isDead)
{
Close();
throw ADP.ConnectionIsDisabled(innerException);
}
}
// else connection is still alive or attribute not supported
return true;
}
return false;
}
public new OdbcCommand CreateCommand()
{
return new OdbcCommand(string.Empty, this);
}
internal OdbcStatementHandle CreateStatementHandle()
{
return new OdbcStatementHandle(ConnectionHandle);
}
public override void Close()
{
InnerConnection.CloseConnection(this, ConnectionFactory);
OdbcConnectionHandle connectionHandle = _connectionHandle;
if (null != connectionHandle)
{
_connectionHandle = null;
// If there is a pending transaction, automatically rollback.
WeakReference weak = _weakTransaction;
if (null != weak)
{
_weakTransaction = null;
IDisposable transaction = weak.Target as OdbcTransaction;
if ((null != transaction) && weak.IsAlive)
{
transaction.Dispose();
}
// else transaction will be rolled back when handle is disposed
}
connectionHandle.Dispose();
}
}
private void DisposeMe(bool disposing)
{ // MDAC 65459
}
internal string GetConnectAttrString(ODBC32.SQL_ATTR attribute)
{
string value = "";
int cbActual = 0;
byte[] buffer = new byte[100];
OdbcConnectionHandle connectionHandle = ConnectionHandle;
if (null != connectionHandle)
{
ODBC32.RetCode retcode = connectionHandle.GetConnectionAttribute(attribute, buffer, out cbActual);
if (buffer.Length + 2 <= cbActual)
{
// 2 bytes for unicode null-termination character
// retry with cbActual because original buffer was too small
buffer = new byte[cbActual + 2];
retcode = connectionHandle.GetConnectionAttribute(attribute, buffer, out cbActual);
}
if ((ODBC32.RetCode.SUCCESS == retcode) || (ODBC32.RetCode.SUCCESS_WITH_INFO == retcode))
{
value = Encoding.Unicode.GetString(buffer, 0, Math.Min(cbActual, buffer.Length));
}
else if (retcode == ODBC32.RetCode.ERROR)
{
string sqlstate = GetDiagSqlState();
if (("HYC00" == sqlstate) || ("HY092" == sqlstate) || ("IM001" == sqlstate))
{
FlagUnsupportedConnectAttr(attribute);
}
// not throwing errors if not supported or other failure
}
}
return value;
}
internal int GetConnectAttr(ODBC32.SQL_ATTR attribute, ODBC32.HANDLER handler)
{
int retval = -1;
int cbActual = 0;
byte[] buffer = new byte[4];
OdbcConnectionHandle connectionHandle = ConnectionHandle;
if (null != connectionHandle)
{
ODBC32.RetCode retcode = connectionHandle.GetConnectionAttribute(attribute, buffer, out cbActual);
if ((ODBC32.RetCode.SUCCESS == retcode) || (ODBC32.RetCode.SUCCESS_WITH_INFO == retcode))
{
retval = BitConverter.ToInt32(buffer, 0);
}
else
{
if (retcode == ODBC32.RetCode.ERROR)
{
string sqlstate = GetDiagSqlState();
if (("HYC00" == sqlstate) || ("HY092" == sqlstate) || ("IM001" == sqlstate))
{
FlagUnsupportedConnectAttr(attribute);
}
}
if (handler == ODBC32.HANDLER.THROW)
{
this.HandleError(connectionHandle, retcode);
}
}
}
return retval;
}
private string GetDiagSqlState()
{
OdbcConnectionHandle connectionHandle = ConnectionHandle;
string sqlstate;
connectionHandle.GetDiagnosticField(out sqlstate);
return sqlstate;
}
internal ODBC32.RetCode GetInfoInt16Unhandled(ODBC32.SQL_INFO info, out short resultValue)
{
byte[] buffer = new byte[2];
ODBC32.RetCode retcode = ConnectionHandle.GetInfo1(info, buffer);
resultValue = BitConverter.ToInt16(buffer, 0);
return retcode;
}
internal ODBC32.RetCode GetInfoInt32Unhandled(ODBC32.SQL_INFO info, out int resultValue)
{
byte[] buffer = new byte[4];
ODBC32.RetCode retcode = ConnectionHandle.GetInfo1(info, buffer);
resultValue = BitConverter.ToInt32(buffer, 0);
return retcode;
}
private int GetInfoInt32Unhandled(ODBC32.SQL_INFO infotype)
{
byte[] buffer = new byte[4];
ConnectionHandle.GetInfo1(infotype, buffer);
return BitConverter.ToInt32(buffer, 0);
}
internal string GetInfoStringUnhandled(ODBC32.SQL_INFO info)
{
return GetInfoStringUnhandled(info, false);
}
private string GetInfoStringUnhandled(ODBC32.SQL_INFO info, bool handleError)
{
//SQLGetInfo
string value = null;
short cbActual = 0;
byte[] buffer = new byte[100];
OdbcConnectionHandle connectionHandle = ConnectionHandle;
if (null != connectionHandle)
{
ODBC32.RetCode retcode = connectionHandle.GetInfo2(info, buffer, out cbActual);
if (buffer.Length < cbActual - 2)
{
// 2 bytes for unicode null-termination character
// retry with cbActual because original buffer was too small
buffer = new byte[cbActual + 2];
retcode = connectionHandle.GetInfo2(info, buffer, out cbActual);
}
if (retcode == ODBC32.RetCode.SUCCESS || retcode == ODBC32.RetCode.SUCCESS_WITH_INFO)
{
value = Encoding.Unicode.GetString(buffer, 0, Math.Min(cbActual, buffer.Length));
}
else if (handleError)
{
this.HandleError(ConnectionHandle, retcode);
}
}
else if (handleError)
{
value = "";
}
return value;
}
// non-throwing HandleError
internal Exception HandleErrorNoThrow(OdbcHandle hrHandle, ODBC32.RetCode retcode)
{
Debug.Assert(retcode != ODBC32.RetCode.INVALID_HANDLE, "retcode must never be ODBC32.RetCode.INVALID_HANDLE");
switch (retcode)
{
case ODBC32.RetCode.SUCCESS:
break;
case ODBC32.RetCode.SUCCESS_WITH_INFO:
{
//Optimize to only create the event objects and obtain error info if
//the user is really interested in retrieving the events...
if (_infoMessageEventHandler != null)
{
OdbcErrorCollection errors = ODBC32.GetDiagErrors(null, hrHandle, retcode);
errors.SetSource(this.Driver);
OnInfoMessage(new OdbcInfoMessageEventArgs(errors));
}
break;
}
default:
OdbcException e = OdbcException.CreateException(ODBC32.GetDiagErrors(null, hrHandle, retcode), retcode);
if (e != null)
{
e.Errors.SetSource(this.Driver);
}
ConnectionIsAlive(e); // this will close and throw if the connection is dead
return (Exception)e;
}
return null;
}
internal void HandleError(OdbcHandle hrHandle, ODBC32.RetCode retcode)
{
Exception e = HandleErrorNoThrow(hrHandle, retcode);
switch (retcode)
{
case ODBC32.RetCode.SUCCESS:
case ODBC32.RetCode.SUCCESS_WITH_INFO:
Debug.Assert(null == e, "success exception");
break;
default:
Debug.Assert(null != e, "failure without exception");
throw e;
}
}
public override void Open()
{
try
{
InnerConnection.OpenConnection(this, ConnectionFactory);
}
catch (DllNotFoundException e) when (!RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
{
throw new DllNotFoundException(SR.Odbc_UnixOdbcNotFound + Environment.NewLine + e.Message);
}
// SQLBUDT #276132 - need to manually enlist in some cases, because
// native ODBC doesn't know about SysTx transactions.
if (ADP.NeedManualEnlistment())
{
EnlistTransaction(SysTx.Transaction.Current);
}
}
private void OnInfoMessage(OdbcInfoMessageEventArgs args)
{
if (null != _infoMessageEventHandler)
{
try
{
_infoMessageEventHandler(this, args);
}
catch (Exception e)
{
//
if (!ADP.IsCatchableOrSecurityExceptionType(e))
{
throw;
}
ADP.TraceExceptionWithoutRethrow(e);
}
}
}
public static void ReleaseObjectPool()
{
OdbcEnvironment.ReleaseObjectPool();
}
internal OdbcTransaction SetStateExecuting(string method, OdbcTransaction transaction)
{ // MDAC 69003
if (null != _weakTransaction)
{ // transaction may exist
OdbcTransaction weak = (_weakTransaction.Target as OdbcTransaction);
if (transaction != weak)
{ // transaction doesn't exist
if (null == transaction)
{ // transaction exists
throw ADP.TransactionRequired(method);
}
if (this != transaction.Connection)
{
// transaction can't have come from this connection
throw ADP.TransactionConnectionMismatch();
}
// if transaction is zombied, we don't know the original connection
transaction = null; // MDAC 69264
}
}
else if (null != transaction)
{ // no transaction started
if (null != transaction.Connection)
{
// transaction can't have come from this connection
throw ADP.TransactionConnectionMismatch();
}
// if transaction is zombied, we don't know the original connection
transaction = null; // MDAC 69264
}
ConnectionState state = InternalState;
if (ConnectionState.Open != state)
{
NotifyWeakReference(OdbcReferenceCollection.Recover); // recover for a potentially finalized reader
state = InternalState;
if (ConnectionState.Open != state)
{
if (0 != (ConnectionState.Fetching & state))
{
throw ADP.OpenReaderExists();
}
throw ADP.OpenConnectionRequired(method, state);
}
}
return transaction;
}
// This adds a type to the list of types that are supported by the driver
// (don't need to know that for all the types)
//
internal void SetSupportedType(ODBC32.SQL_TYPE sqltype)
{
ODBC32.SQL_CVT sqlcvt;
switch (sqltype)
{
case ODBC32.SQL_TYPE.NUMERIC:
{
sqlcvt = ODBC32.SQL_CVT.NUMERIC;
break;
}
case ODBC32.SQL_TYPE.WCHAR:
{
sqlcvt = ODBC32.SQL_CVT.WCHAR;
break;
}
case ODBC32.SQL_TYPE.WVARCHAR:
{
sqlcvt = ODBC32.SQL_CVT.WVARCHAR;
break;
}
case ODBC32.SQL_TYPE.WLONGVARCHAR:
{
sqlcvt = ODBC32.SQL_CVT.WLONGVARCHAR;
break;
}
default:
// other types are irrelevant at this time
return;
}
ProviderInfo.TestedSQLTypes |= (int)sqlcvt;
ProviderInfo.SupportedSQLTypes |= (int)sqlcvt;
}
internal void FlagRestrictedSqlBindType(ODBC32.SQL_TYPE sqltype)
{
ODBC32.SQL_CVT sqlcvt;
switch (sqltype)
{
case ODBC32.SQL_TYPE.NUMERIC:
{
sqlcvt = ODBC32.SQL_CVT.NUMERIC;
break;
}
case ODBC32.SQL_TYPE.DECIMAL:
{
sqlcvt = ODBC32.SQL_CVT.DECIMAL;
break;
}
default:
// other types are irrelevant at this time
return;
}
ProviderInfo.RestrictedSQLBindTypes |= (int)sqlcvt;
}
internal void FlagUnsupportedConnectAttr(ODBC32.SQL_ATTR Attribute)
{
switch (Attribute)
{
case ODBC32.SQL_ATTR.CURRENT_CATALOG:
ProviderInfo.NoCurrentCatalog = true;
break;
case ODBC32.SQL_ATTR.CONNECTION_DEAD:
ProviderInfo.NoConnectionDead = true;
break;
default:
Debug.Fail("Can't flag unknown Attribute");
break;
}
}
internal void FlagUnsupportedStmtAttr(ODBC32.SQL_ATTR Attribute)
{
switch (Attribute)
{
case ODBC32.SQL_ATTR.QUERY_TIMEOUT:
ProviderInfo.NoQueryTimeout = true;
break;
case (ODBC32.SQL_ATTR)ODBC32.SQL_SOPT_SS.NOBROWSETABLE:
ProviderInfo.NoSqlSoptSSNoBrowseTable = true;
break;
case (ODBC32.SQL_ATTR)ODBC32.SQL_SOPT_SS.HIDDEN_COLUMNS:
ProviderInfo.NoSqlSoptSSHiddenColumns = true;
break;
default:
Debug.Fail("Can't flag unknown Attribute");
break;
}
}
internal void FlagUnsupportedColAttr(ODBC32.SQL_DESC v3FieldId, ODBC32.SQL_COLUMN v2FieldId)
{
if (IsV3Driver)
{
switch (v3FieldId)
{
case (ODBC32.SQL_DESC)ODBC32.SQL_CA_SS.COLUMN_KEY:
// SSS_WARNINGS_OFF
ProviderInfo.NoSqlCASSColumnKey = true;
break;
// SSS_WARNINGS_ON
default:
Debug.Fail("Can't flag unknown Attribute");
break;
}
}
else
{
switch (v2FieldId)
{
default:
Debug.Fail("Can't flag unknown Attribute");
break;
}
}
}
internal bool SQLGetFunctions(ODBC32.SQL_API odbcFunction)
{
//SQLGetFunctions
ODBC32.RetCode retcode;
short fExists;
Debug.Assert((short)odbcFunction != 0, "SQL_API_ALL_FUNCTIONS is not supported");
OdbcConnectionHandle connectionHandle = ConnectionHandle;
if (null != connectionHandle)
{
retcode = connectionHandle.GetFunctions(odbcFunction, out fExists);
}
else
{
Debug.Fail("GetFunctions called and ConnectionHandle is null (connection is disposed?)");
throw ODBC.ConnectionClosed();
}
if (retcode != ODBC32.RetCode.SUCCESS)
this.HandleError(connectionHandle, retcode);
if (fExists == 0)
{
return false;
}
else
{
return true;
}
}
internal bool TestTypeSupport(ODBC32.SQL_TYPE sqltype)
{
ODBC32.SQL_CONVERT sqlconvert;
ODBC32.SQL_CVT sqlcvt;
// we need to convert the sqltype to sqlconvert and sqlcvt first
//
switch (sqltype)
{
case ODBC32.SQL_TYPE.NUMERIC:
{
sqlconvert = ODBC32.SQL_CONVERT.NUMERIC;
sqlcvt = ODBC32.SQL_CVT.NUMERIC;
break;
}
case ODBC32.SQL_TYPE.WCHAR:
{
sqlconvert = ODBC32.SQL_CONVERT.CHAR;
sqlcvt = ODBC32.SQL_CVT.WCHAR;
break;
}
case ODBC32.SQL_TYPE.WVARCHAR:
{
sqlconvert = ODBC32.SQL_CONVERT.VARCHAR;
sqlcvt = ODBC32.SQL_CVT.WVARCHAR;
break;
}
case ODBC32.SQL_TYPE.WLONGVARCHAR:
{
sqlconvert = ODBC32.SQL_CONVERT.LONGVARCHAR;
sqlcvt = ODBC32.SQL_CVT.WLONGVARCHAR;
break;
}
default:
Debug.Fail("Testing that sqltype is currently not supported");
return false;
}
// now we can check if we have already tested that type
// if not we need to do so
if (0 == (ProviderInfo.TestedSQLTypes & (int)sqlcvt))
{
int flags;
flags = GetInfoInt32Unhandled((ODBC32.SQL_INFO)sqlconvert);
flags = flags & (int)sqlcvt;
ProviderInfo.TestedSQLTypes |= (int)sqlcvt;
ProviderInfo.SupportedSQLTypes |= flags;
}
// now check if the type is supported and return the result
//
return (0 != (ProviderInfo.SupportedSQLTypes & (int)sqlcvt));
}
internal bool TestRestrictedSqlBindType(ODBC32.SQL_TYPE sqltype)
{
ODBC32.SQL_CVT sqlcvt;
switch (sqltype)
{
case ODBC32.SQL_TYPE.NUMERIC:
{
sqlcvt = ODBC32.SQL_CVT.NUMERIC;
break;
}
case ODBC32.SQL_TYPE.DECIMAL:
{
sqlcvt = ODBC32.SQL_CVT.DECIMAL;
break;
}
default:
Debug.Fail("Testing that sqltype is currently not supported");
return false;
}
return (0 != (ProviderInfo.RestrictedSQLBindTypes & (int)sqlcvt));
}
// suppress this message - we cannot use SafeHandle here. Also, see notes in the code (VSTFDEVDIV# 560355)
[SuppressMessage("Microsoft.Reliability", "CA2004:RemoveCallsToGCKeepAlive")]
protected override DbTransaction BeginDbTransaction(IsolationLevel isolationLevel)
{
DbTransaction transaction = InnerConnection.BeginTransaction(isolationLevel);
// VSTFDEVDIV# 560355 - InnerConnection doesn't maintain a ref on the outer connection (this) and
// subsequently leaves open the possibility that the outer connection could be GC'ed before the DbTransaction
// is fully hooked up (leaving a DbTransaction with a null connection property). Ensure that this is reachable
// until the completion of BeginTransaction with KeepAlive
GC.KeepAlive(this);
return transaction;
}
internal OdbcTransaction Open_BeginTransaction(IsolationLevel isolevel)
{
CheckState(ADP.BeginTransaction); // MDAC 68323
RollbackDeadTransaction();
if ((null != _weakTransaction) && _weakTransaction.IsAlive)
{ // regression from Dispose/Finalize work
throw ADP.ParallelTransactionsNotSupported(this);
}
//Use the default for unspecified.
switch (isolevel)
{
case IsolationLevel.Unspecified:
case IsolationLevel.ReadUncommitted:
case IsolationLevel.ReadCommitted:
case IsolationLevel.RepeatableRead:
case IsolationLevel.Serializable:
case IsolationLevel.Snapshot:
break;
case IsolationLevel.Chaos:
throw ODBC.NotSupportedIsolationLevel(isolevel);
default:
throw ADP.InvalidIsolationLevel(isolevel);
};
//Start the transaction
OdbcConnectionHandle connectionHandle = ConnectionHandle;
ODBC32.RetCode retcode = connectionHandle.BeginTransaction(ref isolevel);
if (retcode == ODBC32.RetCode.ERROR)
{
HandleError(connectionHandle, retcode);
}
OdbcTransaction transaction = new OdbcTransaction(this, isolevel, connectionHandle);
_weakTransaction = new WeakReference(transaction); // MDAC 69188
return transaction;
}
internal void Open_ChangeDatabase(string value)
{
CheckState(ADP.ChangeDatabase);
// Database name must not be null, empty or whitespace
if ((null == value) || (0 == value.Trim().Length))
{ // MDAC 62679
throw ADP.EmptyDatabaseName();
}
if (1024 < value.Length * 2 + 2)
{
throw ADP.DatabaseNameTooLong();
}
RollbackDeadTransaction();
//Set the database
OdbcConnectionHandle connectionHandle = ConnectionHandle;
ODBC32.RetCode retcode = connectionHandle.SetConnectionAttribute3(ODBC32.SQL_ATTR.CURRENT_CATALOG, value, checked((int)value.Length * 2));
if (retcode != ODBC32.RetCode.SUCCESS)
{
HandleError(connectionHandle, retcode);
}
}
internal string Open_GetServerVersion()
{
//SQLGetInfo - SQL_DBMS_VER
return GetInfoStringUnhandled(ODBC32.SQL_INFO.DBMS_VER, true);
}
}
}