using Signum.Engine.Maps;
using Signum.Entities;
using Signum.Entities.DynamicQuery;
using Signum.Utilities;
using Signum.Utilities.Reflection;
using System;
using System.Collections.Generic;
using System.Data;
using System.Data.Common;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using System.Text;
namespace Signum.Engine.Linq
{
/// <summary>
/// QueryFormatter is a visitor that converts a bound expression tree into SQL query text
/// </summary>
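/// <remarks>
/// Illustrative example (added for clarity, not part of the original source): formatting a bound tree
/// equivalent to "people.Where(p => p.Age > 18).Select(p => p.Name)" would yield a SqlPreCommandSimple
/// whose text is roughly "SELECT p.Name FROM Person AS p WHERE (p.Age > @p0)", with @p0 collected as a
/// DbParameter (see CreateParameter and VisitConstant below).
/// </remarks>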
internal class QueryFormatter : DbExpressionVisitor
{
Schema schema = Schema.Current;
bool isPostgres = Schema.Current.Settings.IsPostgres;
StringBuilder sb = new StringBuilder();
int indent = 2;
int depth;
ParameterExpression row = Expression.Parameter(typeof(IProjectionRow), "row");
static PropertyInfo miReader = ReflectionTools.GetPropertyInfo((IProjectionRow row) => row.Reader);
class DbParameterPair
{
internal DbParameter Parameter;
internal string Name;
public DbParameterPair(DbParameter parameter, string name)
{
Parameter = parameter;
Name = name;
}
}
Dictionary<Expression, DbParameterPair> parameterExpressions = new Dictionary<Expression, DbParameterPair>();
int parameter = 0;
public string GetNextParamAlias()
{
return "@p" + (parameter++);
}
DbParameterPair CreateParameter(ConstantExpression value)
{
string name = GetNextParamAlias();
bool nullable = value.Type.IsClass || value.Type.IsNullable();
object? val = value.Value;
Type clrType = value.Type.UnNullify();
if (clrType.IsEnum)
{
clrType = typeof(int);
val = val == null ? (int?)null : Convert.ToInt32(val);
}
var typePair = Schema.Current.Settings.GetSqlDbTypePair(clrType);
var pb = Connector.Current.ParameterBuilder;
var param = pb.CreateParameter(name, typePair.DbType, typePair.UserDefinedTypeName, nullable, val ?? DBNull.Value);
return new DbParameterPair(param, name);
}
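// Note (added for clarity, not in the original source): parameters are cached per ConstantExpression in
// parameterExpressions (see VisitConstant), so the same constant node always renders with the same alias.
// Enum constants are sent as their underlying int value, e.g. Expression.Constant(DayOfWeek.Monday) would
// become a parameter like @p0 of DB type int with value 1.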
ObjectNameOptions objectNameOptions;
private QueryFormatter()
{
objectNameOptions = ObjectName.CurrentOptions;
}
static internal SqlPreCommandSimple Format(Expression expression)
{
QueryFormatter qf = new QueryFormatter();
qf.Visit(expression);
var parameters = qf.parameterExpressions.Values.Select(pi => pi.Parameter).ToList();
var sqlpc = new SqlPreCommandSimple(qf.sb.ToString(), parameters);
return sqlpc;
}
protected enum Indentation
{
Same,
Inner,
Outer
}
internal int IndentationWidth
{
get { return this.indent; }
set { this.indent = value; }
}
private void AppendNewLine(Indentation style)
{
sb.AppendLine();
this.Indent(style);
for (int i = 0, n = this.depth * this.indent; i < n; i++)
{
sb.Append(" ");
}
}
private void Indent(Indentation style)
{
if (style == Indentation.Inner)
{
this.depth++;
}
else if (style == Indentation.Outer)
{
this.depth--;
System.Diagnostics.Debug.Assert(this.depth >= 0);
}
}
protected override Expression VisitUnary(UnaryExpression u)
{
switch (u.NodeType)
{
case ExpressionType.Not:
sb.Append(" NOT ");
this.Visit(u.Operand);
break;
case ExpressionType.Negate:
sb.Append(" - ");
this.Visit(u.Operand);
break;
case ExpressionType.UnaryPlus:
sb.Append(" + ");
this.Visit(u.Operand);
break;
case ExpressionType.Convert:
//The only explicit conversions are to Binary and from DateTime to numbers
this.Visit(u.Operand);
break;
default:
throw new NotSupportedException(string.Format("The unary perator {0} is not supported", u.NodeType));
}
return u;
}
protected override Expression VisitBinary(BinaryExpression b)
{
if (b.NodeType == ExpressionType.Coalesce)
{
sb.Append("COALESCE(");
Visit(b.Left);
sb.Append(",");
Visit(b.Right);
sb.Append(")");
}
else if (b.NodeType == ExpressionType.Equal || b.NodeType == ExpressionType.NotEqual)
{
sb.Append("(");
Visit(b.Left);
sb.Append(b.NodeType == ExpressionType.Equal ? " = " : " <> ");
Visit(b.Right);
sb.Append(")");
}
else if (b.NodeType == ExpressionType.ArrayIndex)
{
Visit(b.Left);
sb.Append("[");
Visit(b.Right);
sb.Append("]");
}
else
{
sb.Append("(");
this.Visit(b.Left);
switch (b.NodeType)
{
case ExpressionType.And:
case ExpressionType.AndAlso:
sb.Append(b.Type.UnNullify() == typeof(bool) ? " AND " : " & ");
break;
case ExpressionType.Or:
case ExpressionType.OrElse:
sb.Append(b.Type.UnNullify() == typeof(bool) ? " OR " : " | ");
break;
case ExpressionType.ExclusiveOr:
sb.Append(" ^ ");
break;
case ExpressionType.LessThan:
sb.Append(" < ");
break;
case ExpressionType.LessThanOrEqual:
sb.Append(" <= ");
break;
case ExpressionType.GreaterThan:
sb.Append(" > ");
break;
case ExpressionType.GreaterThanOrEqual:
sb.Append(" >= ");
break;
case ExpressionType.Add:
case ExpressionType.AddChecked:
if (this.isPostgres && (b.Left.Type == typeof(string) || b.Right.Type == typeof(string)))
sb.Append(" || ");
else
sb.Append(" + ");
break;
case ExpressionType.Subtract:
case ExpressionType.SubtractChecked:
sb.Append(" - ");
break;
case ExpressionType.Multiply:
case ExpressionType.MultiplyChecked:
sb.Append(" * ");
break;
case ExpressionType.Divide:
sb.Append(" / ");
break;
case ExpressionType.Modulo:
sb.Append(" % ");
break;
default:
throw new NotSupportedException(string.Format("The binary operator {0} is not supported", b.NodeType));
}
this.Visit(b.Right);
sb.Append(")");
}
return b;
}
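// Illustrative output (not from the original source): a binary expression such as
// (p.Age >= 18 && p.Name == name) is rendered as "((p.Age >= @p0) AND (p.Name = @p1))".
// String concatenation uses "||" on Postgres and "+" on SQL Server, as handled in the Add/AddChecked case above.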
protected internal override Expression VisitRowNumber(RowNumberExpression rowNumber)
{
sb.Append("ROW_NUMBER() OVER(ORDER BY ");
for (int i = 0, n = rowNumber.OrderBy.Count; i < n; i++)
{
OrderExpression exp = rowNumber.OrderBy[i];
if (i > 0)
sb.Append(", ");
this.Visit(exp.Expression);
if (exp.OrderType != OrderType.Ascending)
sb.Append(" DESC");
}
sb.Append(")");
return rowNumber;
}
protected internal override Expression VisitCase(CaseExpression cex)
{
AppendNewLine(Indentation.Inner);
sb.Append("CASE");
AppendNewLine(Indentation.Inner);
for (int i = 0, n = cex.Whens.Count; i < n; i++)
{
When when = cex.Whens[i];
sb.Append("WHEN ");
Visit(when.Condition);
sb.Append(" THEN ");
Visit(when.Value);
AppendNewLine(Indentation.Same);
}
if (cex.DefaultValue != null)
{
sb.Append("ELSE ");
Visit(cex.DefaultValue);
AppendNewLine(Indentation.Outer);
}
sb.Append("END");
AppendNewLine(Indentation.Outer);
return cex;
}
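// Illustrative output (not from the original source): a CaseExpression with one when-clause and a default
// value is rendered on its own indented lines, roughly as
//   CASE
//     WHEN (p.Age >= @p0) THEN @p1
//     ELSE @p2
//   END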
protected internal override Expression VisitLike(LikeExpression like)
{
Visit(like.Expression);
sb.Append(" LIKE ");
Visit(like.Pattern);
return like;
}
protected internal override Expression VisitExists(ExistsExpression exists)
{
sb.Append("EXISTS(");
this.Visit(exists.Select);
sb.Append(")");
return exists;
}
protected internal override Expression VisitScalar(ScalarExpression exists)
{
sb.Append("(");
this.Visit(exists.Select);
sb.Append(")");
return exists;
}
protected internal override Expression VisitIsNull(IsNullExpression isNull)
{
sb.Append("(");
this.Visit(isNull.Expression);
sb.Append(") IS NULL");
return isNull;
}
protected internal override Expression VisitIsNotNull(IsNotNullExpression isNotNull)
{
sb.Append("(");
this.Visit(isNotNull.Expression);
sb.Append(") IS NOT NULL");
return isNotNull;
}
protected internal override Expression VisitIn(InExpression inExpression)
{
Visit(inExpression.Expression);
sb.Append(" IN (");
if (inExpression.Select == null)
{
bool any = false;
foreach (var obj in inExpression.Values!)
{
VisitConstant(Expression.Constant(obj));
sb.Append(",");
any = true;
}
if (any)
sb.Remove(sb.Length - 1, 1);
}
else
{
Visit(inExpression.Select);
}
sb.Append(" )");
return inExpression;
}
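// Illustrative output (not from the original source): with inline values the result looks like
// "t.Id IN (@p0,@p1,@p2 )"; with a sub-select it becomes "t.Id IN (SELECT ... )". Each inline value goes
// through VisitConstant, so it is emitted as a DbParameter rather than a literal.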
protected internal override Expression VisitSqlLiteral(SqlLiteralExpression sqlEnum)
{
sb.Append(sqlEnum.Value);
return sqlEnum;
}
protected internal override Expression VisitSqlCast(SqlCastExpression castExpr)
{
sb.Append("CAST(");
Visit(castExpr.Expression);
sb.Append(" as ");
sb.Append(castExpr.DbType.ToString(schema.Settings.IsPostgres));
if (!schema.Settings.IsPostgres && (castExpr.DbType.SqlServer == SqlDbType.NVarChar || castExpr.DbType.SqlServer == SqlDbType.VarChar))
sb.Append("(MAX)");
sb.Append(")");
return castExpr;
}
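// Illustrative output (not from the original source): on SQL Server a cast to a string type is rendered as
// "CAST(t.Amount as NVARCHAR(MAX))"; the "(MAX)" suffix is only appended for SQL Server NVarChar/VarChar,
// never for Postgres.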
protected override Expression VisitConstant(ConstantExpression c)
{
if (c.Value == null)
sb.Append("NULL");
else
{
if (!schema.Settings.IsDbType(c.Value.GetType().UnNullify()))
throw new NotSupportedException(string.Format("The constant for {0} is not supported", c.Value));
var pi = parameterExpressions.GetOrCreate(c, () => this.CreateParameter(c));
sb.Append(pi.Name);
}
return c;
}
protected internal override Expression VisitSqlConstant(SqlConstantExpression c)
{
if (c.Value == null)
sb.Append("NULL");
else
{
if (!schema.Settings.IsDbType(c.Value.GetType().UnNullify()))
throw new NotSupportedException(string.Format("The constant for {0} is not supported", c.Value));
if (!isPostgres && c.Value.Equals(true))
sb.Append("1");
else if (!isPostgres && c.Value.Equals(false))
sb.Append("0");
else if (c.Value is string s)
sb.Append(s == "" ? "''" : ("'" + s + "'"));
else if (c.Value is TimeSpan ts)
sb.Append(@$"CONVERT(time, '{ts.ToString()}')");
else
sb.Append(c.ToString());
}
return c;
}
protected internal override Expression VisitSqlVariable(SqlVariableExpression sve)
{
sb.Append(sve.VariableName);
return sve;
}
protected internal override Expression VisitColumn(ColumnExpression column)
{
sb.Append(column.Alias.ToString());
if (column.Name != null) //Is null for PostgressFunctions.unnest and friends (IQueryable<int> table-valued function)
{
sb.Append(".");
sb.Append(column.Name.SqlEscape(isPostgres));
}
return column;
}
protected internal override Expression VisitSelect(SelectExpression select)
{
bool isFirst = sb.Length == 0;
if (!isFirst)
{
AppendNewLine(Indentation.Inner);
sb.Append("(");
}
sb.Append("SELECT ");
if (select.IsDistinct)
sb.Append("DISTINCT ");
if (select.Top != null && !this.isPostgres)
{
sb.Append("TOP (");
Visit(select.Top);
sb.Append(") ");
}
if (select.Columns.Count == 0)
sb.Append("0 as Dummy");
else
{
this.AppendNewLine(Indentation.Inner);
for (int i = 0, n = select.Columns.Count; i < n; i++)
{
ColumnDeclaration column = select.Columns[i];
AppendColumn(column);
if (i < (n - 1))
{
sb.Append(", ");
this.AppendNewLine(Indentation.Same);
}
else
{
this.Indent(Indentation.Outer);
}
}
}
if (select.From != null)
{
this.AppendNewLine(Indentation.Same);
sb.Append("FROM ");
this.VisitSource(select.From);
}
if (select.Where != null)
{
this.AppendNewLine(Indentation.Same);
sb.Append("WHERE ");
this.Visit(select.Where);
}
if (select.GroupBy.Count > 0)
{
this.AppendNewLine(Indentation.Same);
sb.Append("GROUP BY ");
for (int i = 0, n = select.GroupBy.Count; i < n; i++)
{
Expression exp = select.GroupBy[i];
if (i > 0)
{
sb.Append(", ");
}
this.Visit(exp);
}
}
if (select.OrderBy.Count > 0)
{
this.AppendNewLine(Indentation.Same);
sb.Append("ORDER BY ");
for (int i = 0, n = select.OrderBy.Count; i < n; i++)
{
OrderExpression exp = select.OrderBy[i];
if (i > 0)
{
sb.Append(", ");
}
this.Visit(exp.Expression);
if (exp.OrderType != OrderType.Ascending)
{
sb.Append(" DESC");
}
}
}
if (select.Top != null && this.isPostgres)
{
this.AppendNewLine(Indentation.Same);
sb.Append("LIMIT ");
Visit(select.Top);
}
if (select.IsForXmlPathEmpty)
{
this.AppendNewLine(Indentation.Same);
sb.Append("FOR XML PATH('')");
}
if (!isFirst)
{
sb.Append(")");
AppendNewLine(Indentation.Outer);
}
return select;
}
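// Illustrative output (not from the original source): a top-level select is emitted directly, nested selects
// are wrapped in parentheses. A simple query might look roughly like
//   SELECT TOP (@p0) p.Name     -- SQL Server; on Postgres "LIMIT @p0" is appended at the end instead
//   FROM Person AS p
//   WHERE (p.Age > @p1)
//   ORDER BY p.Name DESC
// Columns whose name differs from the source column get an "as" alias via AppendColumn, and an empty
// projection becomes "0 as Dummy".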
string GetAggregateFunction(AggregateSqlFunction agg)
{
return agg switch
{
AggregateSqlFunction.Average => "AVG",
AggregateSqlFunction.StdDev => !isPostgres ? "STDEV" : "stddev_samp",
AggregateSqlFunction.StdDevP => !isPostgres ? "STDEVP" : "stddev_pop",
AggregateSqlFunction.Count => "COUNT",
AggregateSqlFunction.CountDistinct => "COUNT",
AggregateSqlFunction.Max => "MAX",
AggregateSqlFunction.Min => "MIN",
AggregateSqlFunction.Sum => "SUM",
AggregateSqlFunction.string_agg => "string_agg",
_ => throw new UnexpectedValueException(agg)
};
}
protected internal override Expression VisitAggregate(AggregateExpression aggregate)
{
sb.Append(GetAggregateFunction(aggregate.AggregateFunction));
sb.Append("(");
if (aggregate.AggregateFunction == AggregateSqlFunction.CountDistinct)
sb.Append("DISTINCT ");
if (aggregate.Arguments.Count == 1 && aggregate.Arguments[0] == null && aggregate.AggregateFunction == AggregateSqlFunction.Count)
{
sb.Append("*");
}
else
{
for (int i = 0, n = aggregate.Arguments.Count; i < n; i++)
{
Expression exp = aggregate.Arguments[i];
if (i > 0)
sb.Append(", ");
this.Visit(exp);
}
}
sb.Append(")");
return aggregate;
}
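// Illustrative output (not from the original source): Count with a single null argument becomes "COUNT(*)",
// CountDistinct becomes "COUNT(DISTINCT t.Col)", and StdDev maps to "STDEV" on SQL Server but
// "stddev_samp" on Postgres (see GetAggregateFunction above).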
protected internal override Expression VisitSqlFunction(SqlFunctionExpression sqlFunction)
{
if (isPostgres && sqlFunction.SqlFunction == PostgresFunction.EXTRACT.ToString())
{
sb.Append(sqlFunction.SqlFunction);
sb.Append("(");
this.Visit(sqlFunction.Arguments[0]);
sb.Append(" from ");
this.Visit(sqlFunction.Arguments[1]);
sb.Append(")");
}
else if(isPostgres && PostgressOperator.All.Contains(sqlFunction.SqlFunction))
{
sb.Append("(");
this.Visit(sqlFunction.Arguments[0]);
sb.Append(" " + sqlFunction.SqlFunction + " ");
this.Visit(sqlFunction.Arguments[1]);
sb.Append(")");
}
else if (sqlFunction.SqlFunction == SqlFunction.COLLATE.ToString())
{
this.Visit(sqlFunction.Arguments[0]);
sb.Append(" COLLATE ");
if (sqlFunction.Arguments[1] is SqlConstantExpression ce)
sb.Append((string)ce.Value!);
}
else
{
if (sqlFunction.Object != null)
{
Visit(sqlFunction.Object);
sb.Append(".");
}
sb.Append(sqlFunction.SqlFunction);
sb.Append("(");
for (int i = 0, n = sqlFunction.Arguments.Count; i < n; i++)
{
Expression exp = sqlFunction.Arguments[i];
if (i > 0)
sb.Append(", ");
this.Visit(exp);
}
sb.Append(")");
}
return sqlFunction;
}
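// Illustrative output (not from the original source):
//   EXTRACT(year from t.Date)               -- Postgres EXTRACT uses the "arg0 from arg1" form
//   (t.Left <op> t.Right)                   -- operators listed in PostgressOperator.All are emitted infix
//   t.Name COLLATE Latin1_General_CI_AI     -- COLLATE appends the collation name from a SqlConstantExpression
//   SomeFunction(arg0, arg1)                -- everything else is a plain "name(arg, ...)" call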
protected internal override Expression VisitSqlTableValuedFunction(SqlTableValuedFunctionExpression sqlFunction)
{
sb.Append(sqlFunction.SqlFunction.ToString());
sb.Append("(");
for (int i = 0, n = sqlFunction.Arguments.Count; i < n; i++)
{
Expression exp = sqlFunction.Arguments[i];
if (i > 0)
sb.Append(", ");
this.Visit(exp);
}
sb.Append(")");
return sqlFunction;
}
private void AppendColumn(ColumnDeclaration column)
{
ColumnExpression? c = column.Expression as ColumnExpression;
if (column.Name.HasText() && (c == null || c.Name != column.Name))
{
this.Visit(column.Expression);
sb.Append(" as ");
sb.Append(column.Name.SqlEscape(isPostgres));
}
else
{
this.Visit(column.Expression);
}
}
protected internal override Expression VisitTable(TableExpression table)
{
sb.Append(table.Name.ToString());
if (table.SystemTime != null && !(table.SystemTime is SystemTime.HistoryTable))
{
sb.Append(" ");
WriteSystemTime(table.SystemTime);
}
return table;
}
private void WriteSystemTime(SystemTime st)
{
sb.Append("FOR SYSTEM_TIME ");
if (st is SystemTime.AsOf asOf)
{
sb.Append("AS OF ");
this.VisitSystemTimeConstant(asOf.DateTime);
}
else if (st is SystemTime.Between between)
{
sb.Append("BETWEEN ");
this.VisitSystemTimeConstant(between.StartDateTime);
sb.Append(" AND ");
this.VisitSystemTimeConstant(between.EndtDateTime);
}
else if (st is SystemTime.ContainedIn contained)
{
sb.Append("CONTAINED IN (");
this.VisitSystemTimeConstant(contained.StartDateTime);
sb.Append(", ");
this.VisitSystemTimeConstant(contained.EndtDateTime);
sb.Append(")");
}
else if (st is SystemTime.All)
{
sb.Append("ALL");
}
else
throw new InvalidOperationException("Unexpected");
}
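// Illustrative output (not from the original source): temporal-table queries append clauses such as
//   FOR SYSTEM_TIME AS OF @p0
//   FOR SYSTEM_TIME BETWEEN @p0 AND @p1
//   FOR SYSTEM_TIME CONTAINED IN (@p0, @p1)
//   FOR SYSTEM_TIME ALL
// The DateTime bounds are turned into shared parameters by VisitSystemTimeConstant below.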
Dictionary<DateTime, ConstantExpression> systemTimeConstants = new Dictionary<DateTime, ConstantExpression>();
void VisitSystemTimeConstant(DateTime datetime)
{
var c = systemTimeConstants.GetOrCreate(datetime, dt => Expression.Constant(dt));
VisitConstant(c);
}
protected internal override SourceExpression VisitSource(SourceExpression source)
{
if (source is SourceWithAliasExpression)
{
if (source is TableExpression || source is SqlTableValuedFunctionExpression)
Visit(source);
else
{
sb.Append("(");
Visit(source);
sb.Append(")");
}
sb.Append(" AS ");
sb.Append(((SourceWithAliasExpression)source).Alias.ToString());
if (source is TableExpression ta && ta.WithHint != null)
{
sb.Append(" WITH(" + ta.WithHint + ")");
}
}
else
this.VisitJoin((JoinExpression)source);
return source;
}
protected internal override Expression VisitJoin(JoinExpression join)
{
this.VisitSource(join.Left);
this.AppendNewLine(Indentation.Same);
switch (join.JoinType)
{
case JoinType.CrossJoin:
sb.Append("CROSS JOIN ");
break;
case JoinType.InnerJoin:
sb.Append("INNER JOIN ");
break;
case JoinType.LeftOuterJoin:
case JoinType.SingleRowLeftOuterJoin:
sb.Append("LEFT OUTER JOIN ");
break;
case JoinType.RightOuterJoin:
sb.Append("RIGHT OUTER JOIN ");
break;
case JoinType.FullOuterJoin:
sb.Append("FULL OUTER JOIN ");
break;
case JoinType.CrossApply:
sb.Append(isPostgres ? "JOIN LATERAL " : "CROSS APPLY ");
break;
case JoinType.OuterApply:
sb.Append(isPostgres ? "LEFT JOIN LATERAL " : "OUTER APPLY ");
break;
}
bool needsMoreParenthesis = (join.JoinType == JoinType.CrossApply || join.JoinType == JoinType.OuterApply) && join.Right is JoinExpression;
if (needsMoreParenthesis)
sb.Append("(");
this.VisitSource(join.Right);
if (needsMoreParenthesis)
sb.Append(")");
if (join.Condition != null)
{
this.AppendNewLine(Indentation.Inner);
sb.Append("ON ");
this.Visit(join.Condition);
this.Indent(Indentation.Outer);
}
else if (isPostgres && join.JoinType != JoinType.CrossJoin)
{
this.AppendNewLine(Indentation.Inner);
sb.Append("ON true");
this.Indent(Indentation.Outer);
}
return join;
}
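// Illustrative output (not from the original source): a CrossApply join is rendered as
//   CROSS APPLY (...) AS s            -- SQL Server
//   JOIN LATERAL (...) AS s ON true   -- Postgres, using the synthetic "ON true" branch above when no condition exists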
protected internal override Expression VisitSetOperator(SetOperatorExpression set)
{
VisitSetPart(set.Left);
switch (set.Operator)
{
case SetOperator.Union: sb.Append("UNION"); break;
case SetOperator.UnionAll: sb.Append("UNION ALL"); break;
case SetOperator.Intersect: sb.Append("INTERSECT"); break;
case SetOperator.Except: sb.Append("EXCEPT"); break;
default:
throw new InvalidOperationException("Unexpected SetOperator {0}".FormatWith(set.Operator));
}
VisitSetPart(set.Right);
return set;
}
void VisitSetPart(SourceWithAliasExpression source)
{
if (source is SelectExpression)
{
this.Indent(Indentation.Inner);
VisitSelect((SelectExpression)source);
this.Indent(Indentation.Outer);
}
else if (source is SetOperatorExpression)
{
VisitSetOperator((SetOperatorExpression)source);
}
else
throw new InvalidOperationException("{0} not expected in SetOperatorExpression".FormatWith(source.ToString()));
}
protected internal override Expression VisitDelete(DeleteExpression delete)
{
using (this.PrintSelectRowCount(delete.ReturnRowCount))
{
sb.Append("DELETE FROM ");
sb.Append(delete.Name.ToString());
this.AppendNewLine(Indentation.Same);
if (isPostgres)
sb.Append("USING ");
else
sb.Append("FROM ");
VisitSource(delete.Source);
if (delete.Where != null)
{
this.AppendNewLine(Indentation.Same);
sb.Append("WHERE ");
Visit(delete.Where);
}
return delete;
}
}
protected internal override Expression VisitUpdate(UpdateExpression update)
{
using (this.PrintSelectRowCount(update.ReturnRowCount))
{
sb.Append("UPDATE ");
sb.Append(update.Name.ToString());
sb.Append(" SET");
this.AppendNewLine(Indentation.Inner);
for (int i = 0, n = update.Assigments.Count; i < n; i++)
{
ColumnAssignment assignment = update.Assigments[i];
if (i > 0)
{
sb.Append(",");
this.AppendNewLine(Indentation.Same);
}
sb.Append(assignment.Column.SqlEscape(isPostgres));
sb.Append(" = ");
this.Visit(assignment.Expression);
}
this.AppendNewLine(Indentation.Outer);
sb.Append("FROM ");
VisitSource(update.Source);
if (update.Where != null)
{
this.AppendNewLine(Indentation.Same);
sb.Append("WHERE ");
Visit(update.Where);
}
return update;
}
}
protected internal override Expression VisitInsertSelect(InsertSelectExpression insertSelect)
{
using (this.PrintSelectRowCount(insertSelect.ReturnRowCount))
{
sb.Append("INSERT INTO ");
sb.Append(insertSelect.Name.ToString());
sb.Append("(");
for (int i = 0, n = insertSelect.Assigments.Count; i < n; i++)
{
ColumnAssignment assignment = insertSelect.Assigments[i];
if (i > 0)
{
sb.Append(", ");
if (i % 4 == 0)
this.AppendNewLine(Indentation.Same);
}
sb.Append(assignment.Column.SqlEscape(isPostgres));
}
sb.Append(")");
this.AppendNewLine(Indentation.Same);
if(this.isPostgres && Administrator.IsIdentityBehaviourDisabled(insertSelect.Table))
{
sb.Append("OVERRIDING SYSTEM VALUE");
this.AppendNewLine(Indentation.Same);
}
sb.Append("SELECT ");
for (int i = 0, n = insertSelect.Assigments.Count; i < n; i++)
{
ColumnAssignment assignment = insertSelect.Assigments[i];
if (i > 0)
{
sb.Append(", ");
if (i % 4 == 0)
this.AppendNewLine(Indentation.Same);
}
this.Visit(assignment.Expression);
}
sb.Append(" FROM ");
VisitSource(insertSelect.Source);
return insertSelect;
}
}
protected internal IDisposable? PrintSelectRowCount(bool returnRowCount)
{
if (returnRowCount == false)
return null;
if (!this.isPostgres)
{
return new Disposable(() =>
{
sb.AppendLine();
sb.AppendLine("SELECT @@rowcount");
});
}
else
{
sb.Append("WITH rows AS (");
this.AppendNewLine(Indentation.Inner);
return new Disposable(() =>
{
this.AppendNewLine(Indentation.Same);
sb.Append("RETURNING 1");
this.AppendNewLine(Indentation.Outer);
sb.Append(")");
this.AppendNewLine(Indentation.Same);
sb.Append("SELECT CAST(COUNT(*) AS INTEGER) FROM rows");
});
}
}
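// Illustrative output (not from the original source): when ReturnRowCount is set, on SQL Server the command
// is emitted as-is and "SELECT @@rowcount" is appended on a new line afterwards; on Postgres the command is
// wrapped as "WITH rows AS (DELETE ... RETURNING 1) SELECT CAST(COUNT(*) AS INTEGER) FROM rows".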
protected internal override Expression VisitCommandAggregate(CommandAggregateExpression cea)
{
for (int i = 0, n = cea.Commands.Count; i < n; i++)
{
CommandExpression command = cea.Commands[i];
if (i > 0)
{
sb.Append(";");
this.AppendNewLine(Indentation.Same);
}
this.Visit(command);
}
return cea;
}
protected internal override Expression VisitAggregateRequest(AggregateRequestsExpression aggregate)
{
throw InvalidSqlExpression(aggregate);
}
protected internal override Expression VisitChildProjection(ChildProjectionExpression child)
{
throw InvalidSqlExpression(child);
}
protected override Expression VisitConditional(ConditionalExpression c)
{
throw InvalidSqlExpression(c);
}
protected internal override Expression VisitEmbeddedEntity(EmbeddedEntityExpression eee)
{
throw InvalidSqlExpression(eee);
}
protected internal override Expression VisitImplementedBy(ImplementedByExpression reference)
{
throw InvalidSqlExpression(reference);
}
protected internal override Expression VisitImplementedByAll(ImplementedByAllExpression reference)
{
throw InvalidSqlExpression(reference);
}
protected internal override Expression VisitEntity(EntityExpression ee)
{
throw InvalidSqlExpression(ee);
}
protected override Expression VisitLambda<T>(Expression<T> lambda)
{
throw InvalidSqlExpression(lambda);
}
protected override Expression VisitListInit(ListInitExpression init)
{
throw InvalidSqlExpression(init);
}
protected internal override Expression VisitLiteValue(LiteValueExpression lite)
{
throw InvalidSqlExpression(lite);
}
protected internal override Expression VisitLiteReference(LiteReferenceExpression lite)
{
return base.VisitLiteReference(lite);
}
protected override Expression VisitInvocation(InvocationExpression iv)
{
throw InvalidSqlExpression(iv);
}
protected override Expression VisitMember(MemberExpression m)
{
throw InvalidSqlExpression(m);
}
protected override Expression VisitMemberInit(MemberInitExpression init)
{
throw InvalidSqlExpression(init);
}
protected override Expression VisitMethodCall(MethodCallExpression m)
{
throw InvalidSqlExpression(m);
}
protected internal override Expression VisitMList(MListExpression ml)
{
throw InvalidSqlExpression(ml);
}
protected internal override Expression VisitMListElement(MListElementExpression mle)
{
throw InvalidSqlExpression(mle);
}
protected override Expression VisitNew(NewExpression nex)
{
throw InvalidSqlExpression(nex);
}
protected override Expression VisitNewArray(NewArrayExpression na)
{
throw InvalidSqlExpression(na);
}
protected override Expression VisitParameter(ParameterExpression p)
{
throw InvalidSqlExpression(p);
}
protected internal override Expression VisitTypeEntity(TypeEntityExpression typeFie)
{
throw InvalidSqlExpression(typeFie);
}
protected internal override Expression VisitProjection(ProjectionExpression proj)
{
throw InvalidSqlExpression(proj);
}
protected internal override Expression VisitTypeImplementedBy(TypeImplementedByExpression typeIb)
{
throw InvalidSqlExpression(typeIb);
}
protected internal override Expression VisitTypeImplementedByAll(TypeImplementedByAllExpression typeIba)
{
throw InvalidSqlExpression(typeIba);
}
protected override Expression VisitTypeBinary(TypeBinaryExpression b)
{
throw InvalidSqlExpression(b);
}
private InvalidOperationException InvalidSqlExpression(Expression expression)
{
return new InvalidOperationException("Unexepected expression on sql {0}".FormatWith(expression.ToString()));
}
}
}
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
/* ====================================================================
This product contains an ASLv2 licensed version of the OOXML signer
package from the eID Applet project
http://code.google.com/p/eid-applet/source/browse/trunk/README.txt
Copyright (C) 2008-2014 FedICT.
================================================================= */
using System.IO;
namespace TestCases.POIFS.Crypt
{
using NPOI.OpenXml4Net.OPC;
using NPOI.POIFS.Crypt.Dsig;
using NUnit.Framework;
using System;
using System.Collections.Generic;
using System.Security.Cryptography.X509Certificates;
using TestCases;
[TestFixture]
public class TestSignatureInfo
{
private static POIDataSamples testdata = POIDataSamples.GetXmlDSignInstance();
//private static Calendar cal;
//private KeyPair keyPair = null;
private X509Certificate x509 = null;
[SetUp]
public static void InitBouncy() {
//CryptoFunctions.RegisterBouncyCastle();
///*** TODO : Set cal to now ... only Set to fixed date for debugging ... */
//cal = Calendar.Instance;
//cal.Clear();
//cal.TimeZone = (/*setter*/TimeZone.GetTimeZone("UTC"));
//cal.Set(2014, 7, 6, 21, 42, 12);
//// don't run this test when we are using older Xerces as it triggers an XML Parser backwards compatibility issue
//// in the xmlsec jar file
//String AdditionalJar = GetEnvironmentVariable("Additionaljar");
////System.out.Println("Having: " + AdditionalJar);
//Assume.AssumeTrue("Not Running TestSignatureInfo because we are testing with Additionaljar Set to " + AdditionalJar,
// AdditionalJar == null || AdditionalJar.Trim().Length == 0);
}
[Test]
[Ignore("Implement this test in future, maybe based on poi 4.2")]
public void office2007prettyPrintedRels() {
OPCPackage pkg = OPCPackage.Open(testdata.GetFileInfo("office2007prettyPrintedRels.docx"), PackageAccess.READ);
try {
SignatureConfig sic = new SignatureConfig();
sic.SetOpcPackage(pkg);
SignatureInfo si = new SignatureInfo();
si.SetSignatureConfig(sic);
bool isValid = si.VerifySignature();
Assert.IsTrue(isValid);
} finally {
pkg.Close();
}
}
[Test]
[Ignore("Implement this test in future, maybe based on poi 4.2")]
public void GetSignerUnsigned() {
String[] testFiles = {
"hello-world-unsigned.docx",
"hello-world-unsigned.pptx",
"hello-world-unsigned.xlsx",
"hello-world-office-2010-technical-preview-unsigned.docx"
};
foreach (String testFile in testFiles) {
OPCPackage pkg = OPCPackage.Open(testdata.GetFileInfo(testFile), PackageAccess.READ);
SignatureConfig sic = new SignatureConfig();
sic.SetOpcPackage(pkg);
SignatureInfo si = new SignatureInfo();
si.SetSignatureConfig(sic);
List<X509Certificate> result = new List<X509Certificate>();
foreach (SignatureInfo.SignaturePart sp in si.GetSignatureParts()) {
if (sp.Validate()) {
result.Add(sp.GetSigner());
}
}
pkg.Revert();
pkg.Close();
Assert.IsNotNull(result);
Assert.IsEmpty(result);
}
}
[Test]
[Ignore("Implement this test in future, maybe based on poi 4.2")]
public void GetSigner() {
String[] testFiles = {
"hyperlink-example-signed.docx",
"hello-world-signed.docx",
"hello-world-signed.pptx",
"hello-world-signed.xlsx",
"hello-world-office-2010-technical-preview.docx",
"ms-office-2010-signed.docx",
"ms-office-2010-signed.pptx",
"ms-office-2010-signed.xlsx",
"Office2010-SP1-XAdES-X-L.docx",
"signed.docx",
};
foreach (String testFile in testFiles) {
OPCPackage pkg = OPCPackage.Open(testdata.GetFileInfo(testFile), PackageAccess.READ);
try {
SignatureConfig sic = new SignatureConfig();
sic.SetOpcPackage(pkg);
SignatureInfo si = new SignatureInfo();
si.SetSignatureConfig(sic);
List<X509Certificate> result = new List<X509Certificate>();
foreach (SignatureInfo.SignaturePart sp in si.GetSignatureParts()) {
if (sp.Validate()) {
result.Add(sp.GetSigner());
}
}
Assert.IsNotNull(result);
Assert.AreEqual(1, result.Count, "test-file: " + testFile);
X509Certificate signer = result[0];
//LOG.Log(POILogger.DEBUG, "signer: " + signer.SubjectX500Principal);
bool b = si.VerifySignature();
Assert.IsTrue(b, "test-file: " + testFile);
pkg.Revert();
} finally {
pkg.Close();
}
}
}
[Test]
[Ignore("Implement this test in future, maybe based on poi 4.2")]
public void GetMultiSigners() {
String testFile = "hello-world-signed-twice.docx";
OPCPackage pkg = OPCPackage.Open(testdata.GetFileInfo(testFile), PackageAccess.READ);
//try {
// SignatureConfig sic = new SignatureConfig();
// sic.OpcPackage = (/*setter*/pkg);
// SignatureInfo si = new SignatureInfo();
// si.SignatureConfig = (/*setter*/sic);
// List<X509Certificate> result = new List<X509Certificate>();
// foreach (SignaturePart sp in si.SignatureParts) {
// if (sp.Validate()) {
// result.Add(sp.Signer);
// }
// }
// Assert.IsNotNull(result);
// Assert.AreEqual("test-file: " + testFile, 2, result.Size());
// X509Certificate signer1 = result.Get(0);
// X509Certificate signer2 = result.Get(1);
// //LOG.Log(POILogger.DEBUG, "signer 1: " + signer1.SubjectX500Principal);
// //LOG.Log(POILogger.DEBUG, "signer 2: " + signer2.SubjectX500Principal);
// bool b = si.VerifySignature();
// Assert.IsTrue("test-file: " + testFile, b);
// pkg.Revert();
//} finally {
// pkg.Close();
//}
throw new NotImplementedException();
}
[Test]
[Ignore("Implement this test in future, maybe based on poi 4.2")]
public void TestSignSpreadsheet()
{
String testFile = "hello-world-unsigned.xlsx";
OPCPackage pkg = OPCPackage.Open(copy(testdata.GetFileInfo(testFile)), PackageAccess.READ_WRITE);
sign(pkg, "Test", "CN=Test", 1);
pkg.Close();
}
[Test]
[Ignore("Implement this test in future, maybe based on poi 4.2")]
public void TestManipulation() {
//// sign & validate
//String testFile = "hello-world-unsigned.xlsx";
//OPCPackage pkg = OPCPackage.Open(copy(testdata.GetFile(testFile)), PackageAccess.READ_WRITE);
//sign(pkg, "Test", "CN=Test", 1);
//// manipulate
//XSSFWorkbook wb = new XSSFWorkbook(pkg);
//wb.SetSheetName(0, "manipulated");
//// ... I don't know, why Commit is protected ...
//Method m = typeof(XSSFWorkbook).GetDeclaredMethod("commit");
//m.Accessible = (/*setter*/true);
//m.Invoke(wb);
//// todo: test a manipulation on a package part, which is not signed
//// ... maybe in combination with #56164
//// validate
//SignatureConfig sic = new SignatureConfig();
//sic.OpcPackage = (/*setter*/pkg);
//SignatureInfo si = new SignatureInfo();
//si.SignatureConfig = (/*setter*/sic);
//bool b = si.VerifySignature();
//Assert.IsFalse("signature should be broken", b);
//wb.Close();
throw new NotImplementedException();
}
[Test]
[Ignore("Implement this test in future, maybe based on poi 4.2")]
public void TestSignSpreadsheetWithSignatureInfo() {
//InitKeyPair("Test", "CN=Test");
//String testFile = "hello-world-unsigned.xlsx";
//OPCPackage pkg = OPCPackage.Open(copy(testdata.GetFile(testFile)), PackageAccess.READ_WRITE);
//SignatureConfig sic = new SignatureConfig();
//sic.OpcPackage = (/*setter*/pkg);
//sic.Key = (/*setter*/keyPair.Private);
//sic.SigningCertificateChain = (/*setter*/Collections.SingletonList(x509));
//SignatureInfo si = new SignatureInfo();
//si.SignatureConfig = (/*setter*/sic);
//// hash > sha1 doesn't work in excel viewer ...
//si.ConfirmSignature();
//List<X509Certificate> result = new List<X509Certificate>();
//foreach (SignaturePart sp in si.SignatureParts) {
// if (sp.Validate()) {
// result.Add(sp.Signer);
// }
//}
//Assert.AreEqual(1, result.Size());
//pkg.Close();
throw new NotImplementedException();
}
[Test]
[Ignore("Implement this test in future, maybe based on poi 4.2")]
public void TestSignEnvelopingDocument() {
//String testFile = "hello-world-unsigned.xlsx";
//OPCPackage pkg = OPCPackage.Open(copy(testdata.GetFile(testFile)), PackageAccess.READ_WRITE);
//InitKeyPair("Test", "CN=Test");
//X509CRL crl = PkiTestUtils.GenerateCrl(x509, keyPair.Private);
//// Setup
//SignatureConfig signatureConfig = new SignatureConfig();
//signatureConfig.OpcPackage=(/*setter*/pkg);
//signatureConfig.Key=(/*setter*/keyPair.Private);
///*
// * We need at least 2 certificates for the XAdES-C complete certificate
// * refs construction.
// */
//List<X509Certificate> certificateChain = new List<X509Certificate>();
//certificateChain.Add(x509);
//certificateChain.Add(x509);
//signatureConfig.SigningCertificateChain=(/*setter*/certificateChain);
//signatureConfig.AddSignatureFacet(new EnvelopedSignatureFacet());
//signatureConfig.AddSignatureFacet(new KeyInfoSignatureFacet());
//signatureConfig.AddSignatureFacet(new XAdESSignatureFacet());
//signatureConfig.AddSignatureFacet(new XAdESXLSignatureFacet());
//// check for internet, no error means it works
//bool mockTsp = (getAccessError("http://timestamp.comodoca.com/rfc3161", true, 10000) != null);
//// http://timestamping.edelweb.fr/service/tsp
//// http://tsa.belgium.be/connect
//// http://timestamp.comodoca.com/authenticode
//// http://timestamp.comodoca.com/rfc3161
//// http://services.globaltrustFinder.com/adss/tsa
//signatureConfig.TspUrl=(/*setter*/"http://timestamp.comodoca.com/rfc3161");
//signatureConfig.TspRequestPolicy=(/*setter*/null); // comodoca request fails, if default policy is Set ...
//signatureConfig.TspOldProtocol=(/*setter*/false);
////set proxy info if any
//String proxy = GetEnvironmentVariable("http_proxy");
//if (proxy != null && proxy.Trim().Length > 0) {
// signatureConfig.ProxyUrl=(/*setter*/proxy);
//}
//if (mockTsp) {
// //TimeStampService tspService = new TimeStampService(){
// // public byte[] timeStamp(byte[] data, RevocationData revocationData) {
// // revocationData.AddCRL(crl);
// // return "time-stamp-token".Bytes;
// // }
// // public void SetSignatureConfig(SignatureConfig config) {
// // // empty on purpose
// // }
// //};
// //signatureConfig.TspService=(/*setter*/tspService);
//} else {
// TimeStampServiceValidator tspValidator = new TimeStampServiceValidator() {
// public void validate(List<X509Certificate> certificateChain,
// RevocationData revocationData) {
// foreach (X509Certificate certificate in certificateChain) {
// LOG.Log(POILogger.DEBUG, "certificate: " + certificate.SubjectX500Principal);
// LOG.Log(POILogger.DEBUG, "validity: " + certificate.NotBefore + " - " + certificate.NotAfter);
// }
// }
// };
// signatureConfig.TspValidator=(/*setter*/tspValidator);
// signatureConfig.TspOldProtocol=(/*setter*/signatureConfig.TspUrl.Contains("edelweb"));
//}
//RevocationData revocationData = new RevocationData();
//revocationData.AddCRL(crl);
//OCSPResp ocspResp = PkiTestUtils.CreateOcspResp(x509, false,
// x509, x509, keyPair.Private, "SHA1withRSA", cal.TimeInMillis);
//revocationData.AddOCSP(ocspResp.Encoded);
//RevocationDataService revocationDataService = new RevocationDataService(){
// public RevocationData GetRevocationData(List<X509Certificate> certificateChain) {
// return revocationData;
// }
//};
//signatureConfig.RevocationDataService=(/*setter*/revocationDataService);
//// operate
//SignatureInfo si = new SignatureInfo();
//si.SignatureConfig=(/*setter*/signatureConfig);
//try {
// si.ConfirmSignature();
//} catch (Exception e) {
// // only allow a ConnectException because of timeout, we see this in Jenkins from time to time...
// if(e.Cause == null) {
// throw e;
// }
// if(!(e.Cause is ConnectException)) {
// throw e;
// }
// Assert.IsTrue("Only allowing ConnectException with 'timed out' as message here, but had: " + e, e.Cause.Message.Contains("timed out"));
//}
//// verify
//Iterator<SignaturePart> spIter = si.SignatureParts.Iterator();
//Assert.IsTrue(spIter.HasNext());
//SignaturePart sp = spIter.Next();
//bool valid = sp.Validate();
//Assert.IsTrue(valid);
//SignatureDocument sigDoc = sp.SignatureDocument;
//String declareNS =
// "declare namespace xades='http://uri.etsi.org/01903/v1.3.2#'; "
// + "declare namespace ds='http://www.w3.org/2000/09/xmldsig#'; ";
//String digestValXQuery = declareNS +
// "$this/ds:Signature/ds:SignedInfo/ds:Reference";
//foreach (ReferenceType rt in (ReferenceType[])sigDoc.SelectPath(digestValXQuery)) {
// Assert.IsNotNull(rt.DigestValue);
// Assert.AreEqual(signatureConfig.DigestMethodUri, rt.DigestMethod.Algorithm);
//}
//String certDigestXQuery = declareNS +
// "$this//xades:SigningCertificate/xades:Cert/xades:CertDigest";
//XmlObject xoList[] = sigDoc.SelectPath(certDigestXQuery);
//Assert.AreEqual(xoList.Length, 1);
//DigestAlgAndValueType certDigest = (DigestAlgAndValueType)xoList[0];
//Assert.IsNotNull(certDigest.DigestValue);
//String qualPropXQuery = declareNS +
// "$this/ds:Signature/ds:Object/xades:QualifyingProperties";
//xoList = sigDoc.SelectPath(qualPropXQuery);
//Assert.AreEqual(xoList.Length, 1);
//QualifyingPropertiesType qualProp = (QualifyingPropertiesType)xoList[0];
//bool qualPropXsdOk = qualProp.Validate();
//Assert.IsTrue(qualPropXsdOk);
//pkg.Close();
throw new NotImplementedException();
}
public static String GetAccessError(String destinationUrl, bool fireRequest, int timeout) {
//URL url;
//try {
// url = new URL(destinationUrl);
//} catch (MalformedURLException e) {
// throw new ArgumentException("Invalid destination URL", e);
//}
//HttpURLConnection conn = null;
//try {
// conn = (HttpURLConnection) url.OpenConnection();
// // Set specified timeout if non-zero
// if(timeout != 0) {
// conn.ConnectTimeout=(/*setter*/timeout);
// conn.ReadTimeout=(/*setter*/timeout);
// }
// conn.DoOutput=(/*setter*/false);
// conn.DoInput=(/*setter*/true);
// /* if connecting is not possible this will throw a connection refused exception */
// conn.Connect();
// if (fireRequest) {
// InputStream is1 = null;
// try {
// is1 = conn.InputStream;
// } finally {
// IOUtils.CloseQuietly(is1);
// }
// }
// /* if connecting is possible we return true here */
// return null;
//} catch (IOException e) {
// /* exception is thrown -> server not available */
// return e.Class.Name + ": " + e.Message;
//} finally {
// if (conn != null) {
// conn.Disconnect();
// }
//}
throw new NotImplementedException();
}
[Test]
[Ignore("Implement this test in future, maybe based on poi 4.2")]
public void TestCertChain() {
//KeyStore keystore = KeyStore.GetInstance("PKCS12");
//String password = "test";
//InputStream is1 = testdata.OpenResourceAsStream("chaintest.pfx");
//keystore.Load(is, password.ToCharArray());
//is1.Close();
//Key key = keystore.GetKey("poitest", password.ToCharArray());
//Certificate chainList[] = keystore.GetCertificateChain("poitest");
//List<X509Certificate> certChain = new List<X509Certificate>();
//foreach (Certificate c in chainList) {
// certChain.Add((X509Certificate)c);
//}
//x509 = certChain.Get(0);
//keyPair = new KeyPair(x509.PublicKey, (PrivateKey)key);
//String testFile = "hello-world-unsigned.xlsx";
//OPCPackage pkg = OPCPackage.Open(copy(testdata.GetFile(testFile)), PackageAccess.READ_WRITE);
//SignatureConfig signatureConfig = new SignatureConfig();
//signatureConfig.Key = (/*setter*/keyPair.Private);
//signatureConfig.SigningCertificateChain = (/*setter*/certChain);
//Calendar cal = Calendar.Instance;
//cal.Set(2007, 7, 1);
//signatureConfig.ExecutionTime = (/*setter*/cal.Time);
//signatureConfig.DigestAlgo = (/*setter*/HashAlgorithm.sha1);
//signatureConfig.OpcPackage = (/*setter*/pkg);
//SignatureInfo si = new SignatureInfo();
//si.SignatureConfig = (/*setter*/signatureConfig);
//si.ConfirmSignature();
//foreach (SignaturePart sp in si.SignatureParts) {
// Assert.IsTrue("Could not validate", sp.Validate());
// X509Certificate signer = sp.Signer;
// Assert.IsNotNull("signer undefined?!", signer);
// List<X509Certificate> certChainRes = sp.CertChain;
// Assert.AreEqual(3, certChainRes.Size());
//}
//pkg.Close();
throw new NotImplementedException();
}
[Test]
[Ignore("Implement this test in future, maybe based on poi 4.2")]
public void TestNonSha1() {
String testFile = "hello-world-unsigned.xlsx";
InitKeyPair("Test", "CN=Test");
SignatureConfig signatureConfig = new SignatureConfig();
//signatureConfig.Key = (/*setter*/keyPair.Private);
//signatureConfig.SigningCertificateChain = (/*setter*/Collections.SingletonList(x509));
//HashAlgorithm testAlgo[] = { HashAlgorithm.sha224, HashAlgorithm.sha256
// , HashAlgorithm.sha384, HashAlgorithm.sha512, HashAlgorithm.ripemd160 };
//foreach (HashAlgorithm ha in testAlgo) {
// OPCPackage pkg = null;
// try {
// signatureConfig.DigestAlgo = (/*setter*/ha);
// pkg = OPCPackage.Open(copy(testdata.GetFile(testFile)), PackageAccess.READ_WRITE);
// signatureConfig.OpcPackage = (/*setter*/pkg);
// SignatureInfo si = new SignatureInfo();
// si.SignatureConfig = (/*setter*/signatureConfig);
// si.ConfirmSignature();
// bool b = si.VerifySignature();
// Assert.IsTrue("Signature not correctly calculated for " + ha, b);
// } finally {
// if (pkg != null) pkg.Close();
// }
//}
throw new NotImplementedException();
}
[Test, Ignore("not implemented")]
public void TestMultiSign()
{
//initKeyPair("KeyA", "CN=KeyA");
//KeyPair keyPairA = keyPair;
//X509Certificate x509A = x509;
//initKeyPair("KeyB", "CN=KeyB");
//KeyPair keyPairB = keyPair;
//X509Certificate x509B = x509;
//File tpl = copy(testdata.GetFile("bug58630.xlsx"));
//OPCPackage pkg = OPCPackage.open(tpl);
//SignatureConfig signatureConfig = new SignatureConfig();
}
private void sign(OPCPackage pkgCopy, String alias, String signerDn, int signerCount) {
throw new NotImplementedException();
//InitKeyPair(alias, signerDn);
//SignatureConfig signatureConfig = new SignatureConfig();
//signatureConfig.Key=(/*setter*/keyPair.Private);
//signatureConfig.SigningCertificateChain=(/*setter*/Collections.SingletonList(x509));
//signatureConfig.ExecutionTime=(/*setter*/cal.Time);
//signatureConfig.DigestAlgo=(/*setter*/HashAlgorithm.sha1);
//signatureConfig.OpcPackage=(/*setter*/pkgCopy);
//SignatureInfo si = new SignatureInfo();
//si.SignatureConfig=(/*setter*/signatureConfig);
//Document document = DocumentHelper.CreateDocument();
//// operate
//DigestInfo digestInfo = si.PreSign(document, null);
//// verify
//Assert.IsNotNull(digestInfo);
//LOG.Log(POILogger.DEBUG, "digest algo: " + digestInfo.HashAlgo);
//LOG.Log(POILogger.DEBUG, "digest description: " + digestInfo.description);
//Assert.AreEqual("Office OpenXML Document", digestInfo.description);
//Assert.IsNotNull(digestInfo.HashAlgo);
//Assert.IsNotNull(digestInfo.digestValue);
//// Setup: key material, signature value
//byte[] signatureValue = si.SignDigest(digestInfo.digestValue);
//// operate: postSign
//si.PostSign(document, signatureValue);
//// verify: signature
//si.SignatureConfig.OpcPackage=(/*setter*/pkgCopy);
//List<X509Certificate> result = new List<X509Certificate>();
//foreach (SignaturePart sp in si.SignatureParts) {
// if (sp.Validate()) {
// result.Add(sp.Signer);
// }
//}
//Assert.AreEqual(signerCount, result.Size());
}
private void InitKeyPair(String alias, String subjectDN) {
throw new NotImplementedException();
//char password[] = "test".ToCharArray();
//File file = new File("build/test.pfx");
//KeyStore keystore = KeyStore.GetInstance("PKCS12");
//if (file.Exists()) {
// FileInputStream fis = new FileInputStream(file);
// keystore.Load(fis, password);
// fis.Close();
//} else {
// keystore.Load(null, password);
//}
//if (keystore.IsKeyEntry(alias)) {
// Key key = keystore.GetKey(alias, password);
// x509 = (X509Certificate)keystore.GetCertificate(alias);
// keyPair = new KeyPair(x509.PublicKey, (PrivateKey)key);
//} else {
// keyPair = PkiTestUtils.GenerateKeyPair();
// Calendar cal = Calendar.Instance;
// Date notBefore = cal.Time;
// cal.Add(Calendar.YEAR, 1);
// Date notAfter = cal.Time;
// KeyUsage keyUsage = new KeyUsage(KeyUsage.digitalSignature);
// x509 = PkiTestUtils.GenerateCertificate(keyPair.Public, subjectDN
// , notBefore, notAfter, null, keyPair.Private, true, 0, null, null, keyUsage);
// keystore.KeyEntry=(/*setter*/alias, keyPair.Private, password, new Certificate[]{x509});
// FileOutputStream fos = new FileOutputStream(file);
// keystore.Store(fos, password);
// fos.Close();
//}
}
private static FileInfo copy(FileInfo input) {
String extension = Path.GetExtension(input.Name); // C#'s String.Replace is not regex-based (unlike Java's replaceAll), so use Path.GetExtension instead
if (string.IsNullOrEmpty(extension))
extension = ".zip";
FileInfo tmpFile = new FileInfo("build" + Path.DirectorySeparatorChar+ "sigtest" + extension);
throw new NotImplementedException();
//FileStream fos = tmpFile.Create();// FileOutputStream(tmpFile);
//FileStream fis = input.Create();// new FileInputStream(input);
//IOUtils.Copy(fis, fos);
//fis.Close();
//fos.Close();
//return tmpFile;
}
}
}
// Copyright (c) 2007, Clarius Consulting, Manas Technology Solutions, InSTEDD, and Contributors.
// All rights reserved. Licensed under the BSD 3-Clause License; see License.txt.
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using System.Text.RegularExpressions;
using Moq.Properties;
namespace Moq
{
/// <summary>
/// Allows the specification of a matching condition for an argument in a method invocation,
/// rather than a specific argument value. "It" refers to the argument being matched.
/// </summary>
/// <remarks>
/// This class allows the setup to match a method invocation with an arbitrary value,
/// with a value in a specified range, or even one that matches a given predicate.
/// </remarks>
public static class It
{
/// <summary>
/// Contains matchers for <see langword="ref"/> (C#) / <see langword="ByRef"/> (VB.NET) parameters of type <typeparamref name="TValue"/>.
/// </summary>
/// <typeparam name="TValue">The parameter type.</typeparam>
public static class Ref<TValue>
{
/// <summary>
/// Matches any value that is assignment-compatible with type <typeparamref name="TValue"/>.
/// </summary>
public static TValue IsAny;
}
/// <summary>
/// Matches any value of the given <typeparamref name="TValue"/> type.
/// </summary>
/// <typeparam name="TValue">Type of the value.</typeparam>
/// <remarks>
/// Typically used when the actual argument value for a method call is not relevant.
/// </remarks>
/// <example>
/// <code>
/// // Throws an exception for a call to Remove with any string value.
/// mock.Setup(x => x.Remove(It.IsAny<string>())).Throws(new InvalidOperationException());
/// </code>
/// </example>
public static TValue IsAny<TValue>()
{
if (typeof(TValue).IsTypeMatcher())
{
return Match.Create<TValue>(
(argument, parameterType) => argument == null || parameterType.IsAssignableFrom(argument.GetType()),
() => It.IsAny<TValue>());
}
else
{
return Match.Create<TValue>(
argument => argument == null || argument is TValue,
() => It.IsAny<TValue>());
}
}
private static readonly MethodInfo isAnyMethod = typeof(It).GetMethod(nameof(It.IsAny), BindingFlags.Public | BindingFlags.Static);
internal static MethodCallExpression IsAny(Type genericArgument)
{
return Expression.Call(It.isAnyMethod.MakeGenericMethod(genericArgument));
}
/// <summary>
/// A type matcher that matches any generic type argument.
/// <para>
/// If the generic type parameter is constrained to <see langword="struct"/> (C#) / <see langword="Structure"/>
/// (VB.NET), use <see cref="It.IsValueType"/> instead.
/// </para>
/// <para>
/// If the generic type parameter has more specific constraints,
/// you can define your own type matcher inheriting from the type to which the type parameter is constrained.
/// See <see cref="TypeMatcherAttribute"/> and <see cref="ITypeMatcher"/>.
/// </para>
/// </summary>
[TypeMatcher]
public sealed class IsAnyType : ITypeMatcher
{
bool ITypeMatcher.Matches(Type type)
{
return true;
}
}
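// Usage sketch (added for illustration, not part of the original source): IsAnyType stands in for a
// generic type argument when setting up a generic method, e.g.
//   mock.Setup(m => m.Execute(It.IsAny<It.IsAnyType>())).Returns(true);
// where Execute<T>(T arg) is a hypothetical generic method on the mocked interface.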
/// <summary>
/// Matches any value of the given <typeparamref name="TValue"/> type, except null.
/// </summary>
/// <typeparam name="TValue">Type of the value.</typeparam>
public static TValue IsNotNull<TValue>()
{
if (typeof(TValue).IsTypeMatcher())
{
return Match.Create<TValue>(
(argument, parameterType) => argument != null && parameterType.IsAssignableFrom(argument.GetType()),
() => It.IsNotNull<TValue>());
}
else
{
return Match.Create<TValue>(
argument => argument is TValue,
() => It.IsNotNull<TValue>());
}
}
/// <summary>
/// Matches any value that satisfies the given predicate.
/// </summary>
/// <param name="match">The predicate used to match the method argument.</param>
/// <typeparam name="TValue">Type of the argument to check.</typeparam>
/// <remarks>
/// Allows the specification of a predicate to perform matching of method call arguments.
/// </remarks>
/// <example>
/// This example shows how to return the value <c>1</c> whenever the argument to
/// the <c>Do</c> method is an even number.
/// <code>
/// mock.Setup(x => x.Do(It.Is<int>(i => i % 2 == 0)))
/// .Returns(1);
/// </code>
/// </example>
/// <example>
/// This example shows how to throw an exception if the argument to the method
/// is a negative number:
/// <code>
/// mock.Setup(x => x.GetUser(It.Is<int>(i => i < 0)))
/// .Throws(new ArgumentException());
/// </code>
/// </example>
[SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures")]
public static TValue Is<TValue>(Expression<Func<TValue, bool>> match)
{
if (typeof(TValue).IsTypeMatcher())
{
throw new ArgumentException(Resources.UseItIsOtherOverload, nameof(match));
}
var thisMethod = (MethodInfo)MethodBase.GetCurrentMethod();
return Match.Create<TValue>(
argument => match.CompileUsingExpressionCompiler().Invoke(argument),
Expression.Lambda<Func<TValue>>(Expression.Call(thisMethod.MakeGenericMethod(typeof(TValue)), match)));
}
/// <summary>
/// Matches any value that satisfies the given predicate.
/// <para>
/// Use this overload when you specify a type matcher for <typeparamref name="TValue"/>.
/// The <paramref name="match"/> callback you provide will then receive the actual parameter type
/// as well as the invocation argument.
/// </para>
/// </summary>
/// <param name="match">The predicate used to match the method argument.</param>
/// <typeparam name="TValue">Type of the argument to check.</typeparam>
/// <remarks>
/// Allows the specification of a predicate to perform matching of method call arguments.
/// </remarks>
[EditorBrowsable(EditorBrowsableState.Advanced)]
public static TValue Is<TValue>(Expression<Func<object, Type, bool>> match)
{
var thisMethod = (MethodInfo)MethodBase.GetCurrentMethod();
return Match.Create<TValue>(
(argument, parameterType) => match.CompileUsingExpressionCompiler().Invoke(argument, parameterType),
Expression.Lambda<Func<TValue>>(Expression.Call(thisMethod.MakeGenericMethod(typeof(TValue)), match)));
}
/// <summary>
/// Matches any value that is in the range specified.
/// </summary>
/// <param name="from">The lower bound of the range.</param>
/// <param name="to">The upper bound of the range.</param>
/// <param name="rangeKind">The kind of range. See <see cref="Range"/>.</param>
/// <typeparam name="TValue">Type of the argument to check.</typeparam>
/// <example>
/// The following example shows how to expect a method call with an integer argument
/// within the 0..100 range.
/// <code>
/// mock.Setup(x => x.HasInventory(
/// It.IsAny<string>(),
/// It.IsInRange(0, 100, Range.Inclusive)))
/// .Returns(false);
/// </code>
/// </example>
public static TValue IsInRange<TValue>(TValue from, TValue to, Range rangeKind)
where TValue : IComparable
{
return Match<TValue>.Create(value =>
{
if (value == null)
{
return false;
}
if (rangeKind == Range.Exclusive)
{
return value.CompareTo(from) > 0 && value.CompareTo(to) < 0;
}
return value.CompareTo(from) >= 0 && value.CompareTo(to) <= 0;
},
() => It.IsInRange(from, to, rangeKind));
}
/// <summary>
/// Matches any value that is present in the sequence specified.
/// </summary>
/// <param name="items">The sequence of possible values.</param>
/// <typeparam name="TValue">Type of the argument to check.</typeparam>
/// <example>
/// The following example shows how to expect a method call with an integer argument
/// with value from a list.
/// <code>
/// var values = new List<int> { 1, 2, 3 };
///
/// mock.Setup(x => x.HasInventory(
/// It.IsAny<string>(),
/// It.IsIn(values)))
/// .Returns(false);
/// </code>
/// </example>
public static TValue IsIn<TValue>(IEnumerable<TValue> items)
{
return Match<TValue>.Create(value => items.Contains(value), () => It.IsIn(items));
}
/// <summary>
/// Matches any value that is present in the sequence specified.
/// </summary>
/// <param name="items">The sequence of possible values.</param>
/// <typeparam name="TValue">Type of the argument to check.</typeparam>
/// <example>
/// The following example shows how to expect a method call with an integer argument
/// with a value of 1, 2, or 3.
/// <code>
/// mock.Setup(x => x.HasInventory(
/// It.IsAny<string>(),
/// It.IsIn(1, 2, 3)))
/// .Returns(false);
/// </code>
/// </example>
public static TValue IsIn<TValue>(params TValue[] items)
{
return Match<TValue>.Create(value => items.Contains(value), () => It.IsIn(items));
}
/// <summary>
/// Matches any value that is not found in the sequence specified.
/// </summary>
/// <param name="items">The sequence of disallowed values.</param>
/// <typeparam name="TValue">Type of the argument to check.</typeparam>
/// <example>
/// The following example shows how to expect a method call with an integer argument
/// with value not found from a list.
/// <code>
/// var values = new List<int> { 1, 2, 3 };
///
/// mock.Setup(x => x.HasInventory(
/// It.IsAny<string>(),
/// It.IsNotIn(values)))
/// .Returns(false);
/// </code>
/// </example>
public static TValue IsNotIn<TValue>(IEnumerable<TValue> items)
{
return Match<TValue>.Create(value => !items.Contains(value), () => It.IsNotIn(items));
}
/// <summary>
/// Matches any value that is not found in the sequence specified.
/// </summary>
/// <param name="items">The sequence of disallowed values.</param>
/// <typeparam name="TValue">Type of the argument to check.</typeparam>
/// <example>
/// The following example shows how to expect a method call with an integer argument
/// of any value except 1, 2, or 3.
/// <code>
/// mock.Setup(x => x.HasInventory(
/// It.IsAny<string>(),
/// It.IsNotIn(1, 2, 3)))
/// .Returns(false);
/// </code>
/// </example>
public static TValue IsNotIn<TValue>(params TValue[] items)
{
return Match<TValue>.Create(value => !items.Contains(value), () => It.IsNotIn(items));
}
/// <summary>
/// Matches a string argument if it matches the given regular expression pattern.
/// </summary>
/// <param name="regex">The pattern to use to match the string argument value.</param>
/// <example>
/// The following example shows how to expect a call to a method where the string argument
/// matches the given regular expression:
/// <code>
/// mock.Setup(x => x.Check(It.IsRegex("[a-z]+")))
/// .Returns(1);
/// </code>
/// </example>
public static string IsRegex(string regex)
{
Guard.NotNull(regex, nameof(regex));
// The regex is constructed only once.
var re = new Regex(regex);
// But evaluated every time :)
return Match<string>.Create(value => value != null && re.IsMatch(value), () => It.IsRegex(regex));
}
/// <summary>
/// Matches a string argument if it matches the given regular expression pattern.
/// </summary>
/// <param name="regex">The pattern to use to match the string argument value.</param>
/// <param name="options">The options used to interpret the pattern.</param>
/// <example>
/// The following example shows how to expect a call to a method where the string argument
/// matches the given regular expression, in a case insensitive way:
/// <code>
/// mock.Setup(x => x.Check(It.IsRegex("[a-z]+", RegexOptions.IgnoreCase)))
/// .Returns(1);
/// </code>
/// </example>
public static string IsRegex(string regex, RegexOptions options)
{
Guard.NotNull(regex, nameof(regex));
// The regex is constructed only once.
var re = new Regex(regex, options);
// But evaluated every time :)
return Match<string>.Create(value => value != null && re.IsMatch(value), () => It.IsRegex(regex, options));
}
/// <summary>
/// A type matcher that matches subtypes of <typeparamref name="T"/>, as well as <typeparamref name="T"/> itself.
/// </summary>
/// <typeparam name="T">The type whose subtypes should match.</typeparam>
[TypeMatcher]
public sealed class IsSubtype<T> : ITypeMatcher
{
bool ITypeMatcher.Matches(Type type)
{
return typeof(T).IsAssignableFrom(type);
}
}
/// <summary>
/// A type matcher that matches any value type.
/// </summary>
[TypeMatcher]
public readonly struct IsValueType : ITypeMatcher
{
bool ITypeMatcher.Matches(Type type)
{
return type.IsValueType;
}
}
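// Editor's note: a hedged usage sketch, not part of the original source. Type
// matchers are supplied as the generic type argument of another matcher. It assumes
// a hypothetical IProcessor interface with a generic method bool Process<T>(T item)
// and a hypothetical Animal base class:
//
//   var mock = new Mock<IProcessor>();
//   // Matches Process<Dog>(...), Process<Cat>(...), and Process<Animal>(...).
//   mock.Setup(p => p.Process(It.IsAny<It.IsSubtype<Animal>>())).Returns(true);
//   // Matches any call whose type argument is a value type, e.g. Process<int>(...).
//   mock.Setup(p => p.Process(It.IsAny<It.IsValueType>())).Returns(false);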
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Dynamic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Text;
using XSerializer.Encryption;
namespace XSerializer
{
/// <summary>
/// A representation of a JSON object. Provides an advanced dynamic API as well as a standard
/// object API.
/// </summary>
public sealed class JsonObject : DynamicObject, IDictionary<string, object>
{
private static readonly string[] _definedProjections =
{
"AsByte",
"AsSByte",
"AsInt16",
"AsUInt16",
"AsInt32",
"AsUInt32",
"AsInt64",
"AsUInt64",
"AsDouble",
"AsSingle",
"AsDecimal",
"AsString",
"AsDateTime",
"AsDateTimeOffset",
"AsGuid"
};
private readonly Dictionary<string, object> _values = new Dictionary<string, object>();
private readonly Dictionary<string, string> _numericStringValues = new Dictionary<string, string>();
private readonly Dictionary<string, object> _projections = new Dictionary<string, object>();
private readonly IJsonSerializeOperationInfo _info;
/// <summary>
/// Initializes a new instance of the <see cref="JsonObject"/> class.
/// </summary>
public JsonObject()
: this(null, null, null, null)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="JsonObject"/> class.
/// </summary>
/// <param name="dateTimeHandler">The object that determines how date time values are parsed.</param>
/// <param name="encryptionMechanism">The object the performs encryption operations.</param>
/// <param name="encryptKey">A key optionally used by the encryption mechanism during encryption operations.</param>
/// <param name="serializationState">An object optionally used by the encryption mechanism to carry state across multiple encryption operations.</param>
public JsonObject(
IDateTimeHandler dateTimeHandler = null,
IEncryptionMechanism encryptionMechanism = null,
object encryptKey = null,
SerializationState serializationState = null)
: this(new JsonSerializeOperationInfo
{
DateTimeHandler = dateTimeHandler ?? DateTimeHandler.Default,
EncryptionMechanism = encryptionMechanism,
EncryptKey = encryptKey,
SerializationState = serializationState
})
{
}
internal JsonObject(IJsonSerializeOperationInfo info)
{
_info = info;
}
/// <summary>
/// Add a property to the JSON object.
/// </summary>
/// <param name="name">The name of the property.</param>
/// <param name="value">The value of the property.</param>
/// <exception cref="ArgumentNullException">If <paramref name="name"/> is null.</exception>
public void Add(string name, object value)
{
if (name == null)
{
throw new ArgumentNullException("name");
}
AddImpl(name, GuardValue(value));
}
private void AddImpl(string name, object value)
{
var jsonNumber = value as JsonNumber;
if (jsonNumber != null)
{
_values.Add(name, jsonNumber.DoubleValue);
_numericStringValues.Add(name, jsonNumber.StringValue);
}
else
{
_values.Add(name, value);
}
}
/// <summary>
/// Gets or sets the value associated with the specified property name.
/// </summary>
/// <param name="name">The name of the property</param>
/// <exception cref="KeyNotFoundException">When getting the value, if no property exists with the specified name.</exception>
public object this[string name]
{
get
{
object value;
if (TryGetValue(name, out value))
{
return value;
}
throw new KeyNotFoundException();
}
set
{
value = GuardValue(value);
if (!TrySetValueImpl(name, value))
{
AddImpl(name, value);
}
}
}
bool IDictionary<string, object>.ContainsKey(string key)
{
object dummy;
return TryGetValue(key, out dummy);
}
bool IDictionary<string, object>.Remove(string key)
{
if (_values.Remove(key))
{
RemoveProjections(key);
return true;
}
return false;
}
ICollection<string> IDictionary<string, object>.Keys
{
get { return _values.Keys; }
}
ICollection<object> IDictionary<string, object>.Values
{
get { return _values.Values; }
}
void ICollection<KeyValuePair<string, object>>.Add(KeyValuePair<string, object> item)
{
Add(item.Key, item.Value);
}
void ICollection<KeyValuePair<string, object>>.Clear()
{
_values.Clear();
_numericStringValues.Clear();
_projections.Clear();
}
bool ICollection<KeyValuePair<string, object>>.Contains(KeyValuePair<string, object> item)
{
object value;
return TryGetValue(item.Key, out value) && Equals(item.Value, value);
}
void ICollection<KeyValuePair<string, object>>.CopyTo(KeyValuePair<string, object>[] array, int arrayIndex)
{
((ICollection<KeyValuePair<string, object>>)_values).CopyTo(array, arrayIndex);
}
bool ICollection<KeyValuePair<string, object>>.Remove(KeyValuePair<string, object> item)
{
if (((ICollection<KeyValuePair<string, object>>)_values).Remove(item))
{
RemoveProjections(item.Key);
return true;
}
return false;
}
int ICollection<KeyValuePair<string, object>>.Count
{
get { return _values.Count; }
}
bool ICollection<KeyValuePair<string, object>>.IsReadOnly
{
get { return false; }
}
/// <summary>
/// Decrypts the specified property, changing its value in place.
/// </summary>
/// <param name="name">The name of the property to decrypt.</param>
/// <returns>This instance of <see cref="JsonObject"/>.</returns>
public JsonObject Decrypt(string name)
{
if (_info.EncryptionMechanism != null)
{
object value;
if (_values.TryGetValue(name, out value)
&& value is string)
{
var decryptedJson = _info.EncryptionMechanism.Decrypt(
(string)value, _info.EncryptKey, _info.SerializationState);
using (var stringReader = new StringReader(decryptedJson))
{
using (var reader = new JsonReader(stringReader, _info))
{
value = DynamicJsonSerializer.Get(false, JsonMappings.Empty).DeserializeObject(reader, _info);
if (value == null
|| value is bool
|| value is string
|| value is JsonArray
|| value is JsonObject)
{
_values[name] = value;
return this;
}
var jsonNumber = value as JsonNumber;
if (jsonNumber != null)
{
_values[name] = jsonNumber.DoubleValue;
_numericStringValues[name] = jsonNumber.StringValue;
return this;
}
throw new NotSupportedException("Unsupported value type: " + value.GetType());
}
}
}
}
return this;
}
/// <summary>
/// Encrypts the specified property, changing its value in place.
/// </summary>
/// <param name="name">The name of the property to encrypt.</param>
/// <returns>This instance of <see cref="JsonObject"/>.</returns>
public JsonObject Encrypt(string name)
{
if (_info.EncryptionMechanism != null)
{
object value;
if (_values.TryGetValue(name, out value)
&& value != null)
{
var sb = new StringBuilder();
using (var stringwriter = new StringWriter(sb))
{
using (var writer = new JsonWriter(stringwriter, _info))
{
DynamicJsonSerializer.Get(false, JsonMappings.Empty).SerializeObject(writer, value, _info);
}
}
value = _info.EncryptionMechanism.Encrypt(sb.ToString(), _info.EncryptKey, _info.SerializationState);
_values[name] = value;
}
}
return this;
}
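// Editor's note: a hedged usage sketch, not part of the original source. Encrypt
// and Decrypt are no-ops unless an IEncryptionMechanism was supplied to the
// constructor. "myMechanism" and "myKey" below are hypothetical placeholders for a
// concrete IEncryptionMechanism implementation and its key.
//
//   var json = new JsonObject(encryptionMechanism: myMechanism, encryptKey: myKey);
//   json.Add("Ssn", "123-45-6789");
//   json.Encrypt("Ssn"); // the stored value is replaced by its encrypted form
//   json.Decrypt("Ssn"); // the decrypted JSON is deserialized back in place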
/// <summary>
/// Gets the value of the specified property.
/// </summary>
/// <param name="name">The name of the property.</param>
/// <param name="result">
/// When this method returns, contains the value of the property, if the name exists; otherwise, null.
/// </param>
/// <returns>True if the JSON object contains the specified property; otherwise false.</returns>
public bool TryGetValue(string name, out object result)
{
if (_values.TryGetValue(name, out result)
|| _projections.TryGetValue(name, out result))
{
return true;
}
return TryGetProjection(name, ref result);
}
/// <summary>
/// Set the value of the specified property.
/// </summary>
/// <param name="name">The name of the property.</param>
/// <param name="value">The value of the property.</param>
/// <returns>True if the JSON object contains the specified property; otherwise false.</returns>
public bool TrySetValue(string name, object value)
{
return TrySetValueImpl(name, GuardValue(value));
}
private bool TrySetValueImpl(string name, object value)
{
if (_values.ContainsKey(name))
{
var jsonNumber = value as JsonNumber;
if (jsonNumber != null)
{
_values[name] = jsonNumber.DoubleValue;
_numericStringValues[name] = jsonNumber.StringValue;
}
else
{
_values[name] = value;
_numericStringValues.Remove(name);
}
RemoveProjections(name);
return true;
}
return false;
}
private static object GuardValue(object value)
{
if (value == null
|| value is bool
|| value is string
|| value is JsonNumber
|| value is JsonObject
|| value is JsonArray)
{
return value;
}
if (value is int
|| value is double
|| value is byte
|| value is long
|| value is decimal
|| value is uint
|| value is ulong
|| value is short
|| value is float
|| value is ushort
|| value is sbyte)
{
return new JsonNumber(value.ToString());
}
if (value is Guid)
{
var guid = (Guid)value;
return guid.ToString("D");
}
if (value is DateTime)
{
var dateTime = (DateTime)value;
return dateTime.ToString("O");
}
if (value is DateTimeOffset)
{
var dateTimeOffset = (DateTimeOffset)value;
return dateTimeOffset.ToString("O");
}
throw new XSerializerException("Invalid value for JsonObject member: " + value.GetType().FullName);
}
/// <summary>
/// Returns an enumerator that iterates through the collection.
/// </summary>
/// <returns>
/// A <see cref="IEnumerator{T}"/> that can be used to iterate through the collection.
/// </returns>
public IEnumerator<KeyValuePair<string, object>> GetEnumerator()
{
return _values.GetEnumerator();
}
IEnumerator IEnumerable.GetEnumerator()
{
return ((IEnumerable)_values).GetEnumerator();
}
/// <summary>
/// Determines whether the specified <see cref="object"/> is equal to the current <see cref="object"/>.
/// </summary>
/// <param name="obj">The <see cref="object"/> to compare with the current <see cref="object"/>.</param>
/// <returns>
/// true if the specified <see cref="object"/> is equal to the current <see cref="object"/>; otherwise, false.
/// </returns>
public override bool Equals(object obj)
{
var other = obj as JsonObject;
if (other == null)
{
return false;
}
foreach (var item in _values)
{
if (!other._values.ContainsKey(item.Key))
{
return false;
}
object value;
object otherValue;
if (_numericStringValues.ContainsKey(item.Key))
{
if (!other._numericStringValues.ContainsKey(item.Key))
{
return false;
}
value = _numericStringValues[item.Key];
otherValue = other._numericStringValues[item.Key];
}
else
{
value = _values[item.Key];
otherValue = other._values[item.Key];
}
if (!Equals(value, otherValue))
{
return false;
}
}
return true;
}
/// <summary>
/// Serves as a hash function for a particular type.
/// </summary>
/// <returns>
/// A hash code for the current <see cref="object"/>.
/// </returns>
public override int GetHashCode()
{
unchecked
{
var hashCode = typeof(JsonObject).GetHashCode();
foreach (var item in _values.OrderBy(x => x.Key))
{
hashCode = (hashCode * 397) ^ item.Key.GetHashCode();
hashCode = (hashCode * 397) ^ (item.Value != null ? item.Value.GetHashCode() : 0);
}
return hashCode;
}
}
/// <summary>
/// Provides the implementation for operations that get member values.
/// </summary>
/// <param name="binder">Provides information about the object that called the dynamic operation.</param>
/// <param name="result">The result of the get operation.</param>
/// <returns>
/// true if the operation is successful; otherwise, false.
/// </returns>
public override bool TryGetMember(GetMemberBinder binder, out object result)
{
return TryGetValue(binder.Name, out result);
}
/// <summary>
/// Provides the implementation for operations that set member values.
/// </summary>
/// <param name="binder">Provides information about the object that called the dynamic operation.</param>
/// <param name="value">The value to set to the member.</param>
/// <returns>
/// true if the operation is successful; otherwise, false.
/// </returns>
public override bool TrySetMember(SetMemberBinder binder, object value)
{
this[binder.Name] = value;
return true;
}
/// <summary>
/// Returns the enumeration of all dynamic member names.
/// </summary>
/// <returns>
/// A sequence that contains dynamic member names.
/// </returns>
public override IEnumerable<string> GetDynamicMemberNames()
{
return _values.Keys;
}
private bool TryGetProjection(string name, ref object result)
{
string modifiedName;
if (EndsWith(name, "AsByte", out modifiedName))
{
return AsByte(ref result, modifiedName, name);
}
if (EndsWith(name, "AsSByte", out modifiedName))
{
return AsSByte(ref result, modifiedName, name);
}
if (EndsWith(name, "AsInt16", out modifiedName))
{
return AsInt16(ref result, modifiedName, name);
}
if (EndsWith(name, "AsUInt16", out modifiedName))
{
return AsUInt16(ref result, modifiedName, name);
}
if (EndsWith(name, "AsInt32", out modifiedName))
{
return AsInt32(ref result, modifiedName, name);
}
if (EndsWith(name, "AsUInt32", out modifiedName))
{
return AsUInt32(ref result, modifiedName, name);
}
if (EndsWith(name, "AsInt64", out modifiedName))
{
return AsInt64(ref result, modifiedName, name);
}
if (EndsWith(name, "AsUInt64", out modifiedName))
{
return AsUInt64(ref result, modifiedName, name);
}
if (EndsWith(name, "AsDouble", out modifiedName))
{
return AsDouble(ref result, modifiedName);
}
if (EndsWith(name, "AsSingle", out modifiedName))
{
return AsSingle(ref result, modifiedName);
}
if (EndsWith(name, "AsDecimal", out modifiedName))
{
return AsDecimal(ref result, modifiedName, name);
}
if (EndsWith(name, "AsString", out modifiedName))
{
return AsString(ref result, modifiedName, name);
}
if (EndsWith(name, "AsDateTime", out modifiedName))
{
return AsDateTime(ref result, modifiedName, name);
}
if (EndsWith(name, "AsDateTimeOffset", out modifiedName))
{
return AsDateTimeOffset(ref result, modifiedName, name);
}
if (EndsWith(name, "AsGuid", out modifiedName))
{
return AsGuid(ref result, modifiedName, name);
}
return false;
}
private void RemoveProjections(string name)
{
var toRemove =
from projectionName in _projections.Keys
where projectionName.StartsWith(name) && _definedProjections.Any(projectionName.EndsWith)
select projectionName;
foreach (var key in toRemove)
{
_projections.Remove(key);
}
}
private static bool EndsWith(string binderName, string suffix, out string name)
{
if (binderName.EndsWith(suffix, StringComparison.InvariantCulture))
{
name = binderName.Substring(
0, binderName.LastIndexOf(suffix, StringComparison.InvariantCulture));
return true;
}
name = null;
return false;
}
private bool AsByte(ref object result, string name, string binderName)
{
string value;
if (_numericStringValues.TryGetValue(name, out value))
{
TruncateNumber(ref value);
byte byteResult;
if (byte.TryParse(value, out byteResult))
{
result = byteResult;
_projections.Add(binderName, result);
return true;
}
}
return false;
}
private bool AsSByte(ref object result, string name, string binderName)
{
string value;
if (_numericStringValues.TryGetValue(name, out value))
{
TruncateNumber(ref value);
sbyte sbyteResult;
if (sbyte.TryParse(value, out sbyteResult))
{
result = sbyteResult;
_projections.Add(binderName, result);
return true;
}
}
return false;
}
private bool AsInt16(ref object result, string name, string binderName)
{
string value;
if (_numericStringValues.TryGetValue(name, out value))
{
TruncateNumber(ref value);
short shortResult;
if (short.TryParse(value, out shortResult))
{
result = shortResult;
_projections.Add(binderName, result);
return true;
}
}
return false;
}
private bool AsUInt16(ref object result, string name, string binderName)
{
string value;
if (_numericStringValues.TryGetValue(name, out value))
{
TruncateNumber(ref value);
ushort ushortResult;
if (ushort.TryParse(value, out ushortResult))
{
result = ushortResult;
_projections.Add(binderName, result);
return true;
}
}
return false;
}
private bool AsInt32(ref object result, string name, string binderName)
{
string value;
if (_numericStringValues.TryGetValue(name, out value))
{
TruncateNumber(ref value);
int intResult;
if (int.TryParse(value, out intResult))
{
result = intResult;
_projections.Add(binderName, result);
return true;
}
}
return false;
}
private bool AsUInt32(ref object result, string name, string binderName)
{
string value;
if (_numericStringValues.TryGetValue(name, out value))
{
TruncateNumber(ref value);
uint uintResult;
if (uint.TryParse(value, out uintResult))
{
result = uintResult;
_projections.Add(binderName, result);
return true;
}
}
return false;
}
private bool AsInt64(ref object result, string name, string binderName)
{
string value;
if (_numericStringValues.TryGetValue(name, out value))
{
TruncateNumber(ref value);
long longResult;
if (long.TryParse(value, out longResult))
{
result = longResult;
_projections.Add(binderName, result);
return true;
}
}
return false;
}
private bool AsUInt64(ref object result, string name, string binderName)
{
string value;
if (_numericStringValues.TryGetValue(name, out value))
{
TruncateNumber(ref value);
ulong ulongResult;
if (ulong.TryParse(value, out ulongResult))
{
result = ulongResult;
_projections.Add(binderName, result);
return true;
}
}
return false;
}
private bool AsDouble(ref object result, string name)
{
object value;
if (_values.TryGetValue(name, out value)
&& value is double)
{
result = value;
return true;
}
return false;
}
private bool AsSingle(ref object result, string name)
{
object value;
if (_values.TryGetValue(name, out value)
&& value is double)
{
result = (float)((double)value);
return true;
}
return false;
}
private bool AsDecimal(ref object result, string name, string binderName)
{
string value;
if (_numericStringValues.TryGetValue(name, out value))
{
decimal decimalResult;
if (decimal.TryParse(value, NumberStyles.Float, CultureInfo.InvariantCulture, out decimalResult))
{
result = decimalResult;
_projections.Add(binderName, result);
return true;
}
}
return false;
}
private bool AsString(ref object result, string name, string binderName)
{
object value;
if (_values.TryGetValue(name, out value))
{
if (value == null)
{
result = null;
_projections.Add(binderName, result);
return true;
}
var stringValue = value as string;
if (stringValue != null)
{
result = stringValue;
_projections.Add(binderName, result);
return true;
}
}
return false;
}
private bool AsDateTime(ref object result, string name, string binderName)
{
object value;
if (_values.TryGetValue(name, out value))
{
if (value == null)
{
result = null;
_projections.Add(binderName, result);
return true;
}
var stringValue = value as string;
if (stringValue != null)
{
try
{
result = _info.DateTimeHandler.ParseDateTime(stringValue);
_projections.Add(binderName, result);
return true;
}
catch
{
return false;
}
}
}
return false;
}
private bool AsDateTimeOffset(ref object result, string name, string binderName)
{
object value;
if (_values.TryGetValue(name, out value))
{
if (value == null)
{
result = null;
_projections.Add(binderName, result);
return true;
}
var stringValue = value as string;
if (stringValue != null)
{
try
{
result = _info.DateTimeHandler.ParseDateTimeOffset(stringValue);
_projections.Add(binderName, result);
return true;
}
catch
{
return false;
}
}
}
return false;
}
private bool AsGuid(ref object result, string name, string binderName)
{
object value;
if (_values.TryGetValue(name, out value)
&& value is string)
{
Guid guidResult;
if (Guid.TryParse((string)value, out guidResult))
{
result = guidResult;
_projections.Add(binderName, result);
return true;
}
}
return false;
}
private static void TruncateNumber(ref string value)
{
if (value.Contains('.') || value.Contains('e') || value.Contains('E'))
{
// Parse with the invariant culture so the '.' decimal separator is always honored.
var d = double.Parse(value, CultureInfo.InvariantCulture);
d = Math.Truncate(d);
value = d.ToString(NumberFormatInfo.InvariantInfo);
}
}
}
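// Editor's note: the class below is an illustrative sketch added for this document,
// not part of the original XSerializer source. It exercises only the JsonObject API
// shown above: Add, the indexer/dynamic member access, and the "As*" projection
// suffixes resolved by TryGetProjection.
internal static class JsonObjectUsageSketch
{
    internal static void Demo()
    {
        dynamic json = new JsonObject();
        json.Quantity = 3;                    // numbers are stored as JsonNumber (double + raw string)
        json.When = new DateTime(2020, 1, 2); // DateTime values are stored as ISO-8601 ("O") strings
        // Plain access returns the stored representation.
        double quantity = json.Quantity;      // 3.0
        string when = json.When;              // "2020-01-02T00:00:00.0000000"
        // Appending a defined suffix requests a typed projection of the member.
        int quantityAsInt = json.QuantityAsInt32;  // parsed from the raw numeric string
        DateTime whenAsDate = json.WhenAsDateTime; // parsed by the configured IDateTimeHandler
    }
}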
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.16.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.Internal.Resources
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Microsoft.Rest.Azure.OData;
using Microsoft.Rest.Azure;
using Models;
/// <summary>
/// Extension methods for ManagementLocksOperations.
/// </summary>
public static partial class ManagementLocksOperationsExtensions
{
/// <summary>
/// Creates or updates a management lock at the resource group level.
/// </summary>
/// When you apply a lock at a parent scope, all child resources inherit the
/// same lock. To create management locks, you must have access to
/// Microsoft.Authorization/* or Microsoft.Authorization/locks/* actions. Of
/// the built-in roles, only Owner and User Access Administrator are granted
/// those actions.
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group to lock.
/// </param>
/// <param name='lockName'>
/// The lock name. The lock name can be a maximum of 260 characters. It cannot
/// contain <, >, %, &, :, \, ?, /, or any control characters.
/// </param>
/// <param name='parameters'>
/// The management lock parameters.
/// </param>
public static ManagementLockObject CreateOrUpdateAtResourceGroupLevel(this IManagementLocksOperations operations, string resourceGroupName, string lockName, ManagementLockObject parameters)
{
return Task.Factory.StartNew(s => ((IManagementLocksOperations)s).CreateOrUpdateAtResourceGroupLevelAsync(resourceGroupName, lockName, parameters), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Creates or updates a management lock at the resource group level.
/// </summary>
/// When you apply a lock at a parent scope, all child resources inherit the
/// same lock. To create management locks, you must have access to
/// Microsoft.Authorization/* or Microsoft.Authorization/locks/* actions. Of
/// the built-in roles, only Owner and User Access Administrator are granted
/// those actions.
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group to lock.
/// </param>
/// <param name='lockName'>
/// The lock name. The lock name can be a maximum of 260 characters. It cannot
/// contain <, >, %, &, :, \, ?, /, or any control characters.
/// </param>
/// <param name='parameters'>
/// The management lock parameters.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<ManagementLockObject> CreateOrUpdateAtResourceGroupLevelAsync(this IManagementLocksOperations operations, string resourceGroupName, string lockName, ManagementLockObject parameters, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.CreateOrUpdateAtResourceGroupLevelWithHttpMessagesAsync(resourceGroupName, lockName, parameters, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Deletes a management lock at the resource group level.
/// </summary>
/// To delete management locks, you must have access to
/// Microsoft.Authorization/* or Microsoft.Authorization/locks/* actions. Of
/// the built-in roles, only Owner and User Access Administrator are granted
/// those actions.
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group containing the lock.
/// </param>
/// <param name='lockName'>
/// The name of lock to delete.
/// </param>
public static void DeleteAtResourceGroupLevel(this IManagementLocksOperations operations, string resourceGroupName, string lockName)
{
Task.Factory.StartNew(s => ((IManagementLocksOperations)s).DeleteAtResourceGroupLevelAsync(resourceGroupName, lockName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Deletes a management lock at the resource group level.
/// </summary>
/// To delete management locks, you must have access to
/// Microsoft.Authorization/* or Microsoft.Authorization/locks/* actions. Of
/// the built-in roles, only Owner and User Access Administrator are granted
/// those actions.
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group containing the lock.
/// </param>
/// <param name='lockName'>
/// The name of lock to delete.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task DeleteAtResourceGroupLevelAsync(this IManagementLocksOperations operations, string resourceGroupName, string lockName, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.DeleteAtResourceGroupLevelWithHttpMessagesAsync(resourceGroupName, lockName, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Gets a management lock at the resource group level.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the locked resource group.
/// </param>
/// <param name='lockName'>
/// The name of the lock to get.
/// </param>
public static ManagementLockObject GetAtResourceGroupLevel(this IManagementLocksOperations operations, string resourceGroupName, string lockName)
{
return Task.Factory.StartNew(s => ((IManagementLocksOperations)s).GetAtResourceGroupLevelAsync(resourceGroupName, lockName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets a management lock at the resource group level.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the locked resource group.
/// </param>
/// <param name='lockName'>
/// The name of the lock to get.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<ManagementLockObject> GetAtResourceGroupLevelAsync(this IManagementLocksOperations operations, string resourceGroupName, string lockName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetAtResourceGroupLevelWithHttpMessagesAsync(resourceGroupName, lockName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Create or update a management lock by scope.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='scope'>
/// The scope for the lock. When providing a scope for the assignment, use
/// '/subscriptions/{subscriptionId}' for subscriptions,
/// '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}' for
/// resource groups, and
/// '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePathIfPresent}/{resourceType}/{resourceName}'
/// for resources.
/// </param>
/// <param name='lockName'>
/// The name of lock.
/// </param>
/// <param name='parameters'>
/// Create or update management lock parameters.
/// </param>
public static ManagementLockObject CreateOrUpdateByScope(this IManagementLocksOperations operations, string scope, string lockName, ManagementLockObject parameters)
{
return Task.Factory.StartNew(s => ((IManagementLocksOperations)s).CreateOrUpdateByScopeAsync(scope, lockName, parameters), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Create or update a management lock by scope.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='scope'>
/// The scope for the lock. When providing a scope for the assignment, use
/// '/subscriptions/{subscriptionId}' for subscriptions,
/// '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}' for
/// resource groups, and
/// '/subscriptions/{subscriptionId}/resourcegroups/{resourceGroupName}/providers/{resourceProviderNamespace}/{parentResourcePathIfPresent}/{resourceType}/{resourceName}'
/// for resources.
/// </param>
/// <param name='lockName'>
/// The name of lock.
/// </param>
/// <param name='parameters'>
/// Create or update management lock parameters.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<ManagementLockObject> CreateOrUpdateByScopeAsync(this IManagementLocksOperations operations, string scope, string lockName, ManagementLockObject parameters, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.CreateOrUpdateByScopeWithHttpMessagesAsync(scope, lockName, parameters, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Delete a management lock by scope.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='scope'>
/// The scope for the lock.
/// </param>
/// <param name='lockName'>
/// The name of lock.
/// </param>
public static void DeleteByScope(this IManagementLocksOperations operations, string scope, string lockName)
{
Task.Factory.StartNew(s => ((IManagementLocksOperations)s).DeleteByScopeAsync(scope, lockName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Delete a management lock by scope.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='scope'>
/// The scope for the lock.
/// </param>
/// <param name='lockName'>
/// The name of lock.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task DeleteByScopeAsync(this IManagementLocksOperations operations, string scope, string lockName, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.DeleteByScopeWithHttpMessagesAsync(scope, lockName, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Get a management lock by scope.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='scope'>
/// The scope for the lock.
/// </param>
/// <param name='lockName'>
/// The name of lock.
/// </param>
public static ManagementLockObject GetByScope(this IManagementLocksOperations operations, string scope, string lockName)
{
return Task.Factory.StartNew(s => ((IManagementLocksOperations)s).GetByScopeAsync(scope, lockName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get a management lock by scope.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='scope'>
/// The scope for the lock.
/// </param>
/// <param name='lockName'>
/// The name of lock.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<ManagementLockObject> GetByScopeAsync(this IManagementLocksOperations operations, string scope, string lockName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetByScopeWithHttpMessagesAsync(scope, lockName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Creates or updates a management lock at the resource level or any level
/// below the resource.
/// </summary>
/// When you apply a lock at a parent scope, all child resources inherit the
/// same lock. To create management locks, you must have access to
/// Microsoft.Authorization/* or Microsoft.Authorization/locks/* actions. Of
/// the built-in roles, only Owner and User Access Administrator are granted
/// those actions.
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group containing the resource to lock.
/// </param>
/// <param name='resourceProviderNamespace'>
/// The resource provider namespace of the resource to lock.
/// </param>
/// <param name='parentResourcePath'>
/// The parent resource identity.
/// </param>
/// <param name='resourceType'>
/// The resource type of the resource to lock.
/// </param>
/// <param name='resourceName'>
/// The name of the resource to lock.
/// </param>
/// <param name='lockName'>
/// The name of lock. The lock name can be a maximum of 260 characters. It cannot
/// contain <, >, %, &, :, \, ?, /, or any control characters.
/// </param>
/// <param name='parameters'>
/// Parameters for creating or updating a management lock.
/// </param>
public static ManagementLockObject CreateOrUpdateAtResourceLevel(this IManagementLocksOperations operations, string resourceGroupName, string resourceProviderNamespace, string parentResourcePath, string resourceType, string resourceName, string lockName, ManagementLockObject parameters)
{
return Task.Factory.StartNew(s => ((IManagementLocksOperations)s).CreateOrUpdateAtResourceLevelAsync(resourceGroupName, resourceProviderNamespace, parentResourcePath, resourceType, resourceName, lockName, parameters), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Creates or updates a management lock at the resource level or any level
/// below the resource.
/// </summary>
/// When you apply a lock at a parent scope, all child resources inherit the
/// same lock. To create management locks, you must have access to
/// Microsoft.Authorization/* or Microsoft.Authorization/locks/* actions. Of
/// the built-in roles, only Owner and User Access Administrator are granted
/// those actions.
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group containing the resource to lock.
/// </param>
/// <param name='resourceProviderNamespace'>
/// The resource provider namespace of the resource to lock.
/// </param>
/// <param name='parentResourcePath'>
/// The parent resource identity.
/// </param>
/// <param name='resourceType'>
/// The resource type of the resource to lock.
/// </param>
/// <param name='resourceName'>
/// The name of the resource to lock.
/// </param>
/// <param name='lockName'>
/// The name of lock. The lock name can be a maximum of 260 characters. It cannot
/// contain <, >, %, &, :, \, ?, /, or any control characters.
/// </param>
/// <param name='parameters'>
/// Parameters for creating or updating a management lock.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<ManagementLockObject> CreateOrUpdateAtResourceLevelAsync(this IManagementLocksOperations operations, string resourceGroupName, string resourceProviderNamespace, string parentResourcePath, string resourceType, string resourceName, string lockName, ManagementLockObject parameters, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.CreateOrUpdateAtResourceLevelWithHttpMessagesAsync(resourceGroupName, resourceProviderNamespace, parentResourcePath, resourceType, resourceName, lockName, parameters, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Deletes the management lock of a resource or any level below the resource.
/// </summary>
/// To delete management locks, you must have access to
/// Microsoft.Authorization/* or Microsoft.Authorization/locks/* actions. Of
/// the built-in roles, only Owner and User Access Administrator are granted
/// those actions.
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group containing the resource with the lock to
/// delete.
/// </param>
/// <param name='resourceProviderNamespace'>
/// The resource provider namespace of the resource with the lock to delete.
/// </param>
/// <param name='parentResourcePath'>
/// The parent resource identity.
/// </param>
/// <param name='resourceType'>
/// The resource type of the resource with the lock to delete.
/// </param>
/// <param name='resourceName'>
/// The name of the resource with the lock to delete.
/// </param>
/// <param name='lockName'>
/// The name of the lock to delete.
/// </param>
public static void DeleteAtResourceLevel(this IManagementLocksOperations operations, string resourceGroupName, string resourceProviderNamespace, string parentResourcePath, string resourceType, string resourceName, string lockName)
{
Task.Factory.StartNew(s => ((IManagementLocksOperations)s).DeleteAtResourceLevelAsync(resourceGroupName, resourceProviderNamespace, parentResourcePath, resourceType, resourceName, lockName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Deletes the management lock of a resource or any level below the resource.
/// </summary>
/// To delete management locks, you must have access to
/// Microsoft.Authorization/* or Microsoft.Authorization/locks/* actions. Of
/// the built-in roles, only Owner and User Access Administrator are granted
/// those actions.
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group containing the resource with the lock to
/// delete.
/// </param>
/// <param name='resourceProviderNamespace'>
/// The resource provider namespace of the resource with the lock to delete.
/// </param>
/// <param name='parentResourcePath'>
/// The parent resource identity.
/// </param>
/// <param name='resourceType'>
/// The resource type of the resource with the lock to delete.
/// </param>
/// <param name='resourceName'>
/// The name of the resource with the lock to delete.
/// </param>
/// <param name='lockName'>
/// The name of the lock to delete.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task DeleteAtResourceLevelAsync(this IManagementLocksOperations operations, string resourceGroupName, string resourceProviderNamespace, string parentResourcePath, string resourceType, string resourceName, string lockName, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.DeleteAtResourceLevelWithHttpMessagesAsync(resourceGroupName, resourceProviderNamespace, parentResourcePath, resourceType, resourceName, lockName, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Get the management lock of a resource or any level below resource.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='resourceProviderNamespace'>
/// The namespace of the resource provider.
/// </param>
/// <param name='parentResourcePath'>
/// An extra path parameter needed in some services, like SQL Databases.
/// </param>
/// <param name='resourceType'>
/// The type of the resource.
/// </param>
/// <param name='resourceName'>
/// The name of the resource.
/// </param>
/// <param name='lockName'>
/// The name of lock.
/// </param>
public static ManagementLockObject GetAtResourceLevel(this IManagementLocksOperations operations, string resourceGroupName, string resourceProviderNamespace, string parentResourcePath, string resourceType, string resourceName, string lockName)
{
return Task.Factory.StartNew(s => ((IManagementLocksOperations)s).GetAtResourceLevelAsync(resourceGroupName, resourceProviderNamespace, parentResourcePath, resourceType, resourceName, lockName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Get the management lock of a resource or any level below resource.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='resourceProviderNamespace'>
/// The namespace of the resource provider.
/// </param>
/// <param name='parentResourcePath'>
/// An extra path parameter needed in some services, like SQL Databases.
/// </param>
/// <param name='resourceType'>
/// The type of the resource.
/// </param>
/// <param name='resourceName'>
/// The name of the resource.
/// </param>
/// <param name='lockName'>
/// The name of lock.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<ManagementLockObject> GetAtResourceLevelAsync(this IManagementLocksOperations operations, string resourceGroupName, string resourceProviderNamespace, string parentResourcePath, string resourceType, string resourceName, string lockName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetAtResourceLevelWithHttpMessagesAsync(resourceGroupName, resourceProviderNamespace, parentResourcePath, resourceType, resourceName, lockName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Creates or updates a management lock at the subscription level.
/// </summary>
/// When you apply a lock at a parent scope, all child resources inherit the
/// same lock. To create management locks, you must have access to
/// Microsoft.Authorization/* or Microsoft.Authorization/locks/* actions. Of
/// the built-in roles, only Owner and User Access Administrator are granted
/// those actions.
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='lockName'>
/// The name of lock. The lock name can be a maximum of 260 characters. It cannot
/// contain <, >, %, &, :, \, ?, /, or any control characters.
/// </param>
/// <param name='parameters'>
/// The management lock parameters.
/// </param>
public static ManagementLockObject CreateOrUpdateAtSubscriptionLevel(this IManagementLocksOperations operations, string lockName, ManagementLockObject parameters)
{
return Task.Factory.StartNew(s => ((IManagementLocksOperations)s).CreateOrUpdateAtSubscriptionLevelAsync(lockName, parameters), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Creates or updates a management lock at the subscription level.
/// </summary>
/// When you apply a lock at a parent scope, all child resources inherit the
/// same lock. To create management locks, you must have access to
/// Microsoft.Authorization/* or Microsoft.Authorization/locks/* actions. Of
/// the built-in roles, only Owner and User Access Administrator are granted
/// those actions.
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='lockName'>
/// The name of lock. The lock name can be a maximum of 260 characters. It cannot
/// contain <, >, %, &, :, \, ?, /, or any control characters.
/// </param>
/// <param name='parameters'>
/// The management lock parameters.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<ManagementLockObject> CreateOrUpdateAtSubscriptionLevelAsync(this IManagementLocksOperations operations, string lockName, ManagementLockObject parameters, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.CreateOrUpdateAtSubscriptionLevelWithHttpMessagesAsync(lockName, parameters, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Deletes the management lock at the subscription level.
/// </summary>
/// To delete management locks, you must have access to
/// Microsoft.Authorization/* or Microsoft.Authorization/locks/* actions. Of
/// the built-in roles, only Owner and User Access Administrator are granted
/// those actions.
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='lockName'>
/// The name of lock to delete.
/// </param>
public static void DeleteAtSubscriptionLevel(this IManagementLocksOperations operations, string lockName)
{
Task.Factory.StartNew(s => ((IManagementLocksOperations)s).DeleteAtSubscriptionLevelAsync(lockName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Deletes the management lock at the subscription level.
/// </summary>
/// To delete management locks, you must have access to
/// Microsoft.Authorization/* or Microsoft.Authorization/locks/* actions. Of
/// the built-in roles, only Owner and User Access Administrator are granted
/// those actions.
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='lockName'>
/// The name of lock to delete.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task DeleteAtSubscriptionLevelAsync(this IManagementLocksOperations operations, string lockName, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.DeleteAtSubscriptionLevelWithHttpMessagesAsync(lockName, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Gets a management lock at the subscription level.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='lockName'>
/// The name of the lock to get.
/// </param>
public static ManagementLockObject GetAtSubscriptionLevel(this IManagementLocksOperations operations, string lockName)
{
return Task.Factory.StartNew(s => ((IManagementLocksOperations)s).GetAtSubscriptionLevelAsync(lockName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets a management lock at the subscription level.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='lockName'>
/// The name of the lock to get.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<ManagementLockObject> GetAtSubscriptionLevelAsync(this IManagementLocksOperations operations, string lockName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetAtSubscriptionLevelWithHttpMessagesAsync(lockName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Gets all the management locks for a resource group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group containing the locks to get.
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
public static IPage<ManagementLockObject> ListAtResourceGroupLevel(this IManagementLocksOperations operations, string resourceGroupName, ODataQuery<ManagementLockObject> odataQuery = default(ODataQuery<ManagementLockObject>))
{
return Task.Factory.StartNew(s => ((IManagementLocksOperations)s).ListAtResourceGroupLevelAsync(resourceGroupName, odataQuery), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets all the management locks for a resource group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group containing the locks to get.
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<ManagementLockObject>> ListAtResourceGroupLevelAsync(this IManagementLocksOperations operations, string resourceGroupName, ODataQuery<ManagementLockObject> odataQuery = default(ODataQuery<ManagementLockObject>), CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListAtResourceGroupLevelWithHttpMessagesAsync(resourceGroupName, odataQuery, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Gets all the management locks for a resource or any level below resource.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group containing the locked resource. The name is
/// case insensitive.
/// </param>
/// <param name='resourceProviderNamespace'>
/// The namespace of the resource provider.
/// </param>
/// <param name='parentResourcePath'>
/// The parent resource identity.
/// </param>
/// <param name='resourceType'>
/// The resource type of the locked resource.
/// </param>
/// <param name='resourceName'>
/// The name of the locked resource.
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
public static IPage<ManagementLockObject> ListAtResourceLevel(this IManagementLocksOperations operations, string resourceGroupName, string resourceProviderNamespace, string parentResourcePath, string resourceType, string resourceName, ODataQuery<ManagementLockObject> odataQuery = default(ODataQuery<ManagementLockObject>))
{
return Task.Factory.StartNew(s => ((IManagementLocksOperations)s).ListAtResourceLevelAsync(resourceGroupName, resourceProviderNamespace, parentResourcePath, resourceType, resourceName, odataQuery), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets all the management locks for a resource or any level below resource.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the resource group containing the locked resource. The name is
/// case insensitive.
/// </param>
/// <param name='resourceProviderNamespace'>
/// The namespace of the resource provider.
/// </param>
/// <param name='parentResourcePath'>
/// The parent resource identity.
/// </param>
/// <param name='resourceType'>
/// The resource type of the locked resource.
/// </param>
/// <param name='resourceName'>
/// The name of the locked resource.
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<ManagementLockObject>> ListAtResourceLevelAsync(this IManagementLocksOperations operations, string resourceGroupName, string resourceProviderNamespace, string parentResourcePath, string resourceType, string resourceName, ODataQuery<ManagementLockObject> odataQuery = default(ODataQuery<ManagementLockObject>), CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListAtResourceLevelWithHttpMessagesAsync(resourceGroupName, resourceProviderNamespace, parentResourcePath, resourceType, resourceName, odataQuery, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Gets all the management locks for a subscription.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
public static IPage<ManagementLockObject> ListAtSubscriptionLevel(this IManagementLocksOperations operations, ODataQuery<ManagementLockObject> odataQuery = default(ODataQuery<ManagementLockObject>))
{
return Task.Factory.StartNew(s => ((IManagementLocksOperations)s).ListAtSubscriptionLevelAsync(odataQuery), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets all the management locks for a subscription.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<ManagementLockObject>> ListAtSubscriptionLevelAsync(this IManagementLocksOperations operations, ODataQuery<ManagementLockObject> odataQuery = default(ODataQuery<ManagementLockObject>), CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListAtSubscriptionLevelWithHttpMessagesAsync(odataQuery, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Gets all the management locks for a resource group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static IPage<ManagementLockObject> ListAtResourceGroupLevelNext(this IManagementLocksOperations operations, string nextPageLink)
{
return Task.Factory.StartNew(s => ((IManagementLocksOperations)s).ListAtResourceGroupLevelNextAsync(nextPageLink), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets all the management locks for a resource group.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<ManagementLockObject>> ListAtResourceGroupLevelNextAsync(this IManagementLocksOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListAtResourceGroupLevelNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Gets all the management locks for a resource or any level below resource.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static IPage<ManagementLockObject> ListAtResourceLevelNext(this IManagementLocksOperations operations, string nextPageLink)
{
return Task.Factory.StartNew(s => ((IManagementLocksOperations)s).ListAtResourceLevelNextAsync(nextPageLink), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets all the management locks for a resource or any level below resource.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<ManagementLockObject>> ListAtResourceLevelNextAsync(this IManagementLocksOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListAtResourceLevelNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Gets all the management locks for a subscription.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static IPage<ManagementLockObject> ListAtSubscriptionLevelNext(this IManagementLocksOperations operations, string nextPageLink)
{
return Task.Factory.StartNew(s => ((IManagementLocksOperations)s).ListAtSubscriptionLevelNextAsync(nextPageLink), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets all the management locks for a subscription.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<ManagementLockObject>> ListAtSubscriptionLevelNextAsync(this IManagementLocksOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListAtSubscriptionLevelNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
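// Illustrative paging sketch (not part of the generated SDK). It assumes an authenticated
// IManagementLocksOperations instance named 'operations' and relies on IPage<T>.NextPageLink
// being empty once the service has no further results. Inside an async method:
//
//   var page = await operations.ListAtSubscriptionLevelAsync();
//   while (true)
//   {
//       foreach (var managementLock in page) { /* inspect each ManagementLockObject */ }
//       if (string.IsNullOrEmpty(page.NextPageLink)) break;
//       page = await operations.ListAtSubscriptionLevelNextAsync(page.NextPageLink);
//   }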
}
}
| |
//#define USE_SharpZipLib
#if !UNITY_WEBPLAYER
#define USE_FileIO
#endif
/* * * * *
* A simple JSON Parser / builder
* ------------------------------
*
* It has mainly been written as a simple JSON parser. It can build a JSON string
* from the node-tree, or generate a node tree from any valid JSON string.
*
* If you want to use compression when saving to file / stream / B64 you have to include
* SharpZipLib ( http://www.icsharpcode.net/opensource/sharpziplib/ ) in your project and
* define "USE_SharpZipLib" at the top of the file
*
* Written by Bunny83
* 2012-06-09
*
* Features / attributes:
* - provides strongly typed node classes and lists / dictionaries
* - provides easy access to class members / array items / data values
* - the parser ignores data types. Each value is a string.
* - only double quotes (") are used for quoting strings.
* - values and names are not restricted to quoted strings. They simply add up and are trimmed.
* - There are only 3 types: arrays(JSONArray), objects(JSONClass) and values(JSONData)
* - provides "casting" properties to easily convert to / from those types:
* int / float / double / bool
* - provides a common interface for each node so no explicit casting is required.
* - the parser tries to avoid errors, but if malformed JSON is parsed the result is undefined
*
*
* 2012-12-17 Update:
* - Added internal JSONLazyCreator class which simplifies the construction of a JSON tree
* Now you can simply reference any item that doesn't exist yet and it will return a JSONLazyCreator
* The class determines the required type by its further use, creates the type and removes itself.
* - Added binary serialization / deserialization.
* - Added support for BZip2 zipped binary format. Requires the SharpZipLib ( http://www.icsharpcode.net/opensource/sharpziplib/ )
* The usage of the SharpZipLib library can be disabled by removing or commenting out the USE_SharpZipLib define at the top
* - The serializer uses different types when it comes to storing the values. Since my data values
* are all of type string, the serializer tries each format and picks the first one that round-trips. The order is: int, float, double, bool, string.
* It's not the most efficient way but for a moderate amount of data it should work on all platforms.
*
* * * * */
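/* Usage sketch (illustrative addition, not part of the original library):
 *
 *   var node = JSON.Parse("{\"name\":\"value\", \"items\":[1, 2, 3]}");
 *   string name = node["name"];                  // implicit JSONNode -> string conversion
 *   int first   = node["items"][0].AsInt;        // typed access via the casting properties
 *   node["new"]["nested"] = "created on demand"; // JSONLazyCreator builds the missing nodes
 *   string json = node.ToString();               // serialize the tree back to JSON text
 */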
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
namespace SimpleJSON
{
public enum JSONBinaryTag
{
Array = 1,
Class = 2,
Value = 3,
IntValue = 4,
DoubleValue = 5,
BoolValue = 6,
FloatValue = 7,
}
public class JSONNode
{
#region common interface
public virtual void Add(string aKey, JSONNode aItem){ }
public virtual JSONNode this[int aIndex] { get { return null; } set { } }
public virtual JSONNode this[string aKey] { get { return null; } set { } }
public virtual string Value { get { return ""; } set { } }
public virtual int Count { get { return 0; } }
public virtual void Add(JSONNode aItem)
{
Add("", aItem);
}
public virtual JSONNode Remove(string aKey) { return null; }
public virtual JSONNode Remove(int aIndex) { return null; }
public virtual JSONNode Remove(JSONNode aNode) { return aNode; }
public virtual IEnumerable<JSONNode> Childs { get { yield break;} }
public IEnumerable<JSONNode> DeepChilds
{
get
{
foreach (var C in Childs)
foreach (var D in C.DeepChilds)
yield return D;
}
}
public override string ToString()
{
return "JSONNode";
}
public virtual string ToString(string aPrefix)
{
return "JSONNode";
}
#endregion common interface
#region typecasting properties
public virtual int AsInt
{
get
{
int v = 0;
if (int.TryParse(Value,out v))
return v;
return 0;
}
set
{
Value = value.ToString();
}
}
public virtual float AsFloat
{
get
{
float v = 0.0f;
if (float.TryParse(Value,out v))
return v;
return 0.0f;
}
set
{
Value = value.ToString();
}
}
public virtual double AsDouble
{
get
{
double v = 0.0;
if (double.TryParse(Value,out v))
return v;
return 0.0;
}
set
{
Value = value.ToString();
}
}
public virtual bool AsBool
{
get
{
bool v = false;
if (bool.TryParse(Value,out v))
return v;
return !string.IsNullOrEmpty(Value);
}
set
{
Value = (value)?"true":"false";
}
}
public virtual JSONArray AsArray
{
get
{
return this as JSONArray;
}
}
public virtual JSONClass AsObject
{
get
{
return this as JSONClass;
}
}
#endregion typecasting properties
#region operators
public static implicit operator JSONNode(string s)
{
return new JSONData(s);
}
public static implicit operator string(JSONNode d)
{
return (d == null)?null:d.Value;
}
public static bool operator ==(JSONNode a, object b)
{
if (b == null && a is JSONLazyCreator)
return true;
return System.Object.ReferenceEquals(a,b);
}
public static bool operator !=(JSONNode a, object b)
{
return !(a == b);
}
public override bool Equals (object obj)
{
return System.Object.ReferenceEquals(this, obj);
}
public override int GetHashCode ()
{
return base.GetHashCode();
}
#endregion operators
internal static string Escape(string aText)
{
string result = "";
foreach(char c in aText)
{
switch(c)
{
case '\\' : result += "\\\\"; break;
case '\"' : result += "\\\""; break;
case '\n' : result += "\\n" ; break;
case '\r' : result += "\\r" ; break;
case '\t' : result += "\\t" ; break;
case '\b' : result += "\\b" ; break;
case '\f' : result += "\\f" ; break;
default : result += c ; break;
}
}
return result;
}
public static JSONNode Parse(string aJSON)
{
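// Single-pass, stack-based parser: '{' and '[' push a new container node and attach it to the
// current context (ctx); ':' turns the pending token into a key name; ',' and the closing
// brackets flush the pending token into ctx; '"' toggles QuoteMode and '\\' handles escapes.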
Stack<JSONNode> stack = new Stack<JSONNode>();
JSONNode ctx = null;
int i = 0;
string Token = "";
string TokenName = "";
bool QuoteMode = false;
while (i < aJSON.Length)
{
switch (aJSON[i])
{
case '{':
if (QuoteMode)
{
Token += aJSON[i];
break;
}
stack.Push(new JSONClass());
if (ctx != null)
{
TokenName = TokenName.Trim();
if (ctx is JSONArray)
ctx.Add(stack.Peek());
else if (TokenName != "")
ctx.Add(TokenName,stack.Peek());
}
TokenName = "";
Token = "";
ctx = stack.Peek();
break;
case '[':
if (QuoteMode)
{
Token += aJSON[i];
break;
}
stack.Push(new JSONArray());
if (ctx != null)
{
TokenName = TokenName.Trim();
if (ctx is JSONArray)
ctx.Add(stack.Peek());
else if (TokenName != "")
ctx.Add(TokenName,stack.Peek());
}
TokenName = "";
Token = "";
ctx = stack.Peek();
break;
case '}':
case ']':
if (QuoteMode)
{
Token += aJSON[i];
break;
}
if (stack.Count == 0)
throw new Exception("JSON Parse: Too many closing brackets");
stack.Pop();
if (Token != "")
{
TokenName = TokenName.Trim();
if (ctx is JSONArray)
ctx.Add(Token);
else if (TokenName != "")
ctx.Add(TokenName,Token);
}
TokenName = "";
Token = "";
if (stack.Count>0)
ctx = stack.Peek();
break;
case ':':
if (QuoteMode)
{
Token += aJSON[i];
break;
}
TokenName = Token;
Token = "";
break;
case '"':
QuoteMode ^= true;
break;
case ',':
if (QuoteMode)
{
Token += aJSON[i];
break;
}
if (Token != "")
{
if (ctx is JSONArray)
ctx.Add(Token);
else if (TokenName != "")
ctx.Add(TokenName, Token);
}
TokenName = "";
Token = "";
break;
case '\r':
case '\n':
break;
case ' ':
case '\t':
if (QuoteMode)
Token += aJSON[i];
break;
case '\\':
++i;
if (QuoteMode)
{
char C = aJSON[i];
switch (C)
{
case 't' : Token += '\t'; break;
case 'r' : Token += '\r'; break;
case 'n' : Token += '\n'; break;
case 'b' : Token += '\b'; break;
case 'f' : Token += '\f'; break;
case 'u':
{
string s = aJSON.Substring(i+1,4);
Token += (char)int.Parse(s, System.Globalization.NumberStyles.AllowHexSpecifier);
i += 4;
break;
}
default : Token += C; break;
}
}
break;
default:
Token += aJSON[i];
break;
}
++i;
}
if (QuoteMode)
{
throw new Exception("JSON Parse: Quotation marks seems to be messed up.");
}
return ctx;
}
public virtual void Serialize(System.IO.BinaryWriter aWriter) {}
public void SaveToStream(System.IO.Stream aData)
{
var W = new System.IO.BinaryWriter(aData);
Serialize(W);
}
#if USE_SharpZipLib
public void SaveToCompressedStream(System.IO.Stream aData)
{
using (var gzipOut = new ICSharpCode.SharpZipLib.BZip2.BZip2OutputStream(aData))
{
gzipOut.IsStreamOwner = false;
SaveToStream(gzipOut);
gzipOut.Close();
}
}
public void SaveToCompressedFile(string aFileName)
{
#if USE_FileIO
System.IO.Directory.CreateDirectory((new System.IO.FileInfo(aFileName)).Directory.FullName);
using(var F = System.IO.File.OpenWrite(aFileName))
{
SaveToCompressedStream(F);
}
#else
throw new Exception("Can't use File IO stuff in webplayer");
#endif
}
public string SaveToCompressedBase64()
{
using (var stream = new System.IO.MemoryStream())
{
SaveToCompressedStream(stream);
stream.Position = 0;
return System.Convert.ToBase64String(stream.ToArray());
}
}
#else
public void SaveToCompressedStream(System.IO.Stream aData)
{
throw new Exception("Can't use compressed functions. You need to include SharpZipLib and uncomment the USE_SharpZipLib define at the top of SimpleJSON");
}
public void SaveToCompressedFile(string aFileName)
{
throw new Exception("Can't use compressed functions. You need to include SharpZipLib and uncomment the USE_SharpZipLib define at the top of SimpleJSON");
}
public string SaveToCompressedBase64()
{
throw new Exception("Can't use compressed functions. You need to include SharpZipLib and uncomment the USE_SharpZipLib define at the top of SimpleJSON");
}
#endif
public void SaveToFile(string aFileName)
{
#if USE_FileIO
System.IO.Directory.CreateDirectory((new System.IO.FileInfo(aFileName)).Directory.FullName);
using(var F = System.IO.File.OpenWrite(aFileName))
{
SaveToStream(F);
}
#else
throw new Exception("Can't use File IO stuff in webplayer");
#endif
}
public string SaveToBase64()
{
using (var stream = new System.IO.MemoryStream())
{
SaveToStream(stream);
stream.Position = 0;
return System.Convert.ToBase64String(stream.ToArray());
}
}
public static JSONNode Deserialize(System.IO.BinaryReader aReader)
{
JSONBinaryTag type = (JSONBinaryTag)aReader.ReadByte();
switch(type)
{
case JSONBinaryTag.Array:
{
int count = aReader.ReadInt32();
JSONArray tmp = new JSONArray();
for(int i = 0; i < count; i++)
tmp.Add(Deserialize(aReader));
return tmp;
}
case JSONBinaryTag.Class:
{
int count = aReader.ReadInt32();
JSONClass tmp = new JSONClass();
for(int i = 0; i < count; i++)
{
string key = aReader.ReadString();
var val = Deserialize(aReader);
tmp.Add(key, val);
}
return tmp;
}
case JSONBinaryTag.Value:
{
return new JSONData(aReader.ReadString());
}
case JSONBinaryTag.IntValue:
{
return new JSONData(aReader.ReadInt32());
}
case JSONBinaryTag.DoubleValue:
{
return new JSONData(aReader.ReadDouble());
}
case JSONBinaryTag.BoolValue:
{
return new JSONData(aReader.ReadBoolean());
}
case JSONBinaryTag.FloatValue:
{
return new JSONData(aReader.ReadSingle());
}
default:
{
throw new Exception("Error deserializing JSON. Unknown tag: " + type);
}
}
}
#if USE_SharpZipLib
public static JSONNode LoadFromCompressedStream(System.IO.Stream aData)
{
var zin = new ICSharpCode.SharpZipLib.BZip2.BZip2InputStream(aData);
return LoadFromStream(zin);
}
public static JSONNode LoadFromCompressedFile(string aFileName)
{
#if USE_FileIO
using(var F = System.IO.File.OpenRead(aFileName))
{
return LoadFromCompressedStream(F);
}
#else
throw new Exception("Can't use File IO stuff in webplayer");
#endif
}
public static JSONNode LoadFromCompressedBase64(string aBase64)
{
var tmp = System.Convert.FromBase64String(aBase64);
var stream = new System.IO.MemoryStream(tmp);
stream.Position = 0;
return LoadFromCompressedStream(stream);
}
#else
public static JSONNode LoadFromCompressedFile(string aFileName)
{
throw new Exception("Can't use compressed functions. You need to include SharpZipLib and uncomment the USE_SharpZipLib define at the top of SimpleJSON");
}
public static JSONNode LoadFromCompressedStream(System.IO.Stream aData)
{
throw new Exception("Can't use compressed functions. You need to include SharpZipLib and uncomment the USE_SharpZipLib define at the top of SimpleJSON");
}
public static JSONNode LoadFromCompressedBase64(string aBase64)
{
throw new Exception("Can't use compressed functions. You need to include SharpZipLib and uncomment the USE_SharpZipLib define at the top of SimpleJSON");
}
#endif
public static JSONNode LoadFromStream(System.IO.Stream aData)
{
using(var R = new System.IO.BinaryReader(aData))
{
return Deserialize(R);
}
}
public static JSONNode LoadFromFile(string aFileName)
{
#if USE_FileIO
using(var F = System.IO.File.OpenRead(aFileName))
{
return LoadFromStream(F);
}
#else
throw new Exception("Can't use File IO stuff in webplayer");
#endif
}
public static JSONNode LoadFromBase64(string aBase64)
{
var tmp = System.Convert.FromBase64String(aBase64);
var stream = new System.IO.MemoryStream(tmp);
stream.Position = 0;
return LoadFromStream(stream);
}
} // End of JSONNode
public class JSONArray : JSONNode, IEnumerable
{
private List<JSONNode> m_List = new List<JSONNode>();
public override JSONNode this[int aIndex]
{
get
{
if (aIndex<0 || aIndex >= m_List.Count)
return new JSONLazyCreator(this);
return m_List[aIndex];
}
set
{
if (aIndex<0 || aIndex >= m_List.Count)
m_List.Add(value);
else
m_List[aIndex] = value;
}
}
public override JSONNode this[string aKey]
{
get{ return new JSONLazyCreator(this);}
set{ m_List.Add(value); }
}
public override int Count
{
get { return m_List.Count; }
}
public override void Add(string aKey, JSONNode aItem)
{
m_List.Add(aItem);
}
public override JSONNode Remove(int aIndex)
{
if (aIndex < 0 || aIndex >= m_List.Count)
return null;
JSONNode tmp = m_List[aIndex];
m_List.RemoveAt(aIndex);
return tmp;
}
public override JSONNode Remove(JSONNode aNode)
{
m_List.Remove(aNode);
return aNode;
}
public override IEnumerable<JSONNode> Childs
{
get
{
foreach(JSONNode N in m_List)
yield return N;
}
}
public IEnumerator GetEnumerator()
{
foreach(JSONNode N in m_List)
yield return N;
}
public override string ToString()
{
string result = "[ ";
foreach (JSONNode N in m_List)
{
if (result.Length > 2)
result += ", ";
result += N.ToString();
}
result += " ]";
return result;
}
public override string ToString(string aPrefix)
{
string result = "[ ";
foreach (JSONNode N in m_List)
{
if (result.Length > 3)
result += ", ";
result += "\n" + aPrefix + " ";
result += N.ToString(aPrefix+" ");
}
result += "\n" + aPrefix + "]";
return result;
}
public override void Serialize (System.IO.BinaryWriter aWriter)
{
aWriter.Write((byte)JSONBinaryTag.Array);
aWriter.Write(m_List.Count);
for(int i = 0; i < m_List.Count; i++)
{
m_List[i].Serialize(aWriter);
}
}
} // End of JSONArray
public class JSONClass : JSONNode, IEnumerable
{
private Dictionary<string,JSONNode> m_Dict = new Dictionary<string,JSONNode>();
public ArrayList GetKeys() // returns the keys of all key/value pairs as an ArrayList
{
ArrayList arrayOfStrings = new ArrayList();
foreach (KeyValuePair<string, JSONNode> N in m_Dict) // collect only the keys
arrayOfStrings.Add(N.Key);
return arrayOfStrings;
}
public override JSONNode this[string aKey]
{
get
{
if (m_Dict.ContainsKey(aKey))
return m_Dict[aKey];
else
return new JSONLazyCreator(this, aKey);
}
set
{
if (m_Dict.ContainsKey(aKey))
m_Dict[aKey] = value;
else
m_Dict.Add(aKey,value);
}
}
public override JSONNode this[int aIndex]
{
get
{
if (aIndex < 0 || aIndex >= m_Dict.Count)
return null;
return m_Dict.ElementAt(aIndex).Value;
}
set
{
if (aIndex < 0 || aIndex >= m_Dict.Count)
return;
string key = m_Dict.ElementAt(aIndex).Key;
m_Dict[key] = value;
}
}
public override int Count
{
get { return m_Dict.Count; }
}
public override void Add(string aKey, JSONNode aItem)
{
if (!string.IsNullOrEmpty(aKey))
{
if (m_Dict.ContainsKey(aKey))
m_Dict[aKey] = aItem;
else
m_Dict.Add(aKey, aItem);
}
else
m_Dict.Add(Guid.NewGuid().ToString(), aItem);
}
public override JSONNode Remove(string aKey)
{
if (!m_Dict.ContainsKey(aKey))
return null;
JSONNode tmp = m_Dict[aKey];
m_Dict.Remove(aKey);
return tmp;
}
public override JSONNode Remove(int aIndex)
{
if (aIndex < 0 || aIndex >= m_Dict.Count)
return null;
var item = m_Dict.ElementAt(aIndex);
m_Dict.Remove(item.Key);
return item.Value;
}
public override JSONNode Remove(JSONNode aNode)
{
try
{
var item = m_Dict.Where(k => k.Value == aNode).First();
m_Dict.Remove(item.Key);
return aNode;
}
catch
{
return null;
}
}
public override IEnumerable<JSONNode> Childs
{
get
{
foreach(KeyValuePair<string,JSONNode> N in m_Dict)
yield return N.Value;
}
}
public IEnumerator GetEnumerator()
{
foreach(KeyValuePair<string, JSONNode> N in m_Dict)
yield return N;
}
public override string ToString()
{
string result = "{";
foreach (KeyValuePair<string, JSONNode> N in m_Dict)
{
if (result.Length > 2)
result += ", ";
result += "\"" + Escape(N.Key) + "\":" + N.Value.ToString();
}
result += "}";
return result;
}
public override string ToString(string aPrefix)
{
string result = "{ ";
foreach (KeyValuePair<string, JSONNode> N in m_Dict)
{
if (result.Length > 3)
result += ", ";
result += "\n" + aPrefix + " ";
result += "\"" + Escape(N.Key) + "\" : " + N.Value.ToString(aPrefix+" ");
}
result += "\n" + aPrefix + "}";
return result;
}
public override void Serialize (System.IO.BinaryWriter aWriter)
{
aWriter.Write((byte)JSONBinaryTag.Class);
aWriter.Write(m_Dict.Count);
foreach(string K in m_Dict.Keys)
{
aWriter.Write(K);
m_Dict[K].Serialize(aWriter);
}
}
} // End of JSONClass
public class JSONData : JSONNode
{
private string m_Data;
public override string Value
{
get { return m_Data; }
set { m_Data = value; }
}
public JSONData(string aData)
{
m_Data = aData;
}
public JSONData(float aData)
{
AsFloat = aData;
}
public JSONData(double aData)
{
AsDouble = aData;
}
public JSONData(bool aData)
{
AsBool = aData;
}
public JSONData(int aData)
{
AsInt = aData;
}
public override string ToString()
{
return "\"" + Escape(m_Data) + "\"";
}
public override string ToString(string aPrefix)
{
return "\"" + Escape(m_Data) + "\"";
}
public override void Serialize (System.IO.BinaryWriter aWriter)
{
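// Probe the narrowest representation first (int, then float, double, bool) by checking whether
// the round-tripped string matches m_Data; otherwise fall back to writing the raw string.
// This matches the serialization order documented in the file header.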
var tmp = new JSONData("");
tmp.AsInt = AsInt;
if (tmp.m_Data == this.m_Data)
{
aWriter.Write((byte)JSONBinaryTag.IntValue);
aWriter.Write(AsInt);
return;
}
tmp.AsFloat = AsFloat;
if (tmp.m_Data == this.m_Data)
{
aWriter.Write((byte)JSONBinaryTag.FloatValue);
aWriter.Write(AsFloat);
return;
}
tmp.AsDouble = AsDouble;
if (tmp.m_Data == this.m_Data)
{
aWriter.Write((byte)JSONBinaryTag.DoubleValue);
aWriter.Write(AsDouble);
return;
}
tmp.AsBool = AsBool;
if (tmp.m_Data == this.m_Data)
{
aWriter.Write((byte)JSONBinaryTag.BoolValue);
aWriter.Write(AsBool);
return;
}
aWriter.Write((byte)JSONBinaryTag.Value);
aWriter.Write(m_Data);
}
} // End of JSONData
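// JSONLazyCreator is the placeholder returned when a not-yet-existing item is accessed. It remembers
// the parent node (and key, if any); the first write or typed access creates the real node via Set(),
// attaches it to the parent and drops the reference (see the 2012-12-17 note in the file header).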
internal class JSONLazyCreator : JSONNode
{
private JSONNode m_Node = null;
private string m_Key = null;
public JSONLazyCreator(JSONNode aNode)
{
m_Node = aNode;
m_Key = null;
}
public JSONLazyCreator(JSONNode aNode, string aKey)
{
m_Node = aNode;
m_Key = aKey;
}
private void Set(JSONNode aVal)
{
if (m_Key == null)
{
m_Node.Add(aVal);
}
else
{
m_Node.Add(m_Key, aVal);
}
m_Node = null; // Be GC friendly.
}
public override JSONNode this[int aIndex]
{
get
{
return new JSONLazyCreator(this);
}
set
{
var tmp = new JSONArray();
tmp.Add(value);
Set(tmp);
}
}
public override JSONNode this[string aKey]
{
get
{
return new JSONLazyCreator(this, aKey);
}
set
{
var tmp = new JSONClass();
tmp.Add(aKey, value);
Set(tmp);
}
}
public override void Add (JSONNode aItem)
{
var tmp = new JSONArray();
tmp.Add(aItem);
Set(tmp);
}
public override void Add (string aKey, JSONNode aItem)
{
var tmp = new JSONClass();
tmp.Add(aKey, aItem);
Set(tmp);
}
public static bool operator ==(JSONLazyCreator a, object b)
{
if (b == null)
return true;
return System.Object.ReferenceEquals(a,b);
}
public static bool operator !=(JSONLazyCreator a, object b)
{
return !(a == b);
}
public override bool Equals (object obj)
{
if (obj == null)
return true;
return System.Object.ReferenceEquals(this, obj);
}
public override int GetHashCode ()
{
return base.GetHashCode();
}
public override string ToString()
{
return "";
}
public override string ToString(string aPrefix)
{
return "";
}
public override int AsInt
{
get
{
JSONData tmp = new JSONData(0);
Set(tmp);
return 0;
}
set
{
JSONData tmp = new JSONData(value);
Set(tmp);
}
}
public override float AsFloat
{
get
{
JSONData tmp = new JSONData(0.0f);
Set(tmp);
return 0.0f;
}
set
{
JSONData tmp = new JSONData(value);
Set(tmp);
}
}
public override double AsDouble
{
get
{
JSONData tmp = new JSONData(0.0);
Set(tmp);
return 0.0;
}
set
{
JSONData tmp = new JSONData(value);
Set(tmp);
}
}
public override bool AsBool
{
get
{
JSONData tmp = new JSONData(false);
Set(tmp);
return false;
}
set
{
JSONData tmp = new JSONData(value);
Set(tmp);
}
}
public override JSONArray AsArray
{
get
{
JSONArray tmp = new JSONArray();
Set(tmp);
return tmp;
}
}
public override JSONClass AsObject
{
get
{
JSONClass tmp = new JSONClass();
Set(tmp);
return tmp;
}
}
} // End of JSONLazyCreator
public static class JSON
{
public static JSONNode Parse(string aJSON)
{
return JSONNode.Parse(aJSON);
}
}
}
| |
using System;
using System.Collections;
using System.ComponentModel;
using System.Drawing;
using System.Drawing.Text;
using System.Drawing.Drawing2D;
using Netron;
using System.Reflection;
using System.Windows.Forms;
namespace QuickGraph.Layout.Shapes
{
[Netron.Shape(
"Property Grid",
Description="A shape containing a table of key-value pairs",
Author="Jonathan de Halleux")]
[Serializable ]
public class PropertyGridShape : TitledRectangleShape
{
private bool collapseRows = true;
private Font rowFont = new Font("Tahoma",8f);
private Color rowColor = Color.Black;
private SizeF rowPadding = new SizeF(3,3);
private bool enableGrid = true;
private Color gridColor = Color.Gray;
private float gridWidth = 1;
private DashStyle gridStyle = DashStyle.Solid;
private ArrayList rows = new ArrayList();
private Connector west;
private Connector north;
private Connector south;
private Connector east;
private Connector southWest;
private Connector northWest;
private Connector southEast;
private Connector northEast;
// box sizes
private SizeF keySize;
private SizeF valueSize;
private SizeF rowSize;
private SizeF rowsSize;
public PropertyGridShape()
{
this.Resizable = false;
Assembly a = Assembly.GetExecutingAssembly();
this.west = new Netron.Connector(this, "West");
this.Connectors.Add(this.west);
this.east = new Netron.Connector(this, "East");
this.Connectors.Add(this.east);
this.north = new Netron.Connector(this, "North");
this.Connectors.Add(this.north);
this.south = new Netron.Connector(this, "South");
this.Connectors.Add(this.south);
this.southWest = new Netron.Connector(this, "SouthWest");
this.Connectors.Add(this.southWest);
this.northWest = new Netron.Connector(this, "NorthWest");
this.Connectors.Add(this.northWest);
this.southEast = new Netron.Connector(this, "SouthEast");
this.Connectors.Add(this.southEast);
this.northEast = new Netron.Connector(this, "NorthEast");
this.Connectors.Add(this.northEast);
}
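// Illustrative usage sketch (not part of the original shape). It assumes PropertyEntry exposes a
// (key, value) constructor matching the Key/Value properties read in FitSize and DrawRows:
//
//   var shape = new PropertyGridShape();
//   shape.Rows.Add(new PropertyEntry("Name", "v0"));
//   shape.Rows.Add(new PropertyEntry("Degree", "3"));
//   shape.CollapseRows = false; // show the key/value rows the next time the shape is painted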
[Category("Appearance")]
public bool CollapseRows
{
get { return this.collapseRows; }
set
{
if (this.collapseRows==value)
return;
this.SizeDirty = true;
this.collapseRows = value;
}
}
[Category("Appearance")]
public Color RowColor
{
get { return rowColor; }
set { rowColor = value; }
}
[Category("Appearance")]
public Font RowFont
{
get { return rowFont; }
set
{
if(value==rowFont)
return;
this.SizeDirty=true;
rowFont = value;
}
}
[Category("Appearance")]
public SizeF RowPadding
{
get { return rowPadding; }
set { rowPadding = value; }
}
[Category("Appearance")]
public bool EnableGrid
{
get { return enableGrid; }
set { enableGrid = value; }
}
[Category("Appearance")]
public Color GridColor
{
get { return gridColor; }
set { gridColor = value; }
}
[Category("Appearance")]
public float GridWidth
{
get { return gridWidth; }
set { gridWidth = value; }
}
[Category("Appearance")]
public DashStyle GridStyle
{
get { return gridStyle; }
set { gridStyle = value; }
}
[TypeConverter(typeof(ExpandableObjectConverter))]
public ArrayList Rows
{
get { return this.rows; }
set { this.rows = value; }
}
[Browsable(false)]
public override ArrayList MenuItems
{
get
{
ArrayList items = base.MenuItems;
return items;
}
}
public override void ResetColors()
{
base.ResetColors();
PropertyGridShape shape = new PropertyGridShape();
this.rowColor = shape.rowColor;
this.gridColor = shape.gridColor;
}
public override PointF ConnectionPoint(Netron.Connector c)
{
RectangleF r = Rectangle;
if (c == this.west)
return new PointF(r.Left, r.Top + r.Height/2);
if (c == this.east)
return new PointF(r.Right, r.Top + r.Height/2);
if (c == this.north)
return new PointF(r.Left+r.Width/2, r.Top);
if (c == this.south)
return new PointF(r.Left+r.Width/2, r.Bottom);
if (c==this.southWest)
return new PointF(r.Left, r.Bottom);
if (c==this.northWest)
return new PointF(r.Left, r.Top);
if (c==this.southEast)
return new PointF(r.Right, r.Bottom);
if (c==this.northEast)
return new PointF(r.Right, r.Top);
throw new Exception("Unknown connector");
}
public override void Paint(Graphics g)
{
if (this.SizeDirty)
{
FitSize(g);
}
RectangleF r = Rectangle;
DrawBack(g,r);
DrawTitle(g,r);
if (!this.collapseRows)
DrawRows(g,r);
if (this.EnableGrid)
DrawGrid(g,r);
if (this.EnableBorder)
DrawBorder(g,r);
base.Paint(g);
}
public override void FitSize(Graphics g)
{
// compute different box sizes
// title
base.FitSize(g);
if (this.collapseRows)
{
this.keySize = SizeF.Empty;
this.valueSize = SizeF.Empty;
this.rowSize = SizeF.Empty;
this.rowsSize = SizeF.Empty;
return;
}
// rows
this.keySize = SizeF.Empty;
this.valueSize = SizeF.Empty;
SizeF temp;
foreach(PropertyEntry de in this.rows)
{
temp = g.MeasureString(de.Key,this.rowFont);
this.keySize = new SizeF(
Math.Max(this.keySize.Width,temp.Width),
Math.Max(this.keySize.Height,temp.Height)
);
temp = g.MeasureString(de.Value,this.rowFont);
this.valueSize = new SizeF(
Math.Max(this.valueSize.Width,temp.Width),
Math.Max(this.valueSize.Height,temp.Height)
);
}
// apply padding...
this.keySize.Width += this.rowPadding.Width*2;
this.keySize.Height += this.rowPadding.Height*2;
this.valueSize.Width += this.rowPadding.Width*2;
this.valueSize.Height += this.rowPadding.Height*2;
// computing the size of the key-value box
this.rowSize = new SizeF(
this.keySize.Width + this.valueSize.Width,
Math.Max(this.keySize.Height,this.valueSize.Height)
);
// updating title and rows
this.rowSize.Width = Math.Max(this.rowSize.Width, this.TitleSize.Width);
this.TitleSize = new SizeF(this.rowSize.Width,this.TitleSize.Height);
// rows
this.rowsSize = new SizeF(
this.rowSize.Width,this.rowSize.Height*this.rows.Count);
// adding the title
this.Size = new SizeF(
this.rowSize.Width,
this.TitleSize.Height + this.rowsSize.Height
);
}
protected virtual void DrawGrid(Graphics g, RectangleF r)
{
Pen pen = new Pen(this.GridColor,this.GridWidth);
pen.DashStyle = this.GridStyle;
// horizontal lines
float yCur = r.Top + this.TitleSize.Height + this.rowSize.Height;
for(int i = 0;i<this.rows.Count-1;++i)
{
g.DrawLine(pen,r.Left,yCur,r.Right,yCur);
yCur+=this.rowSize.Height;
}
// vertical
g.DrawLine(pen,
r.Left+this.keySize.Width,r.Bottom,
r.Left+this.keySize.Width,r.Top + this.TitleSize.Height );
}
protected virtual void DrawRows(Graphics g, RectangleF r)
{
SolidBrush brush = new SolidBrush(this.rowColor);
float yCur =
r.Top
+ this.TitleSize.Height
+ this.rowPadding.Height/2;
float xKey = r.Left + this.rowPadding.Width/2;
float xValue =r.Left + this.keySize.Width + this.rowPadding.Width;
foreach(PropertyEntry de in this.rows)
{
g.DrawString(
de.Key,
this.rowFont,
brush,
xKey,
yCur
);
g.DrawString(
de.Value,
this.rowFont,
brush,
xValue,
yCur
);
yCur += this.rowSize.Height;
}
}
}
}
| |
// Copyright (c) MOSA Project. Licensed under the New BSD License.
using Mosa.Compiler.Common;
using Mosa.Compiler.Framework;
using Mosa.Compiler.Linker;
using Mosa.Compiler.Linker.Elf32;
using Mosa.Compiler.Linker.PE;
using Mosa.Compiler.Trace.BuiltIn;
using Mosa.Utility.Aot;
using NDesk.Options;
using System;
using System.Collections.Generic;
using System.IO;
using System.Text;
namespace Mosa.Tool.Compiler
{
/// <summary>
/// Class containing the Compiler.
/// </summary>
public class Compiler
{
#region Data
protected MosaCompiler compiler = new MosaCompiler();
/// <summary>
/// Holds a list of input files.
/// </summary>
private List<FileInfo> inputFiles;
/// <summary>
/// Determines if the file is executable.
/// </summary>
private bool isExecutable;
/// <summary>
/// Holds a reference to the OptionSet used for option parsing.
/// </summary>
private OptionSet optionSet;
private readonly int majorVersion = 1;
private readonly int minorVersion = 4;
private readonly string codeName = "Neptune";
/// <summary>
/// A string holding a simple usage description.
/// </summary>
private readonly string usageString;
#endregion Data
#region Constructors
/// <summary>
/// Initializes a new instance of the Compiler class.
/// </summary>
public Compiler()
{
compiler.CompilerFactory = delegate { return new AotCompiler(); };
usageString = "Usage: mosacl -o outputfile --Architecture=[x86|avr32] --format=[ELF32|ELF64|PE] {--boot=[mb0.7]} {additional options} inputfiles";
optionSet = new OptionSet();
inputFiles = new List<FileInfo>();
#region Setup general options
optionSet.Add(
"local|version",
"Display version information.",
delegate (string v)
{
if (v != null)
{
// only show header and exit
Environment.Exit(0);
}
});
optionSet.Add(
"h|?|help",
"Display the full set of available options.",
delegate (string v)
{
if (v != null)
{
this.ShowHelp();
Environment.Exit(0);
}
});
// default option handler for input files
optionSet.Add(
"<>",
"Input files.",
delegate (string v)
{
if (!File.Exists(v))
{
throw new OptionException(String.Format("Input file or option '{0}' doesn't exist.", v), String.Empty);
}
FileInfo file = new FileInfo(v);
if (file.Extension.ToLower() == ".exe")
{
if (isExecutable)
{
// there are more than one exe files in the list
throw new OptionException("Multiple executables aren't allowed.", String.Empty);
}
isExecutable = true;
}
inputFiles.Add(file);
});
#endregion Setup general options
#region Setup options
optionSet.Add(
"b|boot=",
"Specify the bootable format of the produced binary [{mb0.7}].",
delegate (string format)
{
compiler.CompilerOptions.BootStageFactory = GetBootStageFactory(format);
}
);
optionSet.Add(
"a|Architecture=",
"Select one of the MOSA architectures to compile for [{x86|ARMv6}].",
delegate (string arch)
{
compiler.CompilerOptions.Architecture = SelectArchitecture(arch);
}
);
optionSet.Add(
"f|format=",
"Select the format of the binary file to create [{ELF32|ELF64|PE}].",
delegate (string format)
{
compiler.CompilerOptions.LinkerFactory = GetLinkerFactory(format);
if (compiler.CompilerOptions.LinkerFactory == null)
throw new OptionException("Invalid value Linker format: " + format, "format");
}
);
optionSet.Add(
"o|out=",
"The name of the output {file}.",
delegate (string file)
{
compiler.CompilerOptions.OutputFile = file;
}
);
optionSet.Add(
"map=",
"Generate a map {file} of the produced binary.",
delegate (string file)
{
compiler.CompilerOptions.MapFile = file;
}
);
optionSet.Add(
@"sa|enable-static-alloc",
@"Performs static allocations at compile time.",
enable => compiler.CompilerOptions.EnableStaticAllocations = enable != null
);
optionSet.Add(
@"ssa|enable-single-static-assignment-form",
@"Performs single static assignments at compile time.",
enable => compiler.CompilerOptions.EnableSSA = enable != null
);
optionSet.Add(
@"optimize|enable-optimizations|ssa-optimize",
@"Performs single static assignments optimizations.",
enable => compiler.CompilerOptions.EnableOptimizations = enable != null
);
optionSet.Add(
@"promote-variables|enable-variable-promotion",
@"Enables variable promotion optimization.",
enable => compiler.CompilerOptions.EnableVariablePromotion = enable != null
);
optionSet.Add(
"base-address=",
"Specify the {base address}.",
delegate (string v)
{
uint val;
if (uint.TryParse(v, out val))
{
compiler.CompilerOptions.BaseAddress = val;
}
else
{
throw new OptionException("Invalid value for base address: " + v, "base-address");
}
}
);
#endregion Setup options
}
#endregion Constructors
#region Public Methods
/// <summary>
/// Runs the command line parser and the compilation process.
/// </summary>
/// <param name="args">The command line arguments.</param>
public void Run(string[] args)
{
// always print header with version information
Console.WriteLine("MOSA AOT Compiler, Version {0}.{1} '{2}'", majorVersion, minorVersion, codeName);
Console.WriteLine("Copyright 2015 by the MOSA Project. Licensed under the New BSD License.");
Console.WriteLine("Copyright 2008 by Novell. NDesk.Options is released under the MIT/X11 license.");
Console.WriteLine();
Console.WriteLine("Parsing options...");
try
{
if (args == null || args.Length == 0)
{
// no arguments are specified
ShowShortHelp();
return;
}
optionSet.Parse(args);
if (inputFiles.Count == 0)
{
throw new OptionException("No input file(s) specified.", String.Empty);
}
// Process boot format:
// Boot format only matters if it's an executable
// Process this only now, because input files must be known
if (!isExecutable && compiler.CompilerOptions.BootStageFactory != null)
{
Console.WriteLine("Warning: Ignoring boot format, because target is not an executable.");
Console.WriteLine();
}
// Check for missing options
if (compiler.CompilerOptions.LinkerFactory == null)
{
throw new OptionException("No binary format specified.", "format");
}
if (String.IsNullOrEmpty(compiler.CompilerOptions.OutputFile))
{
throw new OptionException("No output file specified.", "o");
}
if (compiler.CompilerOptions.Architecture == null)
{
throw new OptionException("No Architecture specified.", "Architecture");
}
}
catch (OptionException e)
{
ShowError(e.Message);
return;
}
Console.WriteLine(this.ToString());
Console.WriteLine("Compiling ...");
DateTime start = DateTime.Now;
try
{
Compile();
}
catch (CompilerException ce)
{
this.ShowError(ce.Message);
}
DateTime end = DateTime.Now;
TimeSpan time = end - start;
Console.WriteLine();
Console.WriteLine("Compilation time: " + time);
}
/// <summary>
/// Returns a string representation of the current options.
/// </summary>
/// <returns>A string containing the options.</returns>
public override string ToString()
{
StringBuilder sb = new StringBuilder();
sb.Append(" > Output file: ").AppendLine(compiler.CompilerOptions.OutputFile);
sb.Append(" > Input file(s): ").AppendLine(String.Join(", ", new List<string>(GetInputFileNames()).ToArray()));
sb.Append(" > Architecture: ").AppendLine(compiler.CompilerOptions.Architecture.GetType().FullName);
sb.Append(" > Binary format: ").AppendLine(compiler.CompilerOptions.LinkerFactory().GetType().FullName);
sb.Append(" > Boot format: ").AppendLine((compiler.CompilerOptions.BootStageFactory == null) ? "None" : ((IPipelineStage)compiler.CompilerOptions.BootStageFactory()).Name);
sb.Append(" > Is executable: ").AppendLine(isExecutable.ToString());
return sb.ToString();
}
#endregion Public Methods
#region Private Methods
private void Compile()
{
compiler.CompilerTrace.TraceListener = new ConsoleEventListener();
compiler.Load(inputFiles);
compiler.Execute(Environment.ProcessorCount);
}
/// <summary>
/// Gets a list of input file names.
/// </summary>
private IEnumerable<string> GetInputFileNames()
{
foreach (FileInfo file in inputFiles)
yield return file.FullName;
}
/// <summary>
/// Shows an error and a short information text.
/// </summary>
/// <param name="message">The error message to show.</param>
private void ShowError(string message)
{
Console.WriteLine(usageString);
Console.WriteLine();
Console.Write("Error: ");
Console.WriteLine(message);
Console.WriteLine();
Console.WriteLine("Execute 'mosacl --help' for more information.");
Console.WriteLine();
}
/// <summary>
/// Shows a short help text pointing to the '--help' option.
/// </summary>
private void ShowShortHelp()
{
Console.WriteLine(usageString);
Console.WriteLine();
Console.WriteLine("Execute 'mosacl --help' for more information.");
}
/// <summary>
/// Shows the full help containing descriptions for all possible options.
/// </summary>
private void ShowHelp()
{
Console.WriteLine(usageString);
Console.WriteLine();
Console.WriteLine("Options:");
this.optionSet.WriteOptionDescriptions(Console.Out);
}
#endregion Private Methods
#region Internal Methods
/// <summary>
/// Selects the architecture.
/// </summary>
/// <param name="architecture">The architecture.</param>
/// <returns></returns>
private static BaseArchitecture SelectArchitecture(string architecture)
{
switch (architecture.ToLower())
{
case "x86": return Mosa.Platform.x86.Architecture.CreateArchitecture(Mosa.Platform.x86.ArchitectureFeatureFlags.AutoDetect);
default: throw new NotImplementCompilerException(String.Format("Unknown or unsupported Architecture {0}.", architecture));
}
}
private static Func<ICompilerStage> GetBootStageFactory(string format)
{
switch (format.ToLower())
{
case "multibootHeader-0.7":
case "mb0.7": return delegate { return new Mosa.Platform.x86.Stages.Multiboot0695Stage(); };
default: throw new NotImplementCompilerException(String.Format("Unknown or unsupported boot format {0}.", format));
}
}
private static Func<BaseLinker> GetLinkerFactory(string format)
{
switch (format.ToLower())
{
case "pe": return delegate { return new PELinker(); };
case "elf": return delegate { return new Elf32(); };
case "elf32": return delegate { return new Elf32(); };
//case "elf64": return delegate { return new Elf64Linker(); };
default: return null;
}
}
#endregion Internal Methods
}
}
| |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// File System.Windows.Input.InputMethod.cs
// Automatically generated contract file.
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Diagnostics.Contracts;
using System;
// Disable the "this variable is not used" warning as every field would imply it.
#pragma warning disable 0414
// Disable the "this variable is never assigned to".
#pragma warning disable 0067
// Disable the "this event is never assigned to".
#pragma warning disable 0649
// Disable the "this variable is never used".
#pragma warning disable 0169
// Disable the "new keyword not required" warning.
#pragma warning disable 0109
// Disable the "extern without DllImport" warning.
#pragma warning disable 0626
// Disable the "could hide other member" warning, can happen on certain properties.
#pragma warning disable 0108
namespace System.Windows.Input
{
public partial class InputMethod : System.Windows.Threading.DispatcherObject
{
#region Methods and constructors
public static InputScope GetInputScope(System.Windows.DependencyObject target)
{
return default(InputScope);
}
public static bool GetIsInputMethodEnabled(System.Windows.DependencyObject target)
{
return default(bool);
}
public static bool GetIsInputMethodSuspended(System.Windows.DependencyObject target)
{
return default(bool);
}
public static ImeConversionModeValues GetPreferredImeConversionMode(System.Windows.DependencyObject target)
{
return default(ImeConversionModeValues);
}
public static ImeSentenceModeValues GetPreferredImeSentenceMode(System.Windows.DependencyObject target)
{
return default(ImeSentenceModeValues);
}
public static InputMethodState GetPreferredImeState(System.Windows.DependencyObject target)
{
return default(InputMethodState);
}
internal InputMethod()
{
}
public static void SetInputScope(System.Windows.DependencyObject target, InputScope value)
{
}
public static void SetIsInputMethodEnabled(System.Windows.DependencyObject target, bool value)
{
}
public static void SetIsInputMethodSuspended(System.Windows.DependencyObject target, bool value)
{
}
public static void SetPreferredImeConversionMode(System.Windows.DependencyObject target, ImeConversionModeValues value)
{
}
public static void SetPreferredImeSentenceMode(System.Windows.DependencyObject target, ImeSentenceModeValues value)
{
}
public static void SetPreferredImeState(System.Windows.DependencyObject target, InputMethodState value)
{
}
public void ShowConfigureUI()
{
}
public void ShowConfigureUI(System.Windows.UIElement element)
{
}
public void ShowRegisterWordUI(System.Windows.UIElement element, string registeredText)
{
}
public void ShowRegisterWordUI(string registeredText)
{
}
public void ShowRegisterWordUI()
{
}
#endregion
#region Properties and indexers
public bool CanShowConfigurationUI
{
get
{
return default(bool);
}
}
public bool CanShowRegisterWordUI
{
get
{
return default(bool);
}
}
public static System.Windows.Input.InputMethod Current
{
get
{
return default(System.Windows.Input.InputMethod);
}
}
public InputMethodState HandwritingState
{
get
{
return default(InputMethodState);
}
set
{
}
}
public ImeConversionModeValues ImeConversionMode
{
get
{
return default(ImeConversionModeValues);
}
set
{
}
}
public ImeSentenceModeValues ImeSentenceMode
{
get
{
return default(ImeSentenceModeValues);
}
set
{
}
}
public InputMethodState ImeState
{
get
{
return default(InputMethodState);
}
set
{
}
}
public InputMethodState MicrophoneState
{
get
{
return default(InputMethodState);
}
set
{
}
}
public SpeechMode SpeechMode
{
get
{
return default(SpeechMode);
}
set
{
}
}
#endregion
#region Events
public event InputMethodStateChangedEventHandler StateChanged
{
add
{
}
remove
{
}
}
#endregion
#region Fields
public readonly static System.Windows.DependencyProperty InputScopeProperty;
public readonly static System.Windows.DependencyProperty IsInputMethodEnabledProperty;
public readonly static System.Windows.DependencyProperty IsInputMethodSuspendedProperty;
public readonly static System.Windows.DependencyProperty PreferredImeConversionModeProperty;
public readonly static System.Windows.DependencyProperty PreferredImeSentenceModeProperty;
public readonly static System.Windows.DependencyProperty PreferredImeStateProperty;
#endregion
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using Microsoft.Azure.Management.Compute.Models;
using System;
using System.Collections;
using System.Linq;
using System.Management.Automation;
namespace Microsoft.Azure.Commands.Compute.Automation
{
[Cmdlet("New", "AzureRmVmssConfig")]
[OutputType(typeof(VirtualMachineScaleSet))]
public class NewAzureRmVmssConfigCommand : Microsoft.Azure.Commands.ResourceManager.Common.AzureRMCmdlet
{
[Parameter(
Mandatory = false,
Position = 0,
ValueFromPipelineByPropertyName = true)]
public string ProvisioningState { get; set; }
[Parameter(
Mandatory = false,
Position = 1,
ValueFromPipelineByPropertyName = true)]
public bool? OverProvision { get; set; }
[Parameter(
Mandatory = false,
Position = 2,
ValueFromPipelineByPropertyName = true)]
public string Location { get; set; }
[Parameter(
Mandatory = false,
Position = 3,
ValueFromPipelineByPropertyName = true)]
public Hashtable Tag { get; set; }
[Parameter(
Mandatory = false,
Position = 4,
ValueFromPipelineByPropertyName = true)]
public string SkuName { get; set; }
[Parameter(
Mandatory = false,
Position = 5,
ValueFromPipelineByPropertyName = true)]
public string SkuTier { get; set; }
[Parameter(
Mandatory = false,
Position = 6,
ValueFromPipelineByPropertyName = true)]
public Int64? SkuCapacity { get; set; }
[Parameter(
Mandatory = false,
Position = 7,
ValueFromPipelineByPropertyName = true)]
public UpgradeMode? UpgradePolicyMode { get; set; }
[Parameter(
Mandatory = false,
Position = 8,
ValueFromPipelineByPropertyName = true)]
public VirtualMachineScaleSetOSProfile OsProfile { get; set; }
[Parameter(
Mandatory = false,
Position = 9,
ValueFromPipelineByPropertyName = true)]
public VirtualMachineScaleSetStorageProfile StorageProfile { get; set; }
[Parameter(
Mandatory = false,
Position = 10,
ValueFromPipelineByPropertyName = true)]
public VirtualMachineScaleSetNetworkConfiguration[] NetworkInterfaceConfiguration { get; set; }
[Parameter(
Mandatory = false,
Position = 11,
ValueFromPipelineByPropertyName = true)]
public VirtualMachineScaleSetExtension[] Extension { get; set; }
protected override void ProcessRecord()
{
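// Build the nested Sku, UpgradePolicy and VirtualMachineProfile objects lazily, only when at least
// one of their parameters was supplied, then assemble the VirtualMachineScaleSet and write it to the pipeline.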
// Sku
Microsoft.Azure.Management.Compute.Models.Sku vSku = null;
// UpgradePolicy
Microsoft.Azure.Management.Compute.Models.UpgradePolicy vUpgradePolicy = null;
// VirtualMachineProfile
Microsoft.Azure.Management.Compute.Models.VirtualMachineScaleSetVMProfile vVirtualMachineProfile = null;
if (this.SkuName != null)
{
if (vSku == null)
{
vSku = new Microsoft.Azure.Management.Compute.Models.Sku();
}
vSku.Name = this.SkuName;
}
if (this.SkuTier != null)
{
if (vSku == null)
{
vSku = new Microsoft.Azure.Management.Compute.Models.Sku();
}
vSku.Tier = this.SkuTier;
}
if (this.SkuCapacity != null)
{
if (vSku == null)
{
vSku = new Microsoft.Azure.Management.Compute.Models.Sku();
}
vSku.Capacity = this.SkuCapacity;
}
if (this.UpgradePolicyMode != null)
{
if (vUpgradePolicy == null)
{
vUpgradePolicy = new Microsoft.Azure.Management.Compute.Models.UpgradePolicy();
}
vUpgradePolicy.Mode = this.UpgradePolicyMode;
}
if (this.OsProfile != null)
{
if (vVirtualMachineProfile == null)
{
vVirtualMachineProfile = new Microsoft.Azure.Management.Compute.Models.VirtualMachineScaleSetVMProfile();
}
vVirtualMachineProfile.OsProfile = this.OsProfile;
}
if (this.StorageProfile != null)
{
if (vVirtualMachineProfile == null)
{
vVirtualMachineProfile = new Microsoft.Azure.Management.Compute.Models.VirtualMachineScaleSetVMProfile();
}
vVirtualMachineProfile.StorageProfile = this.StorageProfile;
}
if (this.NetworkInterfaceConfiguration != null)
{
if (vVirtualMachineProfile == null)
{
vVirtualMachineProfile = new Microsoft.Azure.Management.Compute.Models.VirtualMachineScaleSetVMProfile();
}
if (vVirtualMachineProfile.NetworkProfile == null)
{
vVirtualMachineProfile.NetworkProfile = new Microsoft.Azure.Management.Compute.Models.VirtualMachineScaleSetNetworkProfile();
}
vVirtualMachineProfile.NetworkProfile.NetworkInterfaceConfigurations = this.NetworkInterfaceConfiguration;
}
if (this.Extension != null)
{
if (vVirtualMachineProfile == null)
{
vVirtualMachineProfile = new Microsoft.Azure.Management.Compute.Models.VirtualMachineScaleSetVMProfile();
}
if (vVirtualMachineProfile.ExtensionProfile == null)
{
vVirtualMachineProfile.ExtensionProfile = new Microsoft.Azure.Management.Compute.Models.VirtualMachineScaleSetExtensionProfile();
}
vVirtualMachineProfile.ExtensionProfile.Extensions = this.Extension;
}
var vVirtualMachineScaleSet = new VirtualMachineScaleSet
{
ProvisioningState = this.ProvisioningState,
OverProvision = this.OverProvision,
Location = this.Location,
Tags = (this.Tag == null) ? null : this.Tag.Cast<DictionaryEntry>().ToDictionary(ht => (string)ht.Key, ht => (string)ht.Value),
Sku = vSku,
UpgradePolicy = vUpgradePolicy,
VirtualMachineProfile = vVirtualMachineProfile,
};
WriteObject(vVirtualMachineScaleSet);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using System.Globalization;
namespace System.Xml
{
/// <summary>
/// Contains various static functions and methods for parsing and validating:
/// NCName (not namespace-aware, no colons allowed)
/// QName (prefix:local-name)
/// </summary>
internal static class ValidateNames
{
internal enum Flags
{
NCNames = 0x1, // Validate that each non-empty prefix and localName is a valid NCName
CheckLocalName = 0x2, // Validate the local-name
CheckPrefixMapping = 0x4, // Validate the prefix --> namespace mapping
All = 0x7,
AllExceptNCNames = 0x6,
AllExceptPrefixMapping = 0x3,
};
static XmlCharType xmlCharType = XmlCharType.Instance;
//-----------------------------------------------
// Nmtoken parsing
//-----------------------------------------------
/// <summary>
/// Attempts to parse the input string as an Nmtoken (see the XML spec production [7] && XML Namespaces spec).
/// Quits parsing when an invalid Nmtoken char is reached or the end of string is reached.
/// Returns the number of valid Nmtoken chars that were parsed.
/// </summary>
internal static unsafe int ParseNmtoken(string s, int offset)
{
Debug.Assert(s != null && offset <= s.Length);
// Keep parsing until the end of string or an invalid NCName character is reached
int i = offset;
while (i < s.Length)
{
if ((xmlCharType.charProperties[s[i]] & XmlCharType.fNCNameSC) != 0)
{
i++;
}
#if XML10_FIFTH_EDITION
else if (xmlCharType.IsNCNameSurrogateChar(s, i))
{
i += 2;
}
#endif
else
{
break;
}
}
return i - offset;
}
//-----------------------------------------------
// Nmtoken parsing (no XML namespaces support)
//-----------------------------------------------
/// <summary>
/// Attempts to parse the input string as an Nmtoken (see the XML spec production [7]) without taking
/// into account the XML Namespaces spec. What it means is that the ':' character is allowed at any
/// position and any number of times in the token.
/// Quits parsing when an invalid Nmtoken char is reached or the end of string is reached.
/// Returns the number of valid Nmtoken chars that were parsed.
/// </summary>
internal static unsafe int ParseNmtokenNoNamespaces(string s, int offset)
{
Debug.Assert(s != null && offset <= s.Length);
// Keep parsing until the end of string or an invalid Name character is reached
int i = offset;
while (i < s.Length)
{
if ((xmlCharType.charProperties[s[i]] & XmlCharType.fNCNameSC) != 0 || s[i] == ':')
{ // if (xmlCharType.IsNameSingleChar(s[i])) {
i++;
}
#if XML10_FIFTH_EDITION
else if (xmlCharType.IsNCNameSurrogateChar(s, i))
{
i += 2;
}
#endif
else
{
break;
}
}
return i - offset;
}
// helper methods
internal static bool IsNmtokenNoNamespaces(string s)
{
int endPos = ParseNmtokenNoNamespaces(s, 0);
return endPos > 0 && endPos == s.Length;
}
//-----------------------------------------------
// Name parsing (no XML namespaces support)
//-----------------------------------------------
/// <summary>
/// Attempts to parse the input string as a Name without taking into account the XML Namespaces spec.
/// What it means is that the ':' character does not delimit the prefix and local name, but is a regular
/// name character, which is allowed to appear at any position and any number of times in the name.
/// Quits parsing when an invalid Name char is reached or the end of string is reached.
/// Returns the number of valid Name chars that were parsed.
/// </summary>
internal static unsafe int ParseNameNoNamespaces(string s, int offset)
{
Debug.Assert(s != null && offset <= s.Length);
// Quit if the first character is not a valid NCName starting character
int i = offset;
if (i < s.Length)
{
if ((xmlCharType.charProperties[s[i]] & XmlCharType.fNCStartNameSC) != 0 || s[i] == ':')
{
i++;
}
#if XML10_FIFTH_EDITION
else if (xmlCharType.IsNCNameSurrogateChar(s, i))
{
i += 2;
}
#endif
else
{
return 0; // no valid StartNCName char
}
// Keep parsing until the end of string or an invalid NCName character is reached
while (i < s.Length)
{
if ((xmlCharType.charProperties[s[i]] & XmlCharType.fNCNameSC) != 0 || s[i] == ':')
{
i++;
}
#if XML10_FIFTH_EDITION
else if (xmlCharType.IsNCNameSurrogateChar(s, i))
{
i += 2;
}
#endif
else
{
break;
}
}
}
return i - offset;
}
// helper methods
internal static bool IsNameNoNamespaces(string s)
{
int endPos = ParseNameNoNamespaces(s, 0);
return endPos > 0 && endPos == s.Length;
}
//-----------------------------------------------
// NCName parsing
//-----------------------------------------------
/// <summary>
/// Attempts to parse the input string as an NCName (see the XML Namespace spec).
/// Quits parsing when an invalid NCName char is reached or the end of string is reached.
/// Returns the number of valid NCName chars that were parsed.
/// </summary>
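/// <remarks>
/// Illustrative examples (not in the original source): ParseNCName("abc:def", 0) returns 3 because
/// ':' is not a valid NCName character, and ParseNCName("1abc", 0) returns 0 because a digit
/// cannot start an NCName.
/// </remarks>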
internal static unsafe int ParseNCName(string s, int offset)
{
Debug.Assert(s != null && offset <= s.Length);
// Quit if the first character is not a valid NCName starting character
int i = offset;
if (i < s.Length)
{
if ((xmlCharType.charProperties[s[i]] & XmlCharType.fNCStartNameSC) != 0)
{
i++;
}
#if XML10_FIFTH_EDITION
else if (xmlCharType.IsNCNameSurrogateChar(s, i))
{
i += 2;
}
#endif
else
{
return 0; // no valid StartNCName char
}
// Keep parsing until the end of string or an invalid NCName character is reached
while (i < s.Length)
{
if ((xmlCharType.charProperties[s[i]] & XmlCharType.fNCNameSC) != 0)
{
i++;
}
#if XML10_FIFTH_EDITION
else if (xmlCharType.IsNCNameSurrogateChar(s, i))
{
i += 2;
}
#endif
else
{
break;
}
}
}
return i - offset;
}
internal static int ParseNCName(string s)
{
return ParseNCName(s, 0);
}
//-----------------------------------------------
// QName parsing
//-----------------------------------------------
/// <summary>
/// Attempts to parse the input string as a QName (see the XML Namespace spec).
/// Quits parsing when an invalid QName char is reached or the end of string is reached.
/// Returns the number of valid QName chars that were parsed.
/// Sets colonOffset to the offset of a colon character if it exists, or 0 otherwise.
/// </summary>
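/// <remarks>
/// Illustrative examples (not in the original source): ParseQName("a:b", 0, out c) returns 3 and
/// sets c to 1, while ParseQName("a:", 0, out c) returns 1 and leaves c at 0 because the local
/// name part is missing.
/// </remarks>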
internal static int ParseQName(string s, int offset, out int colonOffset)
{
// Assume no colon
colonOffset = 0;
// Parse NCName (may be prefix, may be local name)
int len = ParseNCName(s, offset);
if (len != 0)
{
// Non-empty NCName, so look for colon if there are any characters left
offset += len;
if (offset < s.Length && s[offset] == ':')
{
// First NCName was prefix, so look for local name part
int lenLocal = ParseNCName(s, offset + 1);
if (lenLocal != 0)
{
// Local name part found, so increase total QName length (add 1 for colon)
colonOffset = offset;
len += lenLocal + 1;
}
}
}
return len;
}
/// <summary>
/// Calls parseQName and throws exception if the resulting name is not a valid QName.
/// Returns the prefix and local name parts.
/// </summary>
internal static void ParseQNameThrow(string s, out string prefix, out string localName)
{
int colonOffset;
int len = ParseQName(s, 0, out colonOffset);
if (len == 0 || len != s.Length)
{
// If the string is not a valid QName, then throw
ThrowInvalidName(s, 0, len);
}
if (colonOffset != 0)
{
prefix = s.Substring(0, colonOffset);
localName = s.Substring(colonOffset + 1);
}
else
{
prefix = "";
localName = s;
}
}
/// <summary>
/// Throws an invalid name exception.
/// </summary>
/// <param name="s">String that was parsed.</param>
/// <param name="offsetStartChar">Offset in string where parsing began.</param>
/// <param name="offsetBadChar">Offset in string where parsing failed.</param>
internal static void ThrowInvalidName(string s, int offsetStartChar, int offsetBadChar)
{
// If the name is empty, throw an exception
if (offsetStartChar >= s.Length)
throw new XmlException(SR.Xml_EmptyName);
Debug.Assert(offsetBadChar < s.Length);
if (xmlCharType.IsNCNameSingleChar(s[offsetBadChar]) && !XmlCharType.Instance.IsStartNCNameSingleChar(s[offsetBadChar]))
{
// The error character is a valid name character, but is not a valid start name character
throw new XmlException(SR.Format(SR.Xml_BadStartNameChar, XmlExceptionHelper.BuildCharExceptionArgs(s, offsetBadChar)));
}
else
{
// The error character is an invalid name character
throw new XmlException(SR.Format(SR.Xml_BadNameChar, XmlExceptionHelper.BuildCharExceptionArgs(s, offsetBadChar)));
}
}
/// <summary>
/// Split a QualifiedName into prefix and localname, w/o any checking.
/// (Used for XmlReader/XPathNavigator MoveTo(name) methods)
/// </summary>
internal static void SplitQName(string name, out string prefix, out string lname)
{
int colonPos = name.IndexOf(':');
if (-1 == colonPos)
{
prefix = string.Empty;
lname = name;
}
else if (0 == colonPos || (name.Length - 1) == colonPos)
{
throw new ArgumentException(SR.Format(SR.Xml_BadNameChar, XmlExceptionHelper.BuildCharExceptionArgs(':', '\0')), "name");
}
else
{
prefix = name.Substring(0, colonPos);
colonPos++; // move after colon
lname = name.Substring(colonPos, name.Length - colonPos);
}
}
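// Illustrative usage (added, not part of the original source):
//   SplitQName("xs:element", out p, out l)  =>  p == "xs", l == "element"
//   SplitQName("element",    out p, out l)  =>  p == "",   l == "element"
//   SplitQName(":element") and SplitQName("element:") throw ArgumentException.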
}
internal class XmlExceptionHelper
{
internal static string[] BuildCharExceptionArgs(string data, int invCharIndex)
{
return BuildCharExceptionArgs(data[invCharIndex], invCharIndex + 1 < data.Length ? data[invCharIndex + 1] : '\0');
}
internal static string[] BuildCharExceptionArgs(char[] data, int invCharIndex)
{
return BuildCharExceptionArgs(data, data.Length, invCharIndex);
}
internal static string[] BuildCharExceptionArgs(char[] data, int length, int invCharIndex)
{
Debug.Assert(invCharIndex < data.Length);
Debug.Assert(invCharIndex < length);
Debug.Assert(length <= data.Length);
return BuildCharExceptionArgs(data[invCharIndex], invCharIndex + 1 < length ? data[invCharIndex + 1] : '\0');
}
internal static string[] BuildCharExceptionArgs(char invChar, char nextChar)
{
string[] aStringList = new string[2];
// for surrogate characters include both high and low char in the message so that a full character is displayed
if (XmlCharType.IsHighSurrogate(invChar) && nextChar != 0)
{
int combinedChar = XmlCharType.CombineSurrogateChar(nextChar, invChar);
aStringList[0] = new string(new char[] { invChar, nextChar });
aStringList[1] = string.Format(CultureInfo.InvariantCulture, "0x{0:X2}", combinedChar);
}
else
{
// don't include the 0 character in the string - it means end-of-string in native code, where this message may bubble up to

if ((int)invChar == 0)
{
aStringList[0] = ".";
}
else
{
aStringList[0] = Convert.ToString(invChar, CultureInfo.InvariantCulture);
}
aStringList[1] = string.Format(CultureInfo.InvariantCulture, "0x{0:X2}", (int)invChar);
}
return aStringList;
}
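// Illustrative example (added, not part of the original source):
//   BuildCharExceptionArgs('<', '\0')  returns { "<", "0x3C" }
//   BuildCharExceptionArgs('\0', '\0') returns { ".", "0x00" }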
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace System.Xml.Schema
{
using System;
using System.ComponentModel;
using System.Xml.Serialization;
using System.Xml.Schema;
using System.Xml.XPath;
using System.Diagnostics;
using System.Collections;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Globalization;
/// <include file='doc\XmlSchemaFacet.uex' path='docs/doc[@for="XmlSchemaFacet"]/*' />
internal abstract class FacetsChecker
{
private struct FacetsCompiler
{
private DatatypeImplementation _datatype;
private RestrictionFacets _derivedRestriction;
private RestrictionFlags _baseFlags;
private RestrictionFlags _baseFixedFlags;
private RestrictionFlags _validRestrictionFlags;
//Helpers
private XmlSchemaDatatype _nonNegativeInt;
private XmlSchemaDatatype _builtInType;
private XmlTypeCode _builtInEnum;
private bool _firstPattern;
private StringBuilder _regStr;
private XmlSchemaPatternFacet _pattern_facet;
public FacetsCompiler(DatatypeImplementation baseDatatype, RestrictionFacets restriction)
{
_firstPattern = true;
_regStr = null;
_pattern_facet = null;
_datatype = baseDatatype;
_derivedRestriction = restriction;
_baseFlags = _datatype.Restriction != null ? _datatype.Restriction.Flags : 0;
_baseFixedFlags = _datatype.Restriction != null ? _datatype.Restriction.FixedFlags : 0;
_validRestrictionFlags = _datatype.ValidRestrictionFlags;
_nonNegativeInt = DatatypeImplementation.GetSimpleTypeFromTypeCode(XmlTypeCode.NonNegativeInteger).Datatype;
_builtInEnum = !(_datatype is Datatype_union || _datatype is Datatype_List) ? _datatype.TypeCode : 0;
_builtInType = (int)_builtInEnum > 0 ? DatatypeImplementation.GetSimpleTypeFromTypeCode(_builtInEnum).Datatype : _datatype;
}
internal void CompileLengthFacet(XmlSchemaFacet facet)
{
CheckProhibitedFlag(facet, RestrictionFlags.Length, SR.Sch_LengthFacetProhibited);
CheckDupFlag(facet, RestrictionFlags.Length, SR.Sch_DupLengthFacet);
_derivedRestriction.Length = XmlBaseConverter.DecimalToInt32((decimal)ParseFacetValue(_nonNegativeInt, facet, SR.Sch_LengthFacetInvalid, null, null));
if ((_baseFixedFlags & RestrictionFlags.Length) != 0)
{
if (!_datatype.IsEqual(_datatype.Restriction.Length, _derivedRestriction.Length))
{
throw new XmlSchemaException(SR.Sch_FacetBaseFixed, facet);
}
}
if ((_baseFlags & RestrictionFlags.Length) != 0)
{
if (_datatype.Restriction.Length < _derivedRestriction.Length)
{
throw new XmlSchemaException(SR.Sch_LengthGtBaseLength, facet);
}
}
// If the base has the MinLength facet, check that our derived length is not violating it
if ((_baseFlags & RestrictionFlags.MinLength) != 0)
{
if (_datatype.Restriction.MinLength > _derivedRestriction.Length)
{
throw new XmlSchemaException(SR.Sch_MaxMinLengthBaseLength, facet);
}
}
// If the base has the MaxLength facet, check that our derived length is not violating it
if ((_baseFlags & RestrictionFlags.MaxLength) != 0)
{
if (_datatype.Restriction.MaxLength < _derivedRestriction.Length)
{
throw new XmlSchemaException(SR.Sch_MaxMinLengthBaseLength, facet);
}
}
SetFlag(facet, RestrictionFlags.Length);
}
internal void CompileMinLengthFacet(XmlSchemaFacet facet)
{
CheckProhibitedFlag(facet, RestrictionFlags.MinLength, SR.Sch_MinLengthFacetProhibited);
CheckDupFlag(facet, RestrictionFlags.MinLength, SR.Sch_DupMinLengthFacet);
_derivedRestriction.MinLength = XmlBaseConverter.DecimalToInt32((decimal)ParseFacetValue(_nonNegativeInt, facet, SR.Sch_MinLengthFacetInvalid, null, null));
if ((_baseFixedFlags & RestrictionFlags.MinLength) != 0)
{
if (!_datatype.IsEqual(_datatype.Restriction.MinLength, _derivedRestriction.MinLength))
{
throw new XmlSchemaException(SR.Sch_FacetBaseFixed, facet);
}
}
if ((_baseFlags & RestrictionFlags.MinLength) != 0)
{
if (_datatype.Restriction.MinLength > _derivedRestriction.MinLength)
{
throw new XmlSchemaException(SR.Sch_MinLengthGtBaseMinLength, facet);
}
}
if ((_baseFlags & RestrictionFlags.Length) != 0)
{
if (_datatype.Restriction.Length < _derivedRestriction.MinLength)
{
throw new XmlSchemaException(SR.Sch_MaxMinLengthBaseLength, facet);
}
}
SetFlag(facet, RestrictionFlags.MinLength);
}
internal void CompileMaxLengthFacet(XmlSchemaFacet facet)
{
CheckProhibitedFlag(facet, RestrictionFlags.MaxLength, SR.Sch_MaxLengthFacetProhibited);
CheckDupFlag(facet, RestrictionFlags.MaxLength, SR.Sch_DupMaxLengthFacet);
_derivedRestriction.MaxLength = XmlBaseConverter.DecimalToInt32((decimal)ParseFacetValue(_nonNegativeInt, facet, SR.Sch_MaxLengthFacetInvalid, null, null));
if ((_baseFixedFlags & RestrictionFlags.MaxLength) != 0)
{
if (!_datatype.IsEqual(_datatype.Restriction.MaxLength, _derivedRestriction.MaxLength))
{
throw new XmlSchemaException(SR.Sch_FacetBaseFixed, facet);
}
}
if ((_baseFlags & RestrictionFlags.MaxLength) != 0)
{
if (_datatype.Restriction.MaxLength < _derivedRestriction.MaxLength)
{
throw new XmlSchemaException(SR.Sch_MaxLengthGtBaseMaxLength, facet);
}
}
if ((_baseFlags & RestrictionFlags.Length) != 0)
{
if (_datatype.Restriction.Length > _derivedRestriction.MaxLength)
{
throw new XmlSchemaException(SR.Sch_MaxMinLengthBaseLength, facet);
}
}
SetFlag(facet, RestrictionFlags.MaxLength);
}
internal void CompilePatternFacet(XmlSchemaPatternFacet facet)
{
CheckProhibitedFlag(facet, RestrictionFlags.Pattern, SR.Sch_PatternFacetProhibited);
if (_firstPattern == true)
{
_regStr = new StringBuilder();
_regStr.Append("(");
_regStr.Append(facet.Value);
_pattern_facet = facet;
_firstPattern = false;
}
else
{
_regStr.Append(")|(");
_regStr.Append(facet.Value);
}
SetFlag(facet, RestrictionFlags.Pattern);
}
internal void CompileEnumerationFacet(XmlSchemaFacet facet, IXmlNamespaceResolver nsmgr, XmlNameTable nameTable)
{
CheckProhibitedFlag(facet, RestrictionFlags.Enumeration, SR.Sch_EnumerationFacetProhibited);
if (_derivedRestriction.Enumeration == null)
{
_derivedRestriction.Enumeration = new ArrayList();
}
_derivedRestriction.Enumeration.Add(ParseFacetValue(_datatype, facet, SR.Sch_EnumerationFacetInvalid, nsmgr, nameTable));
SetFlag(facet, RestrictionFlags.Enumeration);
}
internal void CompileWhitespaceFacet(XmlSchemaFacet facet)
{
CheckProhibitedFlag(facet, RestrictionFlags.WhiteSpace, SR.Sch_WhiteSpaceFacetProhibited);
CheckDupFlag(facet, RestrictionFlags.WhiteSpace, SR.Sch_DupWhiteSpaceFacet);
if (facet.Value == "preserve")
{
_derivedRestriction.WhiteSpace = XmlSchemaWhiteSpace.Preserve;
}
else if (facet.Value == "replace")
{
_derivedRestriction.WhiteSpace = XmlSchemaWhiteSpace.Replace;
}
else if (facet.Value == "collapse")
{
_derivedRestriction.WhiteSpace = XmlSchemaWhiteSpace.Collapse;
}
else
{
throw new XmlSchemaException(SR.Sch_InvalidWhiteSpace, facet.Value, facet);
}
if ((_baseFixedFlags & RestrictionFlags.WhiteSpace) != 0)
{
if (!_datatype.IsEqual(_datatype.Restriction.WhiteSpace, _derivedRestriction.WhiteSpace))
{
throw new XmlSchemaException(SR.Sch_FacetBaseFixed, facet);
}
}
//Check base and derived whitespace facets
XmlSchemaWhiteSpace baseWhitespace;
if ((_baseFlags & RestrictionFlags.WhiteSpace) != 0)
{
baseWhitespace = _datatype.Restriction.WhiteSpace;
}
else
{
baseWhitespace = _datatype.BuiltInWhitespaceFacet;
}
if (baseWhitespace == XmlSchemaWhiteSpace.Collapse &&
(_derivedRestriction.WhiteSpace == XmlSchemaWhiteSpace.Replace || _derivedRestriction.WhiteSpace == XmlSchemaWhiteSpace.Preserve)
)
{
throw new XmlSchemaException(SR.Sch_WhiteSpaceRestriction1, facet);
}
if (baseWhitespace == XmlSchemaWhiteSpace.Replace &&
_derivedRestriction.WhiteSpace == XmlSchemaWhiteSpace.Preserve
)
{
throw new XmlSchemaException(SR.Sch_WhiteSpaceRestriction2, facet);
}
SetFlag(facet, RestrictionFlags.WhiteSpace);
}
internal void CompileMaxInclusiveFacet(XmlSchemaFacet facet)
{
CheckProhibitedFlag(facet, RestrictionFlags.MaxInclusive, SR.Sch_MaxInclusiveFacetProhibited);
CheckDupFlag(facet, RestrictionFlags.MaxInclusive, SR.Sch_DupMaxInclusiveFacet);
_derivedRestriction.MaxInclusive = ParseFacetValue(_builtInType, facet, SR.Sch_MaxInclusiveFacetInvalid, null, null);
if ((_baseFixedFlags & RestrictionFlags.MaxInclusive) != 0)
{
if (!_datatype.IsEqual(_datatype.Restriction.MaxInclusive, _derivedRestriction.MaxInclusive))
{
throw new XmlSchemaException(SR.Sch_FacetBaseFixed, facet);
}
}
CheckValue(_derivedRestriction.MaxInclusive, facet);
SetFlag(facet, RestrictionFlags.MaxInclusive);
}
internal void CompileMaxExclusiveFacet(XmlSchemaFacet facet)
{
CheckProhibitedFlag(facet, RestrictionFlags.MaxExclusive, SR.Sch_MaxExclusiveFacetProhibited);
CheckDupFlag(facet, RestrictionFlags.MaxExclusive, SR.Sch_DupMaxExclusiveFacet);
_derivedRestriction.MaxExclusive = ParseFacetValue(_builtInType, facet, SR.Sch_MaxExclusiveFacetInvalid, null, null);
if ((_baseFixedFlags & RestrictionFlags.MaxExclusive) != 0)
{
if (!_datatype.IsEqual(_datatype.Restriction.MaxExclusive, _derivedRestriction.MaxExclusive))
{
throw new XmlSchemaException(SR.Sch_FacetBaseFixed, facet);
}
}
CheckValue(_derivedRestriction.MaxExclusive, facet);
SetFlag(facet, RestrictionFlags.MaxExclusive);
}
internal void CompileMinInclusiveFacet(XmlSchemaFacet facet)
{
CheckProhibitedFlag(facet, RestrictionFlags.MinInclusive, SR.Sch_MinInclusiveFacetProhibited);
CheckDupFlag(facet, RestrictionFlags.MinInclusive, SR.Sch_DupMinInclusiveFacet);
_derivedRestriction.MinInclusive = ParseFacetValue(_builtInType, facet, SR.Sch_MinInclusiveFacetInvalid, null, null);
if ((_baseFixedFlags & RestrictionFlags.MinInclusive) != 0)
{
if (!_datatype.IsEqual(_datatype.Restriction.MinInclusive, _derivedRestriction.MinInclusive))
{
throw new XmlSchemaException(SR.Sch_FacetBaseFixed, facet);
}
}
CheckValue(_derivedRestriction.MinInclusive, facet);
SetFlag(facet, RestrictionFlags.MinInclusive);
}
internal void CompileMinExclusiveFacet(XmlSchemaFacet facet)
{
CheckProhibitedFlag(facet, RestrictionFlags.MinExclusive, SR.Sch_MinExclusiveFacetProhibited);
CheckDupFlag(facet, RestrictionFlags.MinExclusive, SR.Sch_DupMinExclusiveFacet);
_derivedRestriction.MinExclusive = ParseFacetValue(_builtInType, facet, SR.Sch_MinExclusiveFacetInvalid, null, null);
if ((_baseFixedFlags & RestrictionFlags.MinExclusive) != 0)
{
if (!_datatype.IsEqual(_datatype.Restriction.MinExclusive, _derivedRestriction.MinExclusive))
{
throw new XmlSchemaException(SR.Sch_FacetBaseFixed, facet);
}
}
CheckValue(_derivedRestriction.MinExclusive, facet);
SetFlag(facet, RestrictionFlags.MinExclusive);
}
internal void CompileTotalDigitsFacet(XmlSchemaFacet facet)
{
CheckProhibitedFlag(facet, RestrictionFlags.TotalDigits, SR.Sch_TotalDigitsFacetProhibited);
CheckDupFlag(facet, RestrictionFlags.TotalDigits, SR.Sch_DupTotalDigitsFacet);
XmlSchemaDatatype positiveInt = DatatypeImplementation.GetSimpleTypeFromTypeCode(XmlTypeCode.PositiveInteger).Datatype;
_derivedRestriction.TotalDigits = XmlBaseConverter.DecimalToInt32((decimal)ParseFacetValue(positiveInt, facet, SR.Sch_TotalDigitsFacetInvalid, null, null));
if ((_baseFixedFlags & RestrictionFlags.TotalDigits) != 0)
{
if (!_datatype.IsEqual(_datatype.Restriction.TotalDigits, _derivedRestriction.TotalDigits))
{
throw new XmlSchemaException(SR.Sch_FacetBaseFixed, facet);
}
}
if ((_baseFlags & RestrictionFlags.TotalDigits) != 0)
{
if (_derivedRestriction.TotalDigits > _datatype.Restriction.TotalDigits)
{
throw new XmlSchemaException(SR.Sch_TotalDigitsMismatch, string.Empty);
}
}
SetFlag(facet, RestrictionFlags.TotalDigits);
}
internal void CompileFractionDigitsFacet(XmlSchemaFacet facet)
{
CheckProhibitedFlag(facet, RestrictionFlags.FractionDigits, SR.Sch_FractionDigitsFacetProhibited);
CheckDupFlag(facet, RestrictionFlags.FractionDigits, SR.Sch_DupFractionDigitsFacet);
_derivedRestriction.FractionDigits = XmlBaseConverter.DecimalToInt32((decimal)ParseFacetValue(_nonNegativeInt, facet, SR.Sch_FractionDigitsFacetInvalid, null, null));
if ((_derivedRestriction.FractionDigits != 0) && (_datatype.TypeCode != XmlTypeCode.Decimal))
{
throw new XmlSchemaException(SR.Sch_FractionDigitsFacetInvalid, SR.Sch_FractionDigitsNotOnDecimal, facet);
}
if ((_baseFlags & RestrictionFlags.FractionDigits) != 0)
{
if (_derivedRestriction.FractionDigits > _datatype.Restriction.FractionDigits)
{
throw new XmlSchemaException(SR.Sch_TotalDigitsMismatch, string.Empty);
}
}
SetFlag(facet, RestrictionFlags.FractionDigits);
}
internal void FinishFacetCompile()
{
//Additional check for the pattern facet
//If any XmlSchemaPatternFacet was seen, the string built up inside the loop
//needs to be converted to a Regex
if (_firstPattern == false)
{
if (_derivedRestriction.Patterns == null)
{
_derivedRestriction.Patterns = new ArrayList();
}
try
{
_regStr.Append(")");
string tempStr = _regStr.ToString();
if (tempStr.IndexOf('|') != -1)
{ // ordinal compare
_regStr.Insert(0, "(");
_regStr.Append(")");
}
_derivedRestriction.Patterns.Add(new Regex(Preprocess(_regStr.ToString()), RegexOptions.None));
}
catch (Exception e)
{
throw new XmlSchemaException(SR.Sch_PatternFacetInvalid, new string[] { e.Message }, e, _pattern_facet.SourceUri, _pattern_facet.LineNumber, _pattern_facet.LinePosition, _pattern_facet);
}
}
}
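// Illustrative example (added, not part of the original source): two pattern facets
// "a*" and "b*" are accumulated by CompilePatternFacet as "(a*)|(b*"; FinishFacetCompile
// closes and wraps the alternation, and after Preprocess the compiled regex is "^((a*)|(b*))$".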
private void CheckValue(object value, XmlSchemaFacet facet)
{
RestrictionFacets restriction = _datatype.Restriction;
switch (facet.FacetType)
{
case FacetType.MaxInclusive:
if ((_baseFlags & RestrictionFlags.MaxInclusive) != 0)
{ //Base facet has maxInclusive
if (_datatype.Compare(value, restriction.MaxInclusive) > 0)
{
throw new XmlSchemaException(SR.Sch_MaxInclusiveMismatch, string.Empty);
}
}
if ((_baseFlags & RestrictionFlags.MaxExclusive) != 0)
{ //Base facet has maxExclusive
if (_datatype.Compare(value, restriction.MaxExclusive) >= 0)
{
throw new XmlSchemaException(SR.Sch_MaxIncExlMismatch, string.Empty);
}
}
break;
case FacetType.MaxExclusive:
if ((_baseFlags & RestrictionFlags.MaxExclusive) != 0)
{ //Base facet has maxExclusive
if (_datatype.Compare(value, restriction.MaxExclusive) > 0)
{
throw new XmlSchemaException(SR.Sch_MaxExclusiveMismatch, string.Empty);
}
}
if ((_baseFlags & RestrictionFlags.MaxInclusive) != 0)
{ //Base facet has maxInclusive
if (_datatype.Compare(value, restriction.MaxInclusive) > 0)
{
throw new XmlSchemaException(SR.Sch_MaxExlIncMismatch, string.Empty);
}
}
break;
case FacetType.MinInclusive:
if ((_baseFlags & RestrictionFlags.MinInclusive) != 0)
{ //Base facet has minInclusive
if (_datatype.Compare(value, restriction.MinInclusive) < 0)
{
throw new XmlSchemaException(SR.Sch_MinInclusiveMismatch, string.Empty);
}
}
if ((_baseFlags & RestrictionFlags.MinExclusive) != 0)
{ //Base facet has minExclusive
if (_datatype.Compare(value, restriction.MinExclusive) < 0)
{
throw new XmlSchemaException(SR.Sch_MinIncExlMismatch, string.Empty);
}
}
if ((_baseFlags & RestrictionFlags.MaxExclusive) != 0)
{ //Base facet has maxExclusive
if (_datatype.Compare(value, restriction.MaxExclusive) >= 0)
{
throw new XmlSchemaException(SR.Sch_MinIncMaxExlMismatch, string.Empty);
}
}
break;
case FacetType.MinExclusive:
if ((_baseFlags & RestrictionFlags.MinExclusive) != 0)
{ //Base facet has minExclusive
if (_datatype.Compare(value, restriction.MinExclusive) < 0)
{
throw new XmlSchemaException(SR.Sch_MinExclusiveMismatch, string.Empty);
}
}
if ((_baseFlags & RestrictionFlags.MinInclusive) != 0)
{ //Base facet has minInclusive
if (_datatype.Compare(value, restriction.MinInclusive) < 0)
{
throw new XmlSchemaException(SR.Sch_MinExlIncMismatch, string.Empty);
}
}
if ((_baseFlags & RestrictionFlags.MaxExclusive) != 0)
{ //Base facet has maxExclusive
if (_datatype.Compare(value, restriction.MaxExclusive) >= 0)
{
throw new XmlSchemaException(SR.Sch_MinExlMaxExlMismatch, string.Empty);
}
}
break;
default:
Debug.Assert(false);
break;
}
}
internal void CompileFacetCombinations()
{
RestrictionFacets baseRestriction = _datatype.Restriction;
//An inclusive and an exclusive bound of the same kind (max or min) are not allowed together on the same type, but are allowed across derived types.
if (
(_derivedRestriction.Flags & RestrictionFlags.MaxInclusive) != 0 &&
(_derivedRestriction.Flags & RestrictionFlags.MaxExclusive) != 0
)
{
throw new XmlSchemaException(SR.Sch_MaxInclusiveExclusive, string.Empty);
}
if (
(_derivedRestriction.Flags & RestrictionFlags.MinInclusive) != 0 &&
(_derivedRestriction.Flags & RestrictionFlags.MinExclusive) != 0
)
{
throw new XmlSchemaException(SR.Sch_MinInclusiveExclusive, string.Empty);
}
if (
(_derivedRestriction.Flags & RestrictionFlags.Length) != 0 &&
(_derivedRestriction.Flags & (RestrictionFlags.MinLength | RestrictionFlags.MaxLength)) != 0
)
{
throw new XmlSchemaException(SR.Sch_LengthAndMinMax, string.Empty);
}
CopyFacetsFromBaseType();
// Check combinations
if (
(_derivedRestriction.Flags & RestrictionFlags.MinLength) != 0 &&
(_derivedRestriction.Flags & RestrictionFlags.MaxLength) != 0
)
{
if (_derivedRestriction.MinLength > _derivedRestriction.MaxLength)
{
throw new XmlSchemaException(SR.Sch_MinLengthGtMaxLength, string.Empty);
}
}
//TODO MinInc /MinExc /MaxInc / MaxExc checked in derived types
if (
(_derivedRestriction.Flags & RestrictionFlags.MinInclusive) != 0 &&
(_derivedRestriction.Flags & RestrictionFlags.MaxInclusive) != 0
)
{
if (_datatype.Compare(_derivedRestriction.MinInclusive, _derivedRestriction.MaxInclusive) > 0)
{
throw new XmlSchemaException(SR.Sch_MinInclusiveGtMaxInclusive, string.Empty);
}
}
if (
(_derivedRestriction.Flags & RestrictionFlags.MinInclusive) != 0 &&
(_derivedRestriction.Flags & RestrictionFlags.MaxExclusive) != 0
)
{
if (_datatype.Compare(_derivedRestriction.MinInclusive, _derivedRestriction.MaxExclusive) > 0)
{
throw new XmlSchemaException(SR.Sch_MinInclusiveGtMaxExclusive, string.Empty);
}
}
if (
(_derivedRestriction.Flags & RestrictionFlags.MinExclusive) != 0 &&
(_derivedRestriction.Flags & RestrictionFlags.MaxExclusive) != 0
)
{
if (_datatype.Compare(_derivedRestriction.MinExclusive, _derivedRestriction.MaxExclusive) > 0)
{
throw new XmlSchemaException(SR.Sch_MinExclusiveGtMaxExclusive, string.Empty);
}
}
if (
(_derivedRestriction.Flags & RestrictionFlags.MinExclusive) != 0 &&
(_derivedRestriction.Flags & RestrictionFlags.MaxInclusive) != 0
)
{
if (_datatype.Compare(_derivedRestriction.MinExclusive, _derivedRestriction.MaxInclusive) > 0)
{
throw new XmlSchemaException(SR.Sch_MinExclusiveGtMaxInclusive, string.Empty);
}
}
if ((_derivedRestriction.Flags & (RestrictionFlags.TotalDigits | RestrictionFlags.FractionDigits)) == (RestrictionFlags.TotalDigits | RestrictionFlags.FractionDigits))
{
if (_derivedRestriction.FractionDigits > _derivedRestriction.TotalDigits)
{
throw new XmlSchemaException(SR.Sch_FractionDigitsGtTotalDigits, string.Empty);
}
}
}
private void CopyFacetsFromBaseType()
{
RestrictionFacets baseRestriction = _datatype.Restriction;
// Copy additional facets from the base type
if (
(_derivedRestriction.Flags & RestrictionFlags.Length) == 0 &&
(_baseFlags & RestrictionFlags.Length) != 0
)
{
_derivedRestriction.Length = baseRestriction.Length;
SetFlag(RestrictionFlags.Length);
}
if (
(_derivedRestriction.Flags & RestrictionFlags.MinLength) == 0 &&
(_baseFlags & RestrictionFlags.MinLength) != 0
)
{
_derivedRestriction.MinLength = baseRestriction.MinLength;
SetFlag(RestrictionFlags.MinLength);
}
if (
(_derivedRestriction.Flags & RestrictionFlags.MaxLength) == 0 &&
(_baseFlags & RestrictionFlags.MaxLength) != 0
)
{
_derivedRestriction.MaxLength = baseRestriction.MaxLength;
SetFlag(RestrictionFlags.MaxLength);
}
if ((_baseFlags & RestrictionFlags.Pattern) != 0)
{
if (_derivedRestriction.Patterns == null)
{
_derivedRestriction.Patterns = baseRestriction.Patterns;
}
else
{
_derivedRestriction.Patterns.AddRange(baseRestriction.Patterns);
}
SetFlag(RestrictionFlags.Pattern);
}
if ((_baseFlags & RestrictionFlags.Enumeration) != 0)
{
if (_derivedRestriction.Enumeration == null)
{
_derivedRestriction.Enumeration = baseRestriction.Enumeration;
}
SetFlag(RestrictionFlags.Enumeration);
}
if (
(_derivedRestriction.Flags & RestrictionFlags.WhiteSpace) == 0 &&
(_baseFlags & RestrictionFlags.WhiteSpace) != 0
)
{
_derivedRestriction.WhiteSpace = baseRestriction.WhiteSpace;
SetFlag(RestrictionFlags.WhiteSpace);
}
if (
(_derivedRestriction.Flags & RestrictionFlags.MaxInclusive) == 0 &&
(_baseFlags & RestrictionFlags.MaxInclusive) != 0
)
{
_derivedRestriction.MaxInclusive = baseRestriction.MaxInclusive;
SetFlag(RestrictionFlags.MaxInclusive);
}
if (
(_derivedRestriction.Flags & RestrictionFlags.MaxExclusive) == 0 &&
(_baseFlags & RestrictionFlags.MaxExclusive) != 0
)
{
_derivedRestriction.MaxExclusive = baseRestriction.MaxExclusive;
SetFlag(RestrictionFlags.MaxExclusive);
}
if (
(_derivedRestriction.Flags & RestrictionFlags.MinInclusive) == 0 &&
(_baseFlags & RestrictionFlags.MinInclusive) != 0
)
{
_derivedRestriction.MinInclusive = baseRestriction.MinInclusive;
SetFlag(RestrictionFlags.MinInclusive);
}
if (
(_derivedRestriction.Flags & RestrictionFlags.MinExclusive) == 0 &&
(_baseFlags & RestrictionFlags.MinExclusive) != 0
)
{
_derivedRestriction.MinExclusive = baseRestriction.MinExclusive;
SetFlag(RestrictionFlags.MinExclusive);
}
if (
(_derivedRestriction.Flags & RestrictionFlags.TotalDigits) == 0 &&
(_baseFlags & RestrictionFlags.TotalDigits) != 0
)
{
_derivedRestriction.TotalDigits = baseRestriction.TotalDigits;
SetFlag(RestrictionFlags.TotalDigits);
}
if (
(_derivedRestriction.Flags & RestrictionFlags.FractionDigits) == 0 &&
(_baseFlags & RestrictionFlags.FractionDigits) != 0
)
{
_derivedRestriction.FractionDigits = baseRestriction.FractionDigits;
SetFlag(RestrictionFlags.FractionDigits);
}
}
private object ParseFacetValue(XmlSchemaDatatype datatype, XmlSchemaFacet facet, string code, IXmlNamespaceResolver nsmgr, XmlNameTable nameTable)
{
object typedValue;
Exception ex = datatype.TryParseValue(facet.Value, nameTable, nsmgr, out typedValue);
if (ex == null)
{
return typedValue;
}
else
{
throw new XmlSchemaException(code, new string[] { ex.Message }, ex, facet.SourceUri, facet.LineNumber, facet.LinePosition, facet);
}
}
private struct Map
{
internal Map(char m, string r)
{
match = m;
replacement = r;
}
internal char match;
internal string replacement;
};
private static readonly Map[] s_map = {
new Map('c', "\\p{_xmlC}"),
new Map('C', "\\P{_xmlC}"),
new Map('d', "\\p{_xmlD}"),
new Map('D', "\\P{_xmlD}"),
new Map('i', "\\p{_xmlI}"),
new Map('I', "\\P{_xmlI}"),
new Map('w', "\\p{_xmlW}"),
new Map('W', "\\P{_xmlW}"),
};
private static string Preprocess(string pattern)
{
StringBuilder bufBld = new StringBuilder();
bufBld.Append("^");
char[] source = pattern.ToCharArray();
int length = pattern.Length;
int copyPosition = 0;
for (int position = 0; position < length - 2; position++)
{
if (source[position] == '\\')
{
if (source[position + 1] == '\\')
{
position++; // skip it
}
else
{
char ch = source[position + 1];
for (int i = 0; i < s_map.Length; i++)
{
if (s_map[i].match == ch)
{
if (copyPosition < position)
{
bufBld.Append(source, copyPosition, position - copyPosition);
}
bufBld.Append(s_map[i].replacement);
position++;
copyPosition = position + 1;
break;
}
}
}
}
}
if (copyPosition < length)
{
bufBld.Append(source, copyPosition, length - copyPosition);
}
bufBld.Append("$");
return bufBld.ToString();
}
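// Illustrative example (added, not part of the original source): Preprocess(@"\c+")
// returns @"^\p{_xmlC}+$" - the XML character-class escape is rewritten to its .NET
// Unicode-category equivalent, and the pattern is anchored at both ends because
// XML Schema pattern facets must match the entire value.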
private void CheckProhibitedFlag(XmlSchemaFacet facet, RestrictionFlags flag, string errorCode)
{
if ((_validRestrictionFlags & flag) == 0)
{
throw new XmlSchemaException(errorCode, _datatype.TypeCodeString, facet);
}
}
private void CheckDupFlag(XmlSchemaFacet facet, RestrictionFlags flag, string errorCode)
{
if ((_derivedRestriction.Flags & flag) != 0)
{
throw new XmlSchemaException(errorCode, facet);
}
}
private void SetFlag(XmlSchemaFacet facet, RestrictionFlags flag)
{
_derivedRestriction.Flags |= flag;
if (facet.IsFixed)
{
_derivedRestriction.FixedFlags |= flag;
}
}
private void SetFlag(RestrictionFlags flag)
{
_derivedRestriction.Flags |= flag;
if ((_baseFixedFlags & flag) != 0)
{
_derivedRestriction.FixedFlags |= flag;
}
}
}
internal virtual Exception CheckLexicalFacets(ref string parseString, XmlSchemaDatatype datatype)
{
CheckWhitespaceFacets(ref parseString, datatype);
return CheckPatternFacets(datatype.Restriction, parseString);
}
internal virtual Exception CheckValueFacets(object value, XmlSchemaDatatype datatype)
{
return null;
}
internal virtual Exception CheckValueFacets(decimal value, XmlSchemaDatatype datatype)
{
return null;
}
internal virtual Exception CheckValueFacets(Int64 value, XmlSchemaDatatype datatype)
{
return null;
}
internal virtual Exception CheckValueFacets(Int32 value, XmlSchemaDatatype datatype)
{
return null;
}
internal virtual Exception CheckValueFacets(Int16 value, XmlSchemaDatatype datatype)
{
return null;
}
internal virtual Exception CheckValueFacets(byte value, XmlSchemaDatatype datatype)
{
return null;
}
internal virtual Exception CheckValueFacets(DateTime value, XmlSchemaDatatype datatype)
{
return null;
}
internal virtual Exception CheckValueFacets(double value, XmlSchemaDatatype datatype)
{
return null;
}
internal virtual Exception CheckValueFacets(float value, XmlSchemaDatatype datatype)
{
return null;
}
internal virtual Exception CheckValueFacets(string value, XmlSchemaDatatype datatype)
{
return null;
}
internal virtual Exception CheckValueFacets(byte[] value, XmlSchemaDatatype datatype)
{
return null;
}
internal virtual Exception CheckValueFacets(TimeSpan value, XmlSchemaDatatype datatype)
{
return null;
}
internal virtual Exception CheckValueFacets(XmlQualifiedName value, XmlSchemaDatatype datatype)
{
return null;
}
internal void CheckWhitespaceFacets(ref string s, XmlSchemaDatatype datatype)
{
// before parsing, check whitespace facet
RestrictionFacets restriction = datatype.Restriction;
switch (datatype.Variety)
{
case XmlSchemaDatatypeVariety.List:
s = s.Trim();
break;
case XmlSchemaDatatypeVariety.Atomic:
if (datatype.BuiltInWhitespaceFacet == XmlSchemaWhiteSpace.Collapse)
{
s = XmlComplianceUtil.NonCDataNormalize(s);
}
else if (datatype.BuiltInWhitespaceFacet == XmlSchemaWhiteSpace.Replace)
{
s = XmlComplianceUtil.CDataNormalize(s);
}
else if (restriction != null && (restriction.Flags & RestrictionFlags.WhiteSpace) != 0)
{ //Restriction has whitespace facet specified
if (restriction.WhiteSpace == XmlSchemaWhiteSpace.Replace)
{
s = XmlComplianceUtil.CDataNormalize(s);
}
else if (restriction.WhiteSpace == XmlSchemaWhiteSpace.Collapse)
{
s = XmlComplianceUtil.NonCDataNormalize(s);
}
}
break;
default:
break;
}
}
internal Exception CheckPatternFacets(RestrictionFacets restriction, string value)
{
if (restriction != null && (restriction.Flags & RestrictionFlags.Pattern) != 0)
{
for (int i = 0; i < restriction.Patterns.Count; ++i)
{
Regex regex = (Regex)restriction.Patterns[i];
if (!regex.IsMatch(value))
{
return new XmlSchemaException(SR.Sch_PatternConstraintFailed, string.Empty);
}
}
}
return null;
}
internal virtual bool MatchEnumeration(object value, ArrayList enumeration, XmlSchemaDatatype datatype)
{
return false;
}
//Compile-time Facet Checking
internal virtual RestrictionFacets ConstructRestriction(DatatypeImplementation datatype, XmlSchemaObjectCollection facets, XmlNameTable nameTable)
{
//Datatype is the type on which this method is called
RestrictionFacets derivedRestriction = new RestrictionFacets();
FacetsCompiler facetCompiler = new FacetsCompiler(datatype, derivedRestriction);
for (int i = 0; i < facets.Count; ++i)
{
XmlSchemaFacet facet = (XmlSchemaFacet)facets[i];
if (facet.Value == null)
{
throw new XmlSchemaException(SR.Sch_InvalidFacet, facet);
}
IXmlNamespaceResolver nsmgr = new SchemaNamespaceManager(facet);
switch (facet.FacetType)
{
case FacetType.Length:
facetCompiler.CompileLengthFacet(facet);
break;
case FacetType.MinLength:
facetCompiler.CompileMinLengthFacet(facet);
break;
case FacetType.MaxLength:
facetCompiler.CompileMaxLengthFacet(facet);
break;
case FacetType.Pattern:
facetCompiler.CompilePatternFacet(facet as XmlSchemaPatternFacet);
break;
case FacetType.Enumeration:
facetCompiler.CompileEnumerationFacet(facet, nsmgr, nameTable);
break;
case FacetType.Whitespace:
facetCompiler.CompileWhitespaceFacet(facet);
break;
case FacetType.MinInclusive:
facetCompiler.CompileMinInclusiveFacet(facet);
break;
case FacetType.MinExclusive:
facetCompiler.CompileMinExclusiveFacet(facet);
break;
case FacetType.MaxInclusive:
facetCompiler.CompileMaxInclusiveFacet(facet);
break;
case FacetType.MaxExclusive:
facetCompiler.CompileMaxExclusiveFacet(facet);
break;
case FacetType.TotalDigits:
facetCompiler.CompileTotalDigitsFacet(facet);
break;
case FacetType.FractionDigits:
facetCompiler.CompileFractionDigitsFacet(facet);
break;
default:
throw new XmlSchemaException(SR.Sch_UnknownFacet, facet);
}
}
facetCompiler.FinishFacetCompile();
facetCompiler.CompileFacetCombinations();
return derivedRestriction;
}
internal static decimal Power(int x, int y)
{
//Returns X raised to the power Y
decimal returnValue = 1m;
decimal decimalValue = (decimal)x;
if (y > 28)
{ //CLR decimal cannot handle more than 29 digits (10 to the power of 28)
return decimal.MaxValue;
}
for (int i = 0; i < y; i++)
{
returnValue = returnValue * decimalValue;
}
return returnValue;
}
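// Illustrative example (added, not part of the original source): Power(10, 4) returns
// 10000m; Power(10, 29) returns decimal.MaxValue because a CLR decimal cannot represent 10^29.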
}
internal class Numeric10FacetsChecker : FacetsChecker
{
private static readonly char[] s_signs = new char[] { '+', '-' };
private decimal _maxValue;
private decimal _minValue;
internal Numeric10FacetsChecker(decimal minVal, decimal maxVal)
{
_minValue = minVal;
_maxValue = maxVal;
}
internal override Exception CheckValueFacets(object value, XmlSchemaDatatype datatype)
{
decimal decimalValue = datatype.ValueConverter.ToDecimal(value);
return CheckValueFacets(decimalValue, datatype);
}
internal override Exception CheckValueFacets(decimal value, XmlSchemaDatatype datatype)
{
RestrictionFacets restriction = datatype.Restriction;
RestrictionFlags flags = restriction != null ? restriction.Flags : 0;
XmlValueConverter valueConverter = datatype.ValueConverter;
//Check built-in facets
if (value > _maxValue || value < _minValue)
{
return new OverflowException(SR.Format(SR.XmlConvert_Overflow, value.ToString(CultureInfo.InvariantCulture), datatype.TypeCodeString));
}
//Check user-defined facets
if (flags != 0)
{
if ((flags & RestrictionFlags.MaxInclusive) != 0)
{
if (value > valueConverter.ToDecimal(restriction.MaxInclusive))
{
return new XmlSchemaException(SR.Sch_MaxInclusiveConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.MaxExclusive) != 0)
{
if (value >= valueConverter.ToDecimal(restriction.MaxExclusive))
{
return new XmlSchemaException(SR.Sch_MaxExclusiveConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.MinInclusive) != 0)
{
if (value < valueConverter.ToDecimal(restriction.MinInclusive))
{
return new XmlSchemaException(SR.Sch_MinInclusiveConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.MinExclusive) != 0)
{
if (value <= valueConverter.ToDecimal(restriction.MinExclusive))
{
return new XmlSchemaException(SR.Sch_MinExclusiveConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.Enumeration) != 0)
{
if (!MatchEnumeration(value, restriction.Enumeration, valueConverter))
{
return new XmlSchemaException(SR.Sch_EnumerationConstraintFailed, string.Empty);
}
}
return CheckTotalAndFractionDigits(value, restriction.TotalDigits, restriction.FractionDigits, ((flags & RestrictionFlags.TotalDigits) != 0), ((flags & RestrictionFlags.FractionDigits) != 0));
}
return null;
}
internal override Exception CheckValueFacets(Int64 value, XmlSchemaDatatype datatype)
{
decimal decimalValue = (decimal)value;
return CheckValueFacets(decimalValue, datatype);
}
internal override Exception CheckValueFacets(Int32 value, XmlSchemaDatatype datatype)
{
decimal decimalValue = (decimal)value;
return CheckValueFacets(decimalValue, datatype);
}
internal override Exception CheckValueFacets(Int16 value, XmlSchemaDatatype datatype)
{
decimal decimalValue = (decimal)value;
return CheckValueFacets(decimalValue, datatype);
}
internal override Exception CheckValueFacets(byte value, XmlSchemaDatatype datatype)
{
decimal decimalValue = (decimal)value;
return CheckValueFacets(decimalValue, datatype);
}
internal override bool MatchEnumeration(object value, ArrayList enumeration, XmlSchemaDatatype datatype)
{
return MatchEnumeration(datatype.ValueConverter.ToDecimal(value), enumeration, datatype.ValueConverter);
}
internal bool MatchEnumeration(decimal value, ArrayList enumeration, XmlValueConverter valueConverter)
{
for (int i = 0; i < enumeration.Count; ++i)
{
if (value == valueConverter.ToDecimal(enumeration[i]))
{
return true;
}
}
return false;
}
internal Exception CheckTotalAndFractionDigits(decimal value, int totalDigits, int fractionDigits, bool checkTotal, bool checkFraction)
{
decimal maxValue = FacetsChecker.Power(10, totalDigits) - 1; //(decimal)Math.Pow(10, totalDigits) - 1 ;
int powerCnt = 0;
if (value < 0)
{
value = Decimal.Negate(value); //Need to compare maxValue allowed against the absolute value
}
while (Decimal.Truncate(value) != value)
{ //Scale up while the value still has a fractional part
value = value * 10;
powerCnt++;
}
if (checkTotal && (value > maxValue || powerCnt > totalDigits))
{
return new XmlSchemaException(SR.Sch_TotalDigitsConstraintFailed, string.Empty);
}
if (checkFraction && powerCnt > fractionDigits)
{
return new XmlSchemaException(SR.Sch_FractionDigitsConstraintFailed, string.Empty);
}
return null;
}
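// Illustrative example (added, not part of the original source): for value 12.34 with
// totalDigits = 4 and fractionDigits = 2 both checks pass (the loop scales the value to
// 1234 with powerCnt == 2); for 12.345 with fractionDigits = 2 the fraction-digits check
// fails because powerCnt reaches 3.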
}
internal class Numeric2FacetsChecker : FacetsChecker
{
internal override Exception CheckValueFacets(object value, XmlSchemaDatatype datatype)
{
double doubleValue = datatype.ValueConverter.ToDouble(value);
return CheckValueFacets(doubleValue, datatype);
}
internal override Exception CheckValueFacets(double value, XmlSchemaDatatype datatype)
{
RestrictionFacets restriction = datatype.Restriction;
RestrictionFlags flags = restriction != null ? restriction.Flags : 0;
XmlValueConverter valueConverter = datatype.ValueConverter;
if ((flags & RestrictionFlags.MaxInclusive) != 0)
{
if (value > valueConverter.ToDouble(restriction.MaxInclusive))
{
return new XmlSchemaException(SR.Sch_MaxInclusiveConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.MaxExclusive) != 0)
{
if (value >= valueConverter.ToDouble(restriction.MaxExclusive))
{
return new XmlSchemaException(SR.Sch_MaxExclusiveConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.MinInclusive) != 0)
{
if (value < (valueConverter.ToDouble(restriction.MinInclusive)))
{
return new XmlSchemaException(SR.Sch_MinInclusiveConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.MinExclusive) != 0)
{
if (value <= valueConverter.ToDouble(restriction.MinExclusive))
{
return new XmlSchemaException(SR.Sch_MinExclusiveConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.Enumeration) != 0)
{
if (!MatchEnumeration(value, restriction.Enumeration, valueConverter))
{
return new XmlSchemaException(SR.Sch_EnumerationConstraintFailed, string.Empty);
}
}
return null;
}
internal override Exception CheckValueFacets(float value, XmlSchemaDatatype datatype)
{
double doubleValue = (double)value;
return CheckValueFacets(doubleValue, datatype);
}
internal override bool MatchEnumeration(object value, ArrayList enumeration, XmlSchemaDatatype datatype)
{
return MatchEnumeration(datatype.ValueConverter.ToDouble(value), enumeration, datatype.ValueConverter);
}
private bool MatchEnumeration(double value, ArrayList enumeration, XmlValueConverter valueConverter)
{
for (int i = 0; i < enumeration.Count; ++i)
{
if (value == valueConverter.ToDouble(enumeration[i]))
{
return true;
}
}
return false;
}
}
internal class DurationFacetsChecker : FacetsChecker
{
internal override Exception CheckValueFacets(object value, XmlSchemaDatatype datatype)
{
TimeSpan timeSpanValue = (TimeSpan)datatype.ValueConverter.ChangeType(value, typeof(TimeSpan));
return CheckValueFacets(timeSpanValue, datatype);
}
internal override Exception CheckValueFacets(TimeSpan value, XmlSchemaDatatype datatype)
{
RestrictionFacets restriction = datatype.Restriction;
RestrictionFlags flags = restriction != null ? restriction.Flags : 0;
if ((flags & RestrictionFlags.MaxInclusive) != 0)
{
if (TimeSpan.Compare(value, (TimeSpan)restriction.MaxInclusive) > 0)
{
return new XmlSchemaException(SR.Sch_MaxInclusiveConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.MaxExclusive) != 0)
{
if (TimeSpan.Compare(value, (TimeSpan)restriction.MaxExclusive) >= 0)
{
return new XmlSchemaException(SR.Sch_MaxExclusiveConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.MinInclusive) != 0)
{
if (TimeSpan.Compare(value, (TimeSpan)restriction.MinInclusive) < 0)
{
return new XmlSchemaException(SR.Sch_MinInclusiveConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.MinExclusive) != 0)
{
if (TimeSpan.Compare(value, (TimeSpan)restriction.MinExclusive) <= 0)
{
return new XmlSchemaException(SR.Sch_MinExclusiveConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.Enumeration) != 0)
{
if (!MatchEnumeration(value, restriction.Enumeration))
{
return new XmlSchemaException(SR.Sch_EnumerationConstraintFailed, string.Empty);
}
}
return null;
}
internal override bool MatchEnumeration(object value, ArrayList enumeration, XmlSchemaDatatype datatype)
{
return MatchEnumeration((TimeSpan)value, enumeration);
}
private bool MatchEnumeration(TimeSpan value, ArrayList enumeration)
{
for (int i = 0; i < enumeration.Count; ++i)
{
if (TimeSpan.Compare(value, (TimeSpan)enumeration[i]) == 0)
{
return true;
}
}
return false;
}
}
internal class DateTimeFacetsChecker : FacetsChecker
{
internal override Exception CheckValueFacets(object value, XmlSchemaDatatype datatype)
{
DateTime dateTimeValue = datatype.ValueConverter.ToDateTime(value);
return CheckValueFacets(dateTimeValue, datatype);
}
internal override Exception CheckValueFacets(DateTime value, XmlSchemaDatatype datatype)
{
RestrictionFacets restriction = datatype.Restriction;
RestrictionFlags flags = restriction != null ? restriction.Flags : 0;
if ((flags & RestrictionFlags.MaxInclusive) != 0)
{
if (datatype.Compare(value, (DateTime)restriction.MaxInclusive) > 0)
{
return new XmlSchemaException(SR.Sch_MaxInclusiveConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.MaxExclusive) != 0)
{
if (datatype.Compare(value, (DateTime)restriction.MaxExclusive) >= 0)
{
return new XmlSchemaException(SR.Sch_MaxExclusiveConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.MinInclusive) != 0)
{
if (datatype.Compare(value, (DateTime)restriction.MinInclusive) < 0)
{
return new XmlSchemaException(SR.Sch_MinInclusiveConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.MinExclusive) != 0)
{
if (datatype.Compare(value, (DateTime)restriction.MinExclusive) <= 0)
{
return new XmlSchemaException(SR.Sch_MinExclusiveConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.Enumeration) != 0)
{
if (!MatchEnumeration(value, restriction.Enumeration, datatype))
{
return new XmlSchemaException(SR.Sch_EnumerationConstraintFailed, string.Empty);
}
}
return null;
}
internal override bool MatchEnumeration(object value, ArrayList enumeration, XmlSchemaDatatype datatype)
{
return MatchEnumeration(datatype.ValueConverter.ToDateTime(value), enumeration, datatype);
}
private bool MatchEnumeration(DateTime value, ArrayList enumeration, XmlSchemaDatatype datatype)
{
for (int i = 0; i < enumeration.Count; ++i)
{
if (datatype.Compare(value, (DateTime)enumeration[i]) == 0)
{
return true;
}
}
return false;
}
}
internal class StringFacetsChecker : FacetsChecker
{ //All types derived from string & anyURI
private static Regex s_languagePattern;
private static Regex LanguagePattern
{
get
{
if (s_languagePattern == null)
{
Regex langRegex = new Regex("^([a-zA-Z]{1,8})(-[a-zA-Z0-9]{1,8})*$", RegexOptions.None);
Interlocked.CompareExchange(ref s_languagePattern, langRegex, null);
}
return s_languagePattern;
}
}
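// Illustrative note (added, not part of the original source): the pattern accepts
// RFC 3066 style tags made of 1-8 letter/digit subtags separated by '-', so "en",
// "en-US" and "x-pig-latin" all match, while a 14-character primary subtag does not.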
internal override Exception CheckValueFacets(object value, XmlSchemaDatatype datatype)
{
string stringValue = datatype.ValueConverter.ToString(value);
return CheckValueFacets(stringValue, datatype, true);
}
internal override Exception CheckValueFacets(string value, XmlSchemaDatatype datatype)
{
return CheckValueFacets(value, datatype, true);
}
internal Exception CheckValueFacets(string value, XmlSchemaDatatype datatype, bool verifyUri)
{
//Length, MinLength, MaxLength
int length = value.Length;
RestrictionFacets restriction = datatype.Restriction;
RestrictionFlags flags = restriction != null ? restriction.Flags : 0;
Exception exception;
exception = CheckBuiltInFacets(value, datatype.TypeCode, verifyUri);
if (exception != null) return exception;
if (flags != 0)
{
if ((flags & RestrictionFlags.Length) != 0)
{
if (restriction.Length != length)
{
return new XmlSchemaException(SR.Sch_LengthConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.MinLength) != 0)
{
if (length < restriction.MinLength)
{
return new XmlSchemaException(SR.Sch_MinLengthConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.MaxLength) != 0)
{
if (restriction.MaxLength < length)
{
return new XmlSchemaException(SR.Sch_MaxLengthConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.Enumeration) != 0)
{
if (!MatchEnumeration(value, restriction.Enumeration, datatype))
{
return new XmlSchemaException(SR.Sch_EnumerationConstraintFailed, string.Empty);
}
}
}
return null;
}
internal override bool MatchEnumeration(object value, ArrayList enumeration, XmlSchemaDatatype datatype)
{
return MatchEnumeration(datatype.ValueConverter.ToString(value), enumeration, datatype);
}
private bool MatchEnumeration(string value, ArrayList enumeration, XmlSchemaDatatype datatype)
{
if (datatype.TypeCode == XmlTypeCode.AnyUri)
{
for (int i = 0; i < enumeration.Count; ++i)
{
if (value.Equals(((Uri)enumeration[i]).OriginalString))
{
return true;
}
}
}
else
{
for (int i = 0; i < enumeration.Count; ++i)
{
if (value.Equals((string)enumeration[i]))
{
return true;
}
}
}
return false;
}
private Exception CheckBuiltInFacets(string s, XmlTypeCode typeCode, bool verifyUri)
{
Exception exception = null;
switch (typeCode)
{
case XmlTypeCode.AnyUri:
if (verifyUri)
{
Uri uri;
exception = XmlConvert.TryToUri(s, out uri);
}
break;
case XmlTypeCode.NormalizedString:
exception = XmlConvert.TryVerifyNormalizedString(s);
break;
case XmlTypeCode.Token:
exception = XmlConvert.TryVerifyTOKEN(s);
break;
case XmlTypeCode.Language:
if (s == null || s.Length == 0)
{
return new XmlSchemaException(SR.Sch_EmptyAttributeValue, string.Empty);
}
if (!LanguagePattern.IsMatch(s))
{
return new XmlSchemaException(SR.Sch_InvalidLanguageId, string.Empty);
}
break;
case XmlTypeCode.NmToken:
exception = XmlConvert.TryVerifyNMTOKEN(s);
break;
case XmlTypeCode.Name:
exception = XmlConvert.TryVerifyName(s);
break;
case XmlTypeCode.NCName:
case XmlTypeCode.Id:
case XmlTypeCode.Idref:
case XmlTypeCode.Entity:
exception = XmlConvert.TryVerifyNCName(s);
break;
default:
break;
}
return exception;
}
}
internal class QNameFacetsChecker : FacetsChecker
{
internal override Exception CheckValueFacets(object value, XmlSchemaDatatype datatype)
{
XmlQualifiedName qualifiedNameValue = (XmlQualifiedName)datatype.ValueConverter.ChangeType(value, typeof(XmlQualifiedName));
return CheckValueFacets(qualifiedNameValue, datatype);
}
internal override Exception CheckValueFacets(XmlQualifiedName value, XmlSchemaDatatype datatype)
{
RestrictionFacets restriction = datatype.Restriction;
RestrictionFlags flags = restriction != null ? restriction.Flags : 0;
if (flags != 0)
{ //If there are facets defined
string strValue = value.ToString();
int length = strValue.Length;
if ((flags & RestrictionFlags.Length) != 0)
{
if (restriction.Length != length)
{
return new XmlSchemaException(SR.Sch_LengthConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.MinLength) != 0)
{
if (length < restriction.MinLength)
{
return new XmlSchemaException(SR.Sch_MinLengthConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.MaxLength) != 0)
{
if (restriction.MaxLength < length)
{
return new XmlSchemaException(SR.Sch_MaxLengthConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.Enumeration) != 0)
{
if (!MatchEnumeration(value, restriction.Enumeration))
{
return new XmlSchemaException(SR.Sch_EnumerationConstraintFailed, string.Empty);
}
}
}
return null;
}
internal override bool MatchEnumeration(object value, ArrayList enumeration, XmlSchemaDatatype datatype)
{
return MatchEnumeration((XmlQualifiedName)datatype.ValueConverter.ChangeType(value, typeof(XmlQualifiedName)), enumeration);
}
private bool MatchEnumeration(XmlQualifiedName value, ArrayList enumeration)
{
for (int i = 0; i < enumeration.Count; ++i)
{
if (value.Equals((XmlQualifiedName)enumeration[i]))
{
return true;
}
}
return false;
}
}
internal class MiscFacetsChecker : FacetsChecker
{ //For bool, anySimpleType
}
internal class BinaryFacetsChecker : FacetsChecker
{ //hexBinary & Base64Binary
internal override Exception CheckValueFacets(object value, XmlSchemaDatatype datatype)
{
byte[] byteArrayValue = (byte[])value;
return CheckValueFacets(byteArrayValue, datatype);
}
internal override Exception CheckValueFacets(byte[] value, XmlSchemaDatatype datatype)
{
//Length, MinLength, MaxLength
RestrictionFacets restriction = datatype.Restriction;
int length = value.Length;
RestrictionFlags flags = restriction != null ? restriction.Flags : 0;
if (flags != 0)
{ //if it has facets defined
if ((flags & RestrictionFlags.Length) != 0)
{
if (restriction.Length != length)
{
return new XmlSchemaException(SR.Sch_LengthConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.MinLength) != 0)
{
if (length < restriction.MinLength)
{
return new XmlSchemaException(SR.Sch_MinLengthConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.MaxLength) != 0)
{
if (restriction.MaxLength < length)
{
return new XmlSchemaException(SR.Sch_MaxLengthConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.Enumeration) != 0)
{
if (!MatchEnumeration(value, restriction.Enumeration, datatype))
{
return new XmlSchemaException(SR.Sch_EnumerationConstraintFailed, string.Empty);
}
}
}
return null;
}
internal override bool MatchEnumeration(object value, ArrayList enumeration, XmlSchemaDatatype datatype)
{
return MatchEnumeration((byte[])value, enumeration, datatype);
}
private bool MatchEnumeration(byte[] value, ArrayList enumeration, XmlSchemaDatatype datatype)
{
for (int i = 0; i < enumeration.Count; ++i)
{
if (datatype.Compare(value, (byte[])enumeration[i]) == 0)
{
return true;
}
}
return false;
}
}
internal class ListFacetsChecker : FacetsChecker
{
internal override Exception CheckValueFacets(object value, XmlSchemaDatatype datatype)
{
//Check for facets allowed on lists - Length, MinLength, MaxLength
Array values = value as Array;
Debug.Assert(values != null);
RestrictionFacets restriction = datatype.Restriction;
RestrictionFlags flags = restriction != null ? restriction.Flags : 0;
if ((flags & (RestrictionFlags.Length | RestrictionFlags.MinLength | RestrictionFlags.MaxLength)) != 0)
{
int length = values.Length;
if ((flags & RestrictionFlags.Length) != 0)
{
if (restriction.Length != length)
{
return new XmlSchemaException(SR.Sch_LengthConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.MinLength) != 0)
{
if (length < restriction.MinLength)
{
return new XmlSchemaException(SR.Sch_MinLengthConstraintFailed, string.Empty);
}
}
if ((flags & RestrictionFlags.MaxLength) != 0)
{
if (restriction.MaxLength < length)
{
return new XmlSchemaException(SR.Sch_MaxLengthConstraintFailed, string.Empty);
}
}
}
if ((flags & RestrictionFlags.Enumeration) != 0)
{
if (!MatchEnumeration(value, restriction.Enumeration, datatype))
{
return new XmlSchemaException(SR.Sch_EnumerationConstraintFailed, string.Empty);
}
}
return null;
}
internal override bool MatchEnumeration(object value, ArrayList enumeration, XmlSchemaDatatype datatype)
{
for (int i = 0; i < enumeration.Count; ++i)
{
if (datatype.Compare(value, enumeration[i]) == 0)
{
return true;
}
}
return false;
}
}
internal class UnionFacetsChecker : FacetsChecker
{
internal override Exception CheckValueFacets(object value, XmlSchemaDatatype datatype)
{
RestrictionFacets restriction = datatype.Restriction;
RestrictionFlags flags = restriction != null ? restriction.Flags : 0;
if ((flags & RestrictionFlags.Enumeration) != 0)
{
if (!MatchEnumeration(value, restriction.Enumeration, datatype))
{
return new XmlSchemaException(SR.Sch_EnumerationConstraintFailed, string.Empty);
}
}
return null;
}
internal override bool MatchEnumeration(object value, ArrayList enumeration, XmlSchemaDatatype datatype)
{
for (int i = 0; i < enumeration.Count; ++i)
{
if (datatype.Compare(value, enumeration[i]) == 0)
{ //Compare on Datatype_union will compare two XsdSimpleValue
return true;
}
}
return false;
}
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//-----------------------------------------------------------------------------
//
// Description:
// ContentType class parses and validates the content-type string.
// It provides functionality to compare the type/subtype values.
//
// Details:
// Grammar which this class follows -
//
// Content-type grammar MUST conform to media-type grammar as per
// RFC 2616 (ABNF notation):
//
// media-type = type "/" subtype *( ";" parameter )
// type = token
// subtype = token
// parameter = attribute "=" value
// attribute = token
// value = token | quoted-string
// quoted-string = ( <"> *(qdtext | quoted-pair ) <"> )
// qdtext = <any TEXT except <">>
// quoted-pair = "\" CHAR
// token = 1*<any CHAR except CTLs or separators>
// separators = "(" | ")" | "<" | ">" | "@"
// | "," | ";" | ":" | "\" | <">
// | "/" | "[" | "]" | "?" | "="
// | "{" | "}" | SP | HT
// TEXT = <any OCTET except CTLs, but including LWS>
// OCTET = <any 8-bit sequence of data>
// CHAR = <any US-ASCII character (octets 0 - 127)>
// CTL = <any US-ASCII control character(octets 0 - 31)and DEL(127)>
// CR = <US-ASCII CR, carriage return (13)>
// LF = <US-ASCII LF, linefeed (10)>
// SP = <US-ASCII SP, space (32)>
// HT = <US-ASCII HT, horizontal-tab (9)>
// <"> = <US-ASCII double-quote mark (34)>
// LWS = [CRLF] 1*( SP | HT )
// CRLF = CR LF
// Linear white space (LWS) MUST NOT be used between the type and subtype, nor
// between an attribute and its value. Leading and trailing LWS are prohibited.
//
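// Illustrative examples (added, not part of the original comment):
//   text/xml                       - valid, no parameters
//   text/xml ; charset="utf-8"     - valid, one parameter with a quoted-string value
//   " text/xml" (leading space)    - rejected, leading LWS is prohibited
//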
//-----------------------------------------------------------------------------
using System;
using System.Collections.Generic; // For Dictionary<string, string>
using System.Text; // For StringBuilder
using System.Diagnostics; // For Debug.Assert
namespace System.IO.Packaging
{
/// <summary>
/// Content Type class
/// </summary>
internal sealed class ContentType
{
//------------------------------------------------------
//
// Internal Constructors
//
//------------------------------------------------------
#region Internal Constructors
/// <summary>
/// This constructor creates a ContentType object that represents
/// the content-type string. At construction time we validate the
/// string as per the grammar specified in RFC 2616.
/// Note: We allow empty strings as valid input. Empty string should
/// be used more as an indication of an absent/unknown ContentType.
/// </summary>
/// <param name="contentType">content-type</param>
/// <exception cref="ArgumentNullException">If the contentType parameter is null</exception>
/// <exception cref="ArgumentException">If the contentType string has leading or
/// trailing Linear White Spaces(LWS) characters</exception>
/// <exception cref="ArgumentException">If the contentType string has invalid CR-LF characters</exception>
internal ContentType(string contentType)
{
if (contentType == null)
throw new ArgumentNullException("contentType");
if (contentType.Length == 0)
{
_contentType = String.Empty;
}
else
{
if (IsLinearWhiteSpaceChar(contentType[0]) || IsLinearWhiteSpaceChar(contentType[contentType.Length - 1]))
throw new ArgumentException(SR.ContentTypeCannotHaveLeadingTrailingLWS);
//Carriage return can be expressed as '\r\n' or '\n\r'
//We need to make sure that a \r is accompanied by \n
ValidateCarriageReturns(contentType);
//Begin Parsing
int semiColonIndex = contentType.IndexOf(SemicolonSeparator);
if (semiColonIndex == -1)
{
// Parse content type similar to - type/subtype
ParseTypeAndSubType(contentType);
}
else
{
// Parse content type similar to - type/subtype ; param1=value1 ; param2=value2 ; param3="value3"
ParseTypeAndSubType(contentType.Substring(0, semiColonIndex));
ParseParameterAndValue(contentType.Substring(semiColonIndex));
}
}
// keep this untouched for return from OriginalString property
_originalString = contentType;
//This variable is used to print out the correct content type string representation
//using the ToString method. This is mainly important while debugging and seeing the
//value of the content type object in the debugger.
_isInitialized = true;
}
#endregion Internal Constructors
//------------------------------------------------------
//
// Internal Methods
//
//------------------------------------------------------
#region Internal Properties
/// <summary>
/// TypeComponent of the Content Type
/// If the content type is "text/xml". This property will return "text"
/// </summary>
internal string TypeComponent
{
get
{
return _type;
}
}
/// <summary>
/// SubType component
/// If the content type is "text/xml". This property will return "xml"
/// </summary>
internal string SubTypeComponent
{
get
{
return _subType;
}
}
/// <summary>
/// Enumerator which iterates over the Parameter and Value pairs which are stored
/// in a dictionary. We hand out just the enumerator in order to make this property
/// ReadOnly
/// Consider following Content type -
/// type/subtype ; param1=value1 ; param2=value2 ; param3="value3"
/// This will return an enumerator over a dictionary of the parameter/value pairs.
/// </summary>
internal Dictionary<string, string>.Enumerator ParameterValuePairs
{
get
{
EnsureParameterDictionary();
return _parameterDictionary.GetEnumerator();
}
}
#endregion Internal Properties
//------------------------------------------------------
//
// Internal Methods
//
//------------------------------------------------------
#region Internal Methods
/// <summary>
/// This method does a strong comparison of the content types, as parameters are not allowed.
/// We only compare the type and subType values in an ASCII case-insensitive manner.
/// Parameters are not allowed to be present on any of the content type operands.
/// </summary>
/// <param name="contentType">Content type to be compared with</param>
/// <returns></returns>
internal bool AreTypeAndSubTypeEqual(ContentType contentType)
{
return AreTypeAndSubTypeEqual(contentType, false);
}
/// <summary>
/// This method does a weak comparison of the content types. We only compare the
/// type and subType values in an ASCII case-insensitive manner.
/// Parameter and value pairs are not used for the comparison.
/// If you wish to compare the parameters too, then you must get the ParameterValuePairs from
/// both the ContentType objects and compare each parameter entry.
/// The allowParameterValuePairs parameter is used to indicate whether the
/// comparison is tolerant to parameters being present or not.
/// </summary>
/// <param name="contentType">Content type to be compared with</param>
/// <param name="allowParameterValuePairs">If true, allows the presence of parameter value pairs.
/// If false, parameter/value pairs cannot be present in the content type string.
/// In either case, the parameter value pair is not used for the comparison.</param>
/// <returns></returns>
internal bool AreTypeAndSubTypeEqual(ContentType contentType, bool allowParameterValuePairs)
{
bool result = false;
if (contentType != null)
{
if (!allowParameterValuePairs)
{
//Return false if this content type object has parameters
if (_parameterDictionary != null)
{
if (_parameterDictionary.Count > 0)
return false;
}
//Return false if the content type object passed in has parameters
Dictionary<string, string>.Enumerator contentTypeEnumerator;
contentTypeEnumerator = contentType.ParameterValuePairs;
contentTypeEnumerator.MoveNext();
if (contentTypeEnumerator.Current.Key != null)
return false;
}
// Perform a case-insensitive comparison on the type/subtype strings. This is a
// safe comparison because the _type and _subType strings have been restricted to
// ASCII letters, digits, and a small set of symbols. This is not a safe comparison
// for the broader set of strings that have not been restricted in the same way.
result = (String.Equals(_type, contentType.TypeComponent, StringComparison.OrdinalIgnoreCase) &&
String.Equals(_subType, contentType.SubTypeComponent, StringComparison.OrdinalIgnoreCase));
}
return result;
}
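// A sketch of the comparison semantics (hypothetical values):
//
//     var a = new ContentType("TEXT/XML");
//     var b = new ContentType("text/xml ; charset=\"utf-8\"");
//     a.AreTypeAndSubTypeEqual(b);        // false - 'b' carries parameters
//     a.AreTypeAndSubTypeEqual(b, true);  // true  - parameters tolerated; type/subtype match case-insensitively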
/// <summary>
/// ToString - outputs a normalized form of the content type string
/// </summary>
/// <returns></returns>
public override string ToString()
{
if (_contentType == null)
{
//This is needed so that while debugging we get the correct
//string
if (!_isInitialized)
return String.Empty;
Debug.Assert(String.CompareOrdinal(_type, String.Empty) != 0
|| String.CompareOrdinal(_subType, String.Empty) != 0);
StringBuilder stringBuilder = new StringBuilder(_type);
stringBuilder.Append(PackUriHelper.ForwardSlashChar);
stringBuilder.Append(_subType);
if (_parameterDictionary != null && _parameterDictionary.Count > 0)
{
foreach (string parameterKey in _parameterDictionary.Keys)
{
stringBuilder.Append(s_linearWhiteSpaceChars[0]);
stringBuilder.Append(SemicolonSeparator);
stringBuilder.Append(s_linearWhiteSpaceChars[0]);
stringBuilder.Append(parameterKey);
stringBuilder.Append(EqualSeparator);
stringBuilder.Append(_parameterDictionary[parameterKey]);
}
}
_contentType = stringBuilder.ToString();
}
return _contentType;
}
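// For example, assuming the object was constructed from "text/xml;charset=\"utf-8\"",
// ToString() rebuilds the normalized form "text/xml ; charset=\"utf-8\"" - a single
// space on either side of each ';', with quoted values kept as-is.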
#endregion Internal Methods
//------------------------------------------------------
//
// Private Methods
//
//------------------------------------------------------
#region Private Methods
/// <summary>
/// This method validates if the content type string has
/// valid CR-LF characters. Specifically, we test that every '\r' is
/// accompanied by a '\n' in the string; otherwise it's an error.
/// </summary>
/// <param name="contentType"></param>
private static void ValidateCarriageReturns(string contentType)
{
Debug.Assert(!IsLinearWhiteSpaceChar(contentType[0]) && !IsLinearWhiteSpaceChar(contentType[contentType.Length - 1]));
//Prior to calling this method we have already checked that the first and last
//characters of the content type are not Linear White Space. So it's safe to
//assume that the index will be greater than 0 and at most length-2.
int index = contentType.IndexOf(s_linearWhiteSpaceChars[2]);
while (index != -1)
{
if (contentType[index - 1] == s_linearWhiteSpaceChars[1] || contentType[index + 1] == s_linearWhiteSpaceChars[1])
{
index = contentType.IndexOf(s_linearWhiteSpaceChars[2], ++index);
}
else
throw new ArgumentException(SR.InvalidLinearWhiteSpaceCharacter);
}
}
/// <summary>
/// Parses the type and subType tokens from the string.
/// Also verifies if the Tokens are valid as per the grammar.
/// </summary>
/// <param name="typeAndSubType">substring that has the type and subType of the content type</param>
/// <exception cref="ArgumentException">If the typeAndSubType parameter does not have the "/" character</exception>
private void ParseTypeAndSubType(string typeAndSubType)
{
//okay to trim the end of the string at this point, as Linear White Space (LWS) chars are allowed here.
typeAndSubType = typeAndSubType.TrimEnd(s_linearWhiteSpaceChars);
string[] splitBasedOnForwardSlash = typeAndSubType.Split(PackUriHelper.s_forwardSlashCharArray);
if (splitBasedOnForwardSlash.Length != 2)
throw new ArgumentException(SR.InvalidTypeSubType);
_type = ValidateToken(splitBasedOnForwardSlash[0]);
_subType = ValidateToken(splitBasedOnForwardSlash[1]);
}
/// <summary>
/// Parse the individual parameter=value strings
/// </summary>
/// <param name="parameterAndValue">This string has the parameter and value pair of the form
/// parameter=value</param>
/// <exception cref="ArgumentException">If the string does not have the required "="</exception>
private void ParseParameterAndValue(string parameterAndValue)
{
while (parameterAndValue != string.Empty)
{
//At this point the first character MUST be a semicolon.
//The first time through, this test serves more as an assert.
if (parameterAndValue[0] != SemicolonSeparator)
throw new ArgumentException(SR.ExpectingSemicolon);
//At this point if we have just one semicolon, then it's an error.
//Also, there can be no trailing LWS characters, as we already checked for that
//in the constructor.
if (parameterAndValue.Length == 1)
throw new ArgumentException(SR.ExpectingParameterValuePairs);
//Removing the leading ; from the string
parameterAndValue = parameterAndValue.Substring(1);
//okay to trim the start as there can be spaces before the beginning
//of the parameter name.
parameterAndValue = parameterAndValue.TrimStart(s_linearWhiteSpaceChars);
int equalSignIndex = parameterAndValue.IndexOf(EqualSeparator);
if (equalSignIndex <= 0 || equalSignIndex == (parameterAndValue.Length - 1))
throw new ArgumentException(SR.InvalidParameterValuePair);
int parameterStartIndex = equalSignIndex + 1;
//Get length of the parameter value
int parameterValueLength = GetLengthOfParameterValue(parameterAndValue, parameterStartIndex);
EnsureParameterDictionary();
_parameterDictionary.Add(
ValidateToken(parameterAndValue.Substring(0, equalSignIndex)),
ValidateQuotedStringOrToken(parameterAndValue.Substring(parameterStartIndex, parameterValueLength)));
parameterAndValue = parameterAndValue.Substring(parameterStartIndex + parameterValueLength).TrimStart(s_linearWhiteSpaceChars);
}
}
/// <summary>
/// This method returns the length of the first parameter value in the input string.
/// </summary>
/// <param name="s"></param>
/// <param name="startIndex">Starting index for parsing</param>
/// <returns></returns>
private static int GetLengthOfParameterValue(string s, int startIndex)
{
Debug.Assert(s != null);
int length = 0;
//if the parameter value does not start with a '"' then,
//we expect a valid token. So we look for Linear White Spaces or
//a ';' as the terminator for the token value.
if (s[startIndex] != '"')
{
int semicolonIndex = s.IndexOf(SemicolonSeparator, startIndex);
if (semicolonIndex != -1)
{
int lwsIndex = s.IndexOfAny(s_linearWhiteSpaceChars, startIndex);
if (lwsIndex != -1 && lwsIndex < semicolonIndex)
length = lwsIndex;
else
length = semicolonIndex;
}
else
length = semicolonIndex;
//If no terminating ';' or linear white space was found we treat the entire
//remaining string as the parameter value.
if (length == -1)
length = s.Length;
}
else
{
//if the parameter value starts with a '"' then, we need to look for the
//pairing '"' that is not preceded by a "\" ["\" is used to escape the '"']
bool found = false;
length = startIndex;
while (!found)
{
length = s.IndexOf('"', ++length);
if (length == -1)
throw new ArgumentException(SR.InvalidParameterValue);
if (s[length - 1] != '\\')
{
found = true;
length++;
}
}
}
return length - startIndex;
}
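// Worked examples of the scan above, where startIndex points at the first character
// of the value (hypothetical inputs):
//
//     GetLengthOfParameterValue("charset=utf-8; q=1", 8);  // returns 5 -> "utf-8" (stops at ';')
//     GetLengthOfParameterValue("charset=\"utf-8\"", 8);   // returns 7 -> "\"utf-8\"" (closing quote included)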
/// <summary>
/// Validating the given token
/// The following checks are made -
/// 1. Whether each character in the token is an ASCII letter or digit.
/// 2. Failing that, whether it belongs to the remaining allowed character set.
/// </summary>
/// <param name="token">string token</param>
/// <returns>validated string token</returns>
/// <exception cref="ArgumentException">If the token is Empty</exception>
private static string ValidateToken(string token)
{
if (String.IsNullOrEmpty(token))
throw new ArgumentException(SR.InvalidToken);
for (int i = 0; i < token.Length; i++)
{
if (!IsAsciiLetterOrDigit(token[i]) && !IsAllowedCharacter(token[i]))
{
throw new ArgumentException(SR.InvalidToken);
}
}
return token;
}
/// <summary>
/// Validating if the value of a parameter is either a valid token or a
/// valid quoted string
/// </summary>
/// <param name="parameterValue">paramter value string</param>
/// <returns>validate parameter value string</returns>
/// <exception cref="ArgumentException">If the paramter value is empty</exception>
private static string ValidateQuotedStringOrToken(string parameterValue)
{
if (String.IsNullOrEmpty(parameterValue))
throw new ArgumentException(SR.InvalidParameterValue);
if (parameterValue.Length >= 2 &&
parameterValue.StartsWith(Quote, StringComparison.Ordinal) &&
parameterValue.EndsWith(Quote, StringComparison.Ordinal))
ValidateQuotedText(parameterValue.Substring(1, parameterValue.Length - 2));
else
ValidateToken(parameterValue);
return parameterValue;
}
/// <summary>
/// This method validates the text inside a quoted string.
/// </summary>
/// <param name="quotedText"></param>
private static void ValidateQuotedText(string quotedText)
{
//empty is okay
for (int i = 0; i < quotedText.Length; i++)
{
if (IsLinearWhiteSpaceChar(quotedText[i]))
continue;
if (quotedText[i] <= ' ' || quotedText[i] >= 0xFF)
throw new ArgumentException(SR.InvalidParameterValue);
else
if (quotedText[i] == '"' &&
(i == 0 || quotedText[i - 1] != '\\'))
throw new ArgumentException(SR.InvalidParameterValue);
}
}
/// <summary>
/// Returns true if the input character is an allowed character
/// Returns false if the input character is not an allowed character
/// </summary>
/// <param name="character">input character</param>
/// <returns></returns>
private static bool IsAllowedCharacter(char character)
{
return Array.IndexOf(s_allowedCharacters, character) >= 0;
}
/// <summary>
/// Returns true if the input character is an ASCII digit or letter
/// Returns false if the input character is not an ASCII digit or letter
/// </summary>
/// <param name="character">input character</param>
/// <returns></returns>
private static bool IsAsciiLetterOrDigit(char character)
{
return (IsAsciiLetter(character) || (character >= '0' && character <= '9'));
}
/// <summary>
/// Returns true if the input character is an ASCII letter
/// Returns false if the input character is not an ASCII letter
/// </summary>
/// <param name="character">input character</param>
/// <returns></returns>
private static bool IsAsciiLetter(char character)
{
return
(character >= 'a' && character <= 'z') ||
(character >= 'A' && character <= 'Z');
}
/// <summary>
/// Returns true if the input character is one of the Linear White Space characters -
/// ' ', '\t', '\n', '\r'
/// Returns false if the input character is none of the above
/// </summary>
/// <param name="ch">input character</param>
/// <returns></returns>
private static bool IsLinearWhiteSpaceChar(char ch)
{
if (ch > ' ')
{
return false;
}
int whiteSpaceIndex = Array.IndexOf(s_linearWhiteSpaceChars, ch);
return whiteSpaceIndex != -1;
}
/// <summary>
/// Lazy initialization for the ParameterDictionary
/// </summary>
private void EnsureParameterDictionary()
{
if (_parameterDictionary == null)
{
_parameterDictionary = new Dictionary<string, string>(); //initial size 0
}
}
#endregion Private Methods
//------------------------------------------------------
//
// Private Members
//
//------------------------------------------------------
#region Private Members
private string _contentType = null;
private string _type = String.Empty;
private string _subType = String.Empty;
private string _originalString;
private Dictionary<string, string> _parameterDictionary = null;
private bool _isInitialized = false;
private const string Quote = "\"";
private const char SemicolonSeparator = ';';
private const char EqualSeparator = '=';
//This array is sorted by the ascii value of these characters.
private static readonly char[] s_allowedCharacters =
{ '!' /*33*/, '#' /*35*/ , '$' /*36*/,
'%' /*37*/, '&' /*38*/ , '\'' /*39*/,
'*' /*42*/, '+' /*43*/ , '-' /*45*/,
'.' /*46*/, '^' /*94*/ , '_' /*95*/,
'`' /*96*/, '|' /*124*/, '~' /*126*/,
};
//Linear White Space characters
private static readonly char[] s_linearWhiteSpaceChars =
{ ' ', // space - \x20
'\n', // new line - \x0A
'\r', // carriage return - \x0D
'\t' // horizontal tab - \x09
};
#endregion Private Members
}
}
| |
/********************************************************************++
Copyright (c) Microsoft Corporation. All rights reserved.
Description:
Windows Vista and later support non-traditional UI fallback, i.e., a
user on an Arabic machine can choose either French or English(US) as
UI fallback language.
CLR does not support this (non-traditional) fallback mechanism. So
the static methods in this class calculate the appropriate UI Culture
natively. ConsoleHost uses this API to set the correct Thread UICulture.
Dependent on:
GetThreadPreferredUILanguages
SetThreadPreferredUILanguages
These methods are available on Windows Vista and later.
--********************************************************************/
using System;
using System.Globalization;
using System.Runtime.InteropServices;
using System.Text;
using Dbg = System.Management.Automation.Diagnostics;
using WORD = System.UInt16;
namespace Microsoft.PowerShell
{
/// <summary>
/// Custom culture
/// </summary>
internal class VistaCultureInfo : CultureInfo
{
private string[] _fallbacks;
// Cache the immediate parent and immediate fallback
private VistaCultureInfo _parentCI = null;
private object _syncObject = new object();
/// <summary>
/// Constructs a CultureInfo that keeps track of fallbacks
/// </summary>
/// <param name="name">Name of the culture to construct.</param>
/// <param name="fallbacks">
/// Ordered, null-delimited list of fallbacks.
/// </param>
public VistaCultureInfo(string name,
string[] fallbacks)
: base(name)
{
_fallbacks = fallbacks;
}
/// <summary>
/// Returns the Parent culture for the current CultureInfo.
/// If the CLR-defined Parent has a non-empty name, that parent is returned
/// (wrapped so the fallbacks are retained); otherwise the next fallback is chosen.
/// </summary>
public override CultureInfo Parent
{
get
{
// First traverse the parent hierarchy as established by CLR.
// This is required because there is difference in the parent hierarchy
// between CLR and Windows for Chinese. Ex: Native windows has
// zh-CN->zh-Hans->neutral whereas CLR has zh-CN->zh-CHS->zh-Hans->neutral
if ((null != base.Parent) && (!string.IsNullOrEmpty(base.Parent.Name)))
{
return ImmediateParent;
}
// Check whether we have any fallback specified
// MUI_MERGE_SYSTEM_FALLBACK | MUI_MERGE_USER_FALLBACK
// returns fallback cultures (specified by the user)
// and also adds neutral culture where appropriate.
// Ex: ja-jp ja en-us en
while ((null != _fallbacks) && (_fallbacks.Length > 0))
{
string fallback = _fallbacks[0];
string[] fallbacksForParent = null;
if (_fallbacks.Length > 1)
{
fallbacksForParent = new string[_fallbacks.Length - 1];
Array.Copy(_fallbacks, 1, fallbacksForParent, 0, _fallbacks.Length - 1);
}
try
{
return new VistaCultureInfo(fallback, fallbacksForParent);
}
// if there is any exception constructing the culture..catch..and go to
// the next culture in the list.
catch (ArgumentException)
{
_fallbacks = fallbacksForParent;
}
}
//no fallbacks..just return base parent
return base.Parent;
}
}
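// Illustration with a hypothetical fallback list:
//
//     var ci = new VistaCultureInfo("fr-FR", new[] { "en-US", "en" });
//
// Walking ci.Parent repeatedly yields fr-FR -> fr -> en-US -> en -> invariant:
// the CLR parent chain is followed until it reaches a nameless parent, and the
// user-chosen fallbacks take over from there.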
/// <summary>
/// This is called to create the parent culture (as defined by CLR)
/// of the current culture.
/// </summary>
private VistaCultureInfo ImmediateParent
{
get
{
if (null == _parentCI)
{
lock (_syncObject)
{
if (null == _parentCI)
{
string parentCulture = base.Parent.Name;
// remove the parentCulture from the _fallbacks list,
// i.e., remove duplicates from the parent hierarchy.
string[] fallbacksForTheParent = null;
if (null != _fallbacks)
{
fallbacksForTheParent = new string[_fallbacks.Length];
int currentIndex = 0;
foreach (string culture in _fallbacks)
{
if (!parentCulture.Equals(culture, StringComparison.OrdinalIgnoreCase))
{
fallbacksForTheParent[currentIndex] = culture;
currentIndex++;
}
}
// There is at least 1 duplicate in _fallbacks which was not added to the
// fallbacksForTheParent array. Resize the array to take care of this.
if (_fallbacks.Length != currentIndex)
{
Array.Resize<string>(ref fallbacksForTheParent, currentIndex);
}
}
_parentCI = new VistaCultureInfo(parentCulture, fallbacksForTheParent);
}
}
}
return _parentCI;
}
}
/// <summary>
/// Clones the custom CultureInfo retaining the fallbacks.
/// </summary>
/// <returns>Cloned custom CultureInfo</returns>
public override object Clone()
{
return new VistaCultureInfo(base.Name, _fallbacks);
}
}
/// <summary>
/// Static wrappers to get User chosen UICulture (for Vista and later)
/// </summary>
internal static class NativeCultureResolver
{
private static CultureInfo s_uiCulture = null;
private static CultureInfo s_culture = null;
private static object s_syncObject = new object();
/// <summary>
/// Gets the UICulture to be used by console host
/// </summary>
internal static CultureInfo UICulture
{
get
{
if (null == s_uiCulture)
{
lock (s_syncObject)
{
if (null == s_uiCulture)
{
s_uiCulture = GetUICulture();
}
}
}
return (CultureInfo)s_uiCulture.Clone();
}
}
internal static CultureInfo Culture
{
get
{
if (null == s_culture)
{
lock (s_syncObject)
{
if (null == s_culture)
{
s_culture = GetCulture();
}
}
}
return s_culture;
}
}
internal static CultureInfo GetUICulture()
{
return GetUICulture(true);
}
internal static CultureInfo GetCulture()
{
return GetCulture(true);
}
internal static CultureInfo GetUICulture(bool filterOutNonConsoleCultures)
{
if (!IsVistaAndLater())
{
s_uiCulture = EmulateDownLevel();
return s_uiCulture;
}
// We are running on Vista
string langBuffer = GetUserPreferredUILangs(filterOutNonConsoleCultures);
if (!string.IsNullOrEmpty(langBuffer))
{
try
{
string[] fallbacks = langBuffer.Split(new char[] { '\0' },
StringSplitOptions.RemoveEmptyEntries);
string fallback = fallbacks[0];
string[] fallbacksForParent = null;
if (fallbacks.Length > 1)
{
fallbacksForParent = new string[fallbacks.Length - 1];
Array.Copy(fallbacks, 1, fallbacksForParent, 0, fallbacks.Length - 1);
}
s_uiCulture = new VistaCultureInfo(fallback, fallbacksForParent);
return s_uiCulture;
}
catch (ArgumentException)
{
}
}
s_uiCulture = EmulateDownLevel();
return s_uiCulture;
}
[System.Diagnostics.CodeAnalysis.SuppressMessage("GoldMan", "#pw17903:UseOfLCID", Justification = "In XP and below GetUserDefaultLocaleName is not available")]
internal static CultureInfo GetCulture(bool filterOutNonConsoleCultures)
{
CultureInfo returnValue;
try
{
if (!IsVistaAndLater())
{
int lcid = GetUserDefaultLCID();
returnValue = new CultureInfo(lcid);
}
else
{
// Vista and above
StringBuilder name = new StringBuilder(16);
if (0 == GetUserDefaultLocaleName(name, 16))
{
// there is an error retrieving the culture,
// just use the current thread's culture
returnValue = CultureInfo.CurrentCulture;
}
else
{
returnValue = new CultureInfo(name.ToString().Trim());
}
}
if (filterOutNonConsoleCultures)
{
// filter out languages that console cannot display..
// Sometimes GetConsoleFallbackUICulture returns neutral cultures
// like "en" on "ar-SA". However neutral culture cannot be
// assigned as CurrentCulture. CreateSpecificCulture fixes
// this problem.
returnValue = CultureInfo.CreateSpecificCulture(
returnValue.GetConsoleFallbackUICulture().Name);
}
}
catch (ArgumentException)
{
// if there is any exception retrieving the
// culture, just use the current thread's culture.
returnValue = CultureInfo.CurrentCulture;
}
return returnValue;
}
[DllImport("kernel32.dll", SetLastError = false, CharSet = CharSet.Unicode)]
internal static extern WORD GetUserDefaultUILanguage();
/// <summary>
/// Constructs CultureInfo object without considering any Vista and later
/// custom culture fallback logic.
/// </summary>
/// <returns>A CultureInfo object</returns>
[System.Diagnostics.CodeAnalysis.SuppressMessage("GoldMan", "#pw17903:UseOfLCID", Justification = "This is only called In XP and below where GetUserDefaultLocaleName is not available, or as a fallback when GetThreadPreferredUILanguages fails")]
private static CultureInfo EmulateDownLevel()
{
// GetConsoleFallbackUICulture is not required.
// This is retained in order not to break existing code.
ushort langId = NativeCultureResolver.GetUserDefaultUILanguage();
CultureInfo ci = new CultureInfo((int)langId);
return ci.GetConsoleFallbackUICulture();
}
/// <summary>
/// Checks if the current operating system is Vista or later
/// </summary>
/// <returns>
/// true, if vista and above
/// false, otherwise.
/// </returns>
private static bool IsVistaAndLater()
{
// The version number is obtained from MSDN
// 4 - Windows NT 4.0, Windows Me, Windows 98, or Windows 95.
// 5 - Windows Server 2003 R2, Windows Server 2003, Windows XP, or Windows 2000.
// 6 - Windows Vista or Windows Server "Longhorn".
if (Environment.OSVersion.Version.Major >= 6)
{
return true;
}
return false;
}
/// <summary>
/// This method is called on vista and above.
/// Using GetThreadPreferredUILanguages this method gets
/// the UI languages a user has chosen.
/// </summary>
/// <returns>
/// List of ThreadPreferredUILanguages.
/// </returns>
/// <remarks>
/// This method will work only on Vista and later.
/// </remarks>
private static string GetUserPreferredUILangs(bool filterOutNonConsoleCultures)
{
long numberLangs = 0;
int bufferSize = 0;
string returnval = "";
if (filterOutNonConsoleCultures)
{
// Filter out languages that do not support console.
// The third parameter should be null otherwise this API will not
// set Console CodePage filter.
// The MSDN documentation does not call this out explicitly. Opened
// Bug 950 (Windows Developer Content) to track this.
if (!SetThreadPreferredUILanguages(s_MUI_CONSOLE_FILTER, null, IntPtr.Zero))
{
return returnval;
}
}
// calculate buffer size required
// MUI_MERGE_SYSTEM_FALLBACK | MUI_MERGE_USER_FALLBACK
// returns fallback cultures (specified by the user)
// and also adds neutral culture where appropriate.
// Ex: ja-jp ja en-us en
if (!GetThreadPreferredUILanguages(
s_MUI_LANGUAGE_NAME | s_MUI_MERGE_SYSTEM_FALLBACK | s_MUI_MERGE_USER_FALLBACK,
out numberLangs,
null,
out bufferSize))
{
return returnval;
}
// calculate space required to store output.
// StringBuilder will not work for this case as the CLR
// does not copy the entire string if there are delimiters ('\0')
// in the middle of a string.
byte[] langBufferPtr = new byte[bufferSize * 2];
// Now get the actual value
if (!GetThreadPreferredUILanguages(
s_MUI_LANGUAGE_NAME | s_MUI_MERGE_SYSTEM_FALLBACK | s_MUI_MERGE_USER_FALLBACK,
out numberLangs,
langBufferPtr, // Pointer to a buffer in which this function retrieves an ordered, null-delimited list.
out bufferSize))
{
return returnval;
}
try
{
string langBuffer = Encoding.Unicode.GetString(langBufferPtr);
returnval = langBuffer.Trim().ToLowerInvariant();
return returnval;
}
catch (ArgumentNullException)
{
}
catch (System.Text.DecoderFallbackException)
{
}
return returnval;
}
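// The buffer comes back as a lower-cased, null-delimited list, e.g. (hypothetically)
// "ja-jp\0ja\0en-us\0en\0"; GetUICulture splits it on '\0' and treats the first entry
// as the primary UI culture and the remaining entries as its fallbacks:
//
//     string[] fallbacks = langBuffer.Split(new[] { '\0' }, StringSplitOptions.RemoveEmptyEntries);
//     // fallbacks[0] == "ja-jp"; the rest feed VistaCultureInfo's fallback list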
#region Dll Import data
/// <summary>
/// Returns the locale identifier for the user default locale.
/// </summary>
/// <returns></returns>
/// <remarks>
/// This function can return data from custom locales. Locales are not
/// guaranteed to be the same from computer to computer or between runs
/// of an application. If your application must persist or transmit data,
/// see Using Persistent Locale Data.
/// Applications that are intended to run only on Windows Vista and later
/// should use GetUserDefaultLocaleName in preference to this function.
/// GetUserDefaultLocaleName provides good support for supplemental locales.
/// However, GetUserDefaultLocaleName is not supported for versions of Windows
/// prior to Windows Vista.
/// </remarks>
[DllImport("kernel32.dll", SetLastError = false, CharSet = CharSet.Unicode)]
private static extern int GetUserDefaultLCID();
/// <summary>
/// Retrieves the user default locale name.
/// </summary>
/// <param name="lpLocaleName"></param>
/// <param name="cchLocaleName"></param>
/// <returns>
/// Returns the size of the buffer containing the locale name, including
/// the terminating null character, if successful. The function returns 0
/// if it does not succeed. To get extended error information, the application
/// can call GetLastError. Possible returns from GetLastError
/// include ERROR_INSUFFICIENT_BUFFER.
/// </returns>
/// <remarks>
///
/// </remarks>
[DllImport("kernel32.dll", SetLastError = false, CharSet = CharSet.Unicode)]
private static extern int GetUserDefaultLocaleName(
[MarshalAs(UnmanagedType.LPWStr)]
StringBuilder lpLocaleName,
int cchLocaleName);
[DllImport("kernel32.dll", SetLastError = false, CharSet = CharSet.Unicode)]
private static extern bool SetThreadPreferredUILanguages(int dwFlags,
StringBuilder pwszLanguagesBuffer,
IntPtr pulNumLanguages);
[DllImport("kernel32.dll", SetLastError = false, CharSet = CharSet.Unicode)]
private static extern bool GetThreadPreferredUILanguages(int dwFlags,
out long pulNumLanguages,
[MarshalAs(UnmanagedType.LPArray, SizeParamIndex = 2)]
byte[] pwszLanguagesBuffer,
out int pcchLanguagesBuffer);
[DllImport("kernel32.dll", SetLastError = false, CharSet = CharSet.Unicode)]
internal static extern Int16 SetThreadUILanguage(Int16 langId);
//private static int MUI_LANGUAGE_ID = 0x4;
private static int s_MUI_LANGUAGE_NAME = 0x8;
private static int s_MUI_CONSOLE_FILTER = 0x100;
private static int s_MUI_MERGE_USER_FALLBACK = 0x20;
private static int s_MUI_MERGE_SYSTEM_FALLBACK = 0x10;
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*============================================================
**
** Purpose: Unsafe code that uses pointers should use
** SafePointer to fix subtle lifetime problems with the
** underlying resource.
**
===========================================================*/
// Design points:
// *) Avoid handle-recycling problems (including ones triggered via
// resurrection attacks) for all accesses via pointers. This requires tying
// together the lifetime of the unmanaged resource with the code that reads
// from that resource, in a package that uses synchronization to enforce
// the correct semantics during finalization. We're using SafeHandle's
// ref count as a gate on whether the pointer can be dereferenced because that
// controls the lifetime of the resource.
//
// *) Keep the penalties for using this class small, both in terms of space
// and time. Having multiple threads reading from a memory mapped file
// will already require 2 additional interlocked operations. If we add in
// a "current position" concept, that requires additional space in memory and
// synchronization. Since the position in memory is often (but not always)
// something that can be stored on the stack, we can save some memory by
// excluding it from this object. However, avoiding the need for
// synchronization is a more significant win. This design allows multiple
// threads to read and write memory simultaneously without locks (as long as
// you don't write to a region of memory that overlaps with what another
// thread is accessing).
//
// *) Space-wise, we use the following memory, including SafeHandle's fields:
// Object Header MT* handle int bool bool <2 pad bytes> length
// On 32 bit platforms: 24 bytes. On 64 bit platforms: 40 bytes.
// (We can save 4 bytes on x86 only by shrinking SafeHandle)
//
// *) Wrapping a SafeHandle would have been a nice solution, but without an
// ordering between critical finalizable objects, it would have required
// changes to each SafeHandle subclass to opt in to being usable from a
// SafeBuffer (or some clever exposure of SafeHandle's state fields and a
// way of forcing ReleaseHandle to run even after the SafeHandle has been
// finalized with a ref count > 1). We can use less memory and create fewer
// objects by simply inserting a SafeBuffer into the class hierarchy.
//
// *) In an ideal world, we could get marshaling support for SafeBuffer that
// would allow us to annotate a P/Invoke declaration, saying this parameter
// specifies the length of the buffer, and the units of that length are X.
// P/Invoke would then pass that size parameter to SafeBuffer.
// [DllImport(...)]
// static extern SafeMemoryHandle AllocCharBuffer(int numChars);
// If we could put an attribute on the SafeMemoryHandle saying numChars is
// the element length, and it must be multiplied by 2 to get to the byte
// length, we can simplify the usage model for SafeBuffer.
//
// *) This class could benefit from a constraint saying T is a value type
// containing no GC references.
// Implementation notes:
// *) The Initialize method must be called before you use any instance of
// a SafeBuffer. To avoid races when storing SafeBuffers in statics,
// you either need to take a lock when publishing the SafeBuffer, or you
// need to create a local, initialize the SafeBuffer, then assign to the
// static variable (perhaps using Interlocked.CompareExchange). Of course,
// assignments in a static class constructor are under a lock implicitly.
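// A hedged sketch of the race-free publication pattern described above ('HGlobalBuffer'
// and 's_shared' are hypothetical names, not part of this file):
//
//     HGlobalBuffer local = new HGlobalBuffer(byteCount);
//     local.Initialize((ulong)byteCount);
//     // Publish only after Initialize has run; the first writer wins.
//     Interlocked.CompareExchange(ref s_shared, local, null);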
using System;
using System.Diagnostics;
using System.Diagnostics.Contracts;
using System.Runtime.CompilerServices;
using System.Runtime.Versioning;
namespace System.Runtime.InteropServices
{
public abstract unsafe class SafeBuffer : SafeHandle
{
// Steal UIntPtr.MaxValue as our uninitialized value.
private static readonly UIntPtr Uninitialized = (UIntPtr.Size == 4) ?
((UIntPtr)UInt32.MaxValue) : ((UIntPtr)UInt64.MaxValue);
private UIntPtr _numBytes;
protected SafeBuffer(bool ownsHandle)
: base(IntPtr.Zero, ownsHandle)
{
_numBytes = Uninitialized;
}
// On the desktop CLR, SafeBuffer has access to the internal handle field since they're both in
// mscorlib. For this refactoring, we'll keep the name the same to minimize deltas, but shim
// through to DangerousGetHandle
private new IntPtr handle
{
get { return DangerousGetHandle(); }
}
public override bool IsInvalid
{
get { return DangerousGetHandle() == IntPtr.Zero || DangerousGetHandle() == new IntPtr(-1); }
}
/// <summary>
/// Specifies the size of the region of memory, in bytes. Must be
/// called before using the SafeBuffer.
/// </summary>
/// <param name="numBytes">Number of valid bytes in memory.</param>
[CLSCompliant(false)]
public void Initialize(ulong numBytes)
{
if (numBytes < 0)
throw new ArgumentOutOfRangeException("numBytes", SR.ArgumentOutOfRange_NeedNonNegNum);
if (IntPtr.Size == 4 && numBytes > UInt32.MaxValue)
throw new ArgumentOutOfRangeException("numBytes", SR.ArgumentOutOfRange_AddressSpace);
Contract.EndContractBlock();
if (numBytes >= (ulong)Uninitialized)
throw new ArgumentOutOfRangeException("numBytes", SR.ArgumentOutOfRange_UIntPtrMaxMinusOne);
_numBytes = (UIntPtr)numBytes;
}
/// <summary>
/// Specifies the size of the region of memory, as the number of
/// elements in an array. Must be called before using the SafeBuffer.
/// </summary>
[CLSCompliant(false)]
public void Initialize(uint numElements, uint sizeOfEachElement)
{
if (numElements < 0)
throw new ArgumentOutOfRangeException("numElements", SR.ArgumentOutOfRange_NeedNonNegNum);
if (sizeOfEachElement < 0)
throw new ArgumentOutOfRangeException("sizeOfEachElement", SR.ArgumentOutOfRange_NeedNonNegNum);
if (IntPtr.Size == 4 && numElements * sizeOfEachElement > UInt32.MaxValue)
throw new ArgumentOutOfRangeException("numBytes", SR.ArgumentOutOfRange_AddressSpace);
Contract.EndContractBlock();
if (numElements * sizeOfEachElement >= (ulong)Uninitialized)
throw new ArgumentOutOfRangeException("numElements", SR.ArgumentOutOfRange_UIntPtrMaxMinusOne);
_numBytes = checked((UIntPtr)(numElements * sizeOfEachElement));
}
/// <summary>
/// Specifies the size of the region of memory, as the number of
/// elements in an array. Must be called before using the SafeBuffer.
/// </summary>
[CLSCompliant(false)]
public void Initialize<T>(uint numElements) where T : struct
{
Initialize(numElements, AlignedSizeOf<T>());
}
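// A minimal usage sketch (assumes a concrete SafeBuffer subclass, here called
// 'NativeBuffer', that allocates its handle in the constructor - a hypothetical type):
//
//     NativeBuffer buffer = new NativeBuffer(sizeof(int) * 4);
//     buffer.Initialize<int>(4);          // must precede any Read/Write
//     buffer.Write<int>(0, 42);
//     int first = buffer.Read<int>(0);    // 42
//     int[] dest = new int[4];
//     buffer.ReadArray<int>(0, dest, 0, 4);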
// Callers should ensure that they check whether the pointer ref param
// is null when AcquirePointer returns. If it is not null, they must
// call ReleasePointer in a CER. This method calls DangerousAddRef
// & exposes the pointer. Unlike Read, it does not alter the "current
// position" of the pointer. Here's how to use it:
//
// byte* pointer = null;
// RuntimeHelpers.PrepareConstrainedRegions();
// try {
// safeBuffer.AcquirePointer(ref pointer);
// // Use pointer here, with your own bounds checking
// }
// finally {
// if (pointer != null)
// safeBuffer.ReleasePointer();
// }
//
// Note: If you cast this byte* to a T*, you have to worry about
// whether your pointer is aligned. Additionally, you must take
// responsibility for all bounds checking with this pointer.
/// <summary>
/// Obtain the pointer from a SafeBuffer for a block of code,
/// with the express responsibility for bounds checking and calling
/// ReleasePointer later within a CER to ensure the pointer can be
/// freed later. This method either completes successfully or
/// throws an exception and returns with pointer set to null.
/// </summary>
/// <param name="pointer">A byte*, passed by reference, to receive
/// the pointer from within the SafeBuffer. You must set
/// pointer to null before calling this method.</param>
[CLSCompliant(false)]
public void AcquirePointer(ref byte* pointer)
{
if (_numBytes == Uninitialized)
throw NotInitialized();
pointer = null;
try
{
}
finally
{
bool junk = false;
DangerousAddRef(ref junk);
pointer = (byte*)handle;
}
}
public void ReleasePointer()
{
if (_numBytes == Uninitialized)
throw NotInitialized();
DangerousRelease();
}
/// <summary>
/// Read a value type from memory at the given offset. This is
/// equivalent to: return *(T*)(bytePtr + byteOffset);
/// </summary>
/// <typeparam name="T">The value type to read</typeparam>
/// <param name="byteOffset">Where to start reading from memory. You
/// may have to consider alignment.</param>
/// <returns>An instance of T read from memory.</returns>
[CLSCompliant(false)]
public T Read<T>(ulong byteOffset) where T : struct
{
if (_numBytes == Uninitialized)
throw NotInitialized();
uint sizeofT = SizeOfType<T>();
byte* ptr = (byte*)handle + byteOffset;
SpaceCheck(ptr, sizeofT);
// return *(T*) (_ptr + byteOffset);
T value;
bool mustCallRelease = false;
try
{
DangerousAddRef(ref mustCallRelease);
GenericPtrToStructure<T>(ptr, out value, sizeofT);
}
finally
{
if (mustCallRelease)
DangerousRelease();
}
return value;
}
[CLSCompliant(false)]
public void ReadArray<T>(ulong byteOffset, T[] array, int index, int count)
where T : struct
{
if (array == null)
throw new ArgumentNullException("array", SR.ArgumentNull_Buffer);
if (index < 0)
throw new ArgumentOutOfRangeException("index", SR.ArgumentOutOfRange_NeedNonNegNum);
if (count < 0)
throw new ArgumentOutOfRangeException("count", SR.ArgumentOutOfRange_NeedNonNegNum);
if (array.Length - index < count)
throw new ArgumentException(SR.Argument_InvalidOffLength);
Contract.EndContractBlock();
if (_numBytes == Uninitialized)
throw NotInitialized();
uint sizeofT = SizeOfType<T>();
uint alignedSizeofT = AlignedSizeOf<T>();
byte* ptr = (byte*)handle + byteOffset;
SpaceCheck(ptr, checked((ulong)(alignedSizeofT * count)));
bool mustCallRelease = false;
try
{
DangerousAddRef(ref mustCallRelease);
for (int i = 0; i < count; i++)
unsafe { GenericPtrToStructure<T>(ptr + alignedSizeofT * i, out array[i + index], sizeofT); }
}
finally
{
if (mustCallRelease)
DangerousRelease();
}
}
/// <summary>
/// Write a value type to memory at the given offset. This is
/// equivalent to: *(T*)(bytePtr + byteOffset) = value;
/// </summary>
/// <typeparam name="T">The type of the value type to write to memory.</typeparam>
/// <param name="byteOffset">The location in memory to write to. You
/// may have to consider alignment.</param>
/// <param name="value">The value type to write to memory.</param>
[CLSCompliant(false)]
public void Write<T>(ulong byteOffset, T value) where T : struct
{
if (_numBytes == Uninitialized)
throw NotInitialized();
uint sizeofT = SizeOfType<T>();
byte* ptr = (byte*)handle + byteOffset;
SpaceCheck(ptr, sizeofT);
// *((T*) (_ptr + byteOffset)) = value;
bool mustCallRelease = false;
try
{
DangerousAddRef(ref mustCallRelease);
GenericStructureToPtr(ref value, ptr, sizeofT);
}
finally
{
if (mustCallRelease)
DangerousRelease();
}
}
[CLSCompliant(false)]
public void WriteArray<T>(ulong byteOffset, T[] array, int index, int count)
where T : struct
{
if (array == null)
throw new ArgumentNullException("array", SR.ArgumentNull_Buffer);
if (index < 0)
throw new ArgumentOutOfRangeException("index", SR.ArgumentOutOfRange_NeedNonNegNum);
if (count < 0)
throw new ArgumentOutOfRangeException("count", SR.ArgumentOutOfRange_NeedNonNegNum);
if (array.Length - index < count)
throw new ArgumentException(SR.Argument_InvalidOffLength);
Contract.EndContractBlock();
if (_numBytes == Uninitialized)
throw NotInitialized();
uint sizeofT = SizeOfType<T>();
uint alignedSizeofT = AlignedSizeOf<T>();
byte* ptr = (byte*)handle + byteOffset;
SpaceCheck(ptr, checked((ulong)(alignedSizeofT * count)));
bool mustCallRelease = false;
try
{
DangerousAddRef(ref mustCallRelease);
for (int i = 0; i < count; i++)
unsafe { GenericStructureToPtr(ref array[i + index], ptr + alignedSizeofT * i, sizeofT); }
}
finally
{
if (mustCallRelease)
DangerousRelease();
}
}
/// <summary>
/// Returns the number of bytes in the memory region.
/// </summary>
[CLSCompliant(false)]
public ulong ByteLength
{
get
{
if (_numBytes == Uninitialized)
throw NotInitialized();
return (ulong)_numBytes;
}
}
/* No indexer. The perf would be misleadingly bad. People should use
* AcquirePointer and ReleasePointer instead. */
private void SpaceCheck(byte* ptr, ulong sizeInBytes)
{
if ((ulong)_numBytes < sizeInBytes)
NotEnoughRoom();
if ((ulong)(ptr - (byte*)handle) > ((ulong)_numBytes) - sizeInBytes)
NotEnoughRoom();
}
private static void NotEnoughRoom()
{
throw new ArgumentException(SR.Arg_BufferTooSmall);
}
private static InvalidOperationException NotInitialized()
{
Contract.Assert(false, "Uninitialized SafeBuffer! Someone needs to call Initialize before using this instance!");
return new InvalidOperationException(SR.InvalidOperation_MustCallInitialize);
}
internal static void GenericPtrToStructure<T>(byte* ptr, out T structure, uint sizeofT) where T : struct
{
RuntimeTypeHandle structureTypeHandle = typeof(T).TypeHandle;
if (!structureTypeHandle.IsBlittable())
throw new ArgumentException(SR.Argument_NeedStructWithNoRefs);
Object boxedStruct = new T();
InteropExtensions.PinObjectAndCall(boxedStruct,
unboxedStructPtr =>
{
InteropExtensions.Memcpy(
(IntPtr)((IntPtr*)unboxedStructPtr + 1), // safe (need to adjust offset as boxed structure data starts at offset 1)
(IntPtr)ptr, // unsafe (no need to adjust as it is always struct)
(int)sizeofT
);
});
structure = (T)boxedStruct;
}
internal static void GenericStructureToPtr<T>(ref T structure, byte* ptr, uint sizeofT) where T : struct
{
RuntimeTypeHandle structureTypeHandle = structure.GetType().TypeHandle;
if (!structureTypeHandle.IsBlittable())
throw new ArgumentException(SR.Argument_NeedStructWithNoRefs);
InteropExtensions.PinObjectAndCall((Object)structure,
unboxedStructPtr =>
{
InteropExtensions.Memcpy(
(IntPtr)ptr, // unsafe (no need to adjust as it is always struct)
(IntPtr)((IntPtr*)unboxedStructPtr + 1), // safe (need to adjust offset as boxed structure data starts at offset 1)
(int)sizeofT
);
});
}
#region "SizeOf Helpers"
/// <summary>
/// Returns the aligned size of an instance of a value type.
/// </summary>
private static uint AlignedSizeOf<T>() where T : struct
{
uint size = SizeOfType<T>();
if (size == 1 || size == 2)
{
return size;
}
if (IntPtr.Size == 8 && size == 4)
{
return size;
}
return (uint)(((size + 3) & (~3)));
}
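// Worked examples: sizes 1 and 2 are returned unchanged; a 4-byte struct stays 4
// (returned early on 64-bit, and (4 + 3) & ~3 == 4 on 32-bit anyway); a 6-byte
// struct rounds up to 8; a 12-byte struct is already a multiple of 4 and stays 12.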
private static uint SizeOfType<T>() where T : struct
{
return (uint)SizeOf(typeof(T));
}
[Pure]
private static int SizeOf(Type t)
{
Debug.Assert(t != null, "t");
if (t.TypeHandle.IsGenericType())
throw new ArgumentException(SR.Argument_NeedNonGenericType, "t");
RuntimeTypeHandle typeHandle = t.TypeHandle;
if (!(typeHandle.IsBlittable() && typeHandle.IsValueType()))
throw new ArgumentException(SR.Argument_NeedStructWithNoRefs);
return typeHandle.GetValueTypeSize();
}
#endregion
}
}
| |
using System;
using LanguageExt.Effects.Traits;
namespace LanguageExt.Pipes
{
public abstract class Lift<RT, A> where RT : struct, HasCancel<RT>
{
public abstract Lift<RT, B> Map<B>(Func<A, B> f);
public abstract Lift<RT, B> Bind<B>(Func<A, Lift<RT, B>> f);
public abstract Producer<RT, OUT, A> ToProducer<OUT>();
public abstract ProducerLift<RT, OUT, A> ToProducerLift<OUT>();
public abstract Consumer<RT, IN, A> ToConsumer<IN>();
public abstract ConsumerLift<RT, IN, A> ToConsumerLift<IN>();
public abstract Pipe<RT, IN, OUT, A> ToPipe<IN, OUT>();
public static implicit operator Lift<RT, A>(Pipes.Pure<A> ma) =>
new Pure(ma.Value);
public class Pure : Lift<RT, A>
{
public readonly A Value;
public Pure(A value) =>
Value = value;
public override Lift<RT, B> Map<B>(Func<A, B> f) =>
new Lift<RT, B>.Pure(f(Value));
public override Lift<RT, B> Bind<B>(Func<A, Lift<RT, B>> f) =>
f(Value);
public override Producer<RT, OUT, A> ToProducer<OUT>() =>
Producer.Pure<RT, OUT, A>(Value);
public override ProducerLift<RT, OUT, A> ToProducerLift<OUT>() =>
new ProducerLift<RT, OUT, A>.Pure(Value);
public override Consumer<RT, IN, A> ToConsumer<IN>() =>
Consumer.Pure<RT, IN, A>(Value);
public override ConsumerLift<RT, IN, A> ToConsumerLift<IN>() =>
new ConsumerLift<RT, IN, A>.Pure(Value);
public override Pipe<RT, IN, OUT, A> ToPipe<IN, OUT>() =>
Pipe.Pure<RT, IN, OUT, A>(Value);
}
public class Do<X> : Lift<RT, A>
{
public readonly Aff<RT, X> Effect;
public readonly Func<X, Lift<RT, A>> Next;
public Do(Aff<RT, X> value, Func<X, Lift<RT, A>> next) =>
(Effect, Next) = (value, next);
public override Lift<RT, B> Map<B>(Func<A, B> f) =>
new Lift<RT, B>.Do<X>(Effect, n => Next(n).Select(f));
public override Lift<RT, B> Bind<B>(Func<A, Lift<RT, B>> f) =>
new Lift<RT, B>.Do<X>(Effect, n => Next(n).Bind(f));
public override ConsumerLift<RT, IN, A> ToConsumerLift<IN>() =>
new ConsumerLift<RT, IN, A>.Lift<X>(Effect, x => Next(x).ToConsumerLift<IN>());
public override Producer<RT, OUT, A> ToProducer<OUT>() =>
from x in Producer.lift<RT, OUT, X>(Effect)
from r in Next(x).ToProducer<OUT>()
select r;
public override ProducerLift<RT, OUT, A> ToProducerLift<OUT>() =>
new ProducerLift<RT, OUT, A>.Lift<X>(Effect, x => Next(x).ToProducerLift<OUT>());
public override Consumer<RT, IN, A> ToConsumer<IN>() =>
from x in Consumer.lift<RT, IN, X>(Effect)
from r in Next(x).ToConsumer<IN>()
select r;
public override Pipe<RT, IN, OUT, A> ToPipe<IN, OUT>() =>
from x in Pipe.lift<RT, IN, OUT, X>(Effect)
from r in Next(x).ToPipe<IN, OUT>()
select r;
}
}
public static class Lift
{
public static Lift<RT, A> Pure<RT, A>(A value) where RT : struct, HasCancel<RT> =>
new Lift<RT, A>.Pure(value);
public static Lift<RT, A> Aff<RT, A>(Aff<RT, A> value) where RT : struct, HasCancel<RT> =>
new Lift<RT, A>.Do<A>(value, Pure<RT, A>);
public static Lift<RT, A> Eff<RT, A>(Eff<RT, A> value) where RT : struct, HasCancel<RT> =>
new Lift<RT, A>.Do<A>(value, Pure<RT, A>);
public static Lift<RT, B> Select<RT, A, B>(this Lift<RT, A> ma, Func<A, B> f) where RT : struct, HasCancel<RT> =>
ma.Map(f);
public static Lift<RT, B> SelectMany<RT, A, B>(this Lift<RT, A> ma, Func<A, Lift<RT, B>> f) where RT : struct, HasCancel<RT> =>
ma.Bind(f);
public static Lift<RT, C> SelectMany<RT, A, B, C>(this Lift<RT, A> ma, Func<A, Lift<RT, B>> f, Func<A, B, C> project) where RT : struct, HasCancel<RT> =>
ma.SelectMany(a => f(a).Select(b => project(a, b)));
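// A hedged usage sketch of the factories and LINQ support above ('RT', a runtime
// implementing HasCancel<RT>, and 'readNumber', an Aff<RT, int> effect, are hypothetical):
//
//     Lift<RT, int> total =
//         from x in Lift.Aff<RT, int>(readNumber)
//         from y in Lift.Pure<RT, int>(1)
//         select x + y;
//
//     // The lifted value can then be embedded into any Pipes component, e.g.:
//     Producer<RT, string, int> producer = total.ToProducer<string>();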
public static Lift<RT, B> SelectMany<RT, A, B>(this Lift<RT, A> ma, Func<A, Aff<RT, B>> f) where RT : struct, HasCancel<RT> =>
ma.Bind(x => new Lift<RT, B>.Do<B>(f(x), Pure<RT, B>));
public static Lift<RT, C> SelectMany<RT, A, B, C>(this Lift<RT, A> ma, Func<A, Aff<RT, B>> f, Func<A, B, C> project) where RT : struct, HasCancel<RT> =>
ma.Bind(x => new Lift<RT, B>.Do<B>(f(x), Pure<RT, B>).Map(y => project(x, y)));
public static Lift<RT, B> SelectMany<RT, A, B>(this Lift<RT, A> ma, Func<A, Eff<RT, B>> f) where RT : struct, HasCancel<RT> =>
ma.Bind(x => new Lift<RT, B>.Do<B>(f(x), Pure<RT, B>));
public static Lift<RT, C> SelectMany<RT, A, B, C>(this Lift<RT, A> ma, Func<A, Eff<RT, B>> f, Func<A, B, C> project) where RT : struct, HasCancel<RT> =>
ma.Bind(x => new Lift<RT, B>.Do<B>(f(x), Pure<RT, B>).Map(y => project(x, y)));
public static Lift<RT, B> SelectMany<RT, A, B>(this Lift<RT, A> ma, Func<A, Aff<B>> f) where RT : struct, HasCancel<RT> =>
ma.Bind(x => new Lift<RT, B>.Do<B>(f(x), Pure<RT, B>));
public static Lift<RT, C> SelectMany<RT, A, B, C>(this Lift<RT, A> ma, Func<A, Aff<B>> f, Func<A, B, C> project) where RT : struct, HasCancel<RT> =>
ma.Bind(x => new Lift<RT, B>.Do<B>(f(x), Pure<RT, B>).Map(y => project(x, y)));
public static Lift<RT, B> SelectMany<RT, A, B>(this Lift<RT, A> ma, Func<A, Eff<B>> f) where RT : struct, HasCancel<RT> =>
ma.Bind(x => new Lift<RT, B>.Do<B>(f(x), Pure<RT, B>));
public static Lift<RT, C> SelectMany<RT, A, B, C>(this Lift<RT, A> ma, Func<A, Eff<B>> f, Func<A, B, C> project) where RT : struct, HasCancel<RT> =>
ma.Bind(x => new Lift<RT, B>.Do<B>(f(x), Pure<RT, B>).Map(y => project(x, y)));
public static Consumer<RT, IN, B> SelectMany<RT, IN, A, B>(this Lift<RT, A> ma, Func<A, Consumer<IN, B>> f) where RT : struct, HasCancel<RT> =>
from a in ma.ToConsumer<IN>()
from b in f(a)
select b;
public static Consumer<RT, IN, C> SelectMany<RT, IN, A, B, C>(this Lift<RT, A> ma, Func<A, Consumer<IN, B>> f, Func<A, B, C> project) where RT : struct, HasCancel<RT> =>
from a in ma.ToConsumer<IN>()
from b in f(a)
select project(a, b);
public static ProducerLift<RT, OUT, B> SelectMany<RT, OUT, A, B>(this Lift<RT, A> ma, Func<A, Producer<OUT, B>> f) where RT : struct, HasCancel<RT> =>
from a in ma.ToProducerLift<OUT>()
from b in f(a).ToProducerLift<RT>()
select b;
public static ProducerLift<RT, OUT, B> SelectMany<RT, OUT, A, B>(this Lift<RT, A> ma, Func<A, ProducerLift<RT, OUT, B>> f) where RT : struct, HasCancel<RT> =>
from a in ma.ToProducerLift<OUT>()
from b in f(a)
select b;
public static ProducerLift<RT, OUT, C> SelectMany<RT, OUT, A, B, C>(this Lift<RT, A> ma, Func<A, Producer<OUT, B>> f, Func<A, B, C> project) where RT : struct, HasCancel<RT> =>
from a in ma.ToProducerLift<OUT>()
from b in f(a).ToProducerLift<RT>()
select project(a, b);
public static ProducerLift<RT, OUT, C> SelectMany<RT, OUT, A, B, C>(this Lift<RT, A> ma, Func<A, ProducerLift<RT, OUT, B>> f, Func<A, B, C> project) where RT : struct, HasCancel<RT> =>
from a in ma.ToProducerLift<OUT>()
from b in f(a)
select project(a, b);
public static Pipe<RT, IN, OUT, B> SelectMany<RT, IN, OUT, A, B>(this Lift<RT, A> ma, Func<A, Pipe<IN, OUT, B>> f) where RT : struct, HasCancel<RT> =>
from a in ma.ToPipe<IN, OUT>()
from b in f(a)
select b;
public static Pipe<RT, IN, OUT, C> SelectMany<RT, IN, OUT, A, B, C>(this Lift<RT, A> ma, Func<A, Pipe<IN, OUT, B>> f, Func<A, B, C> project) where RT : struct, HasCancel<RT> =>
from a in ma.ToPipe<IN, OUT>()
from b in f(a)
select project(a, b);
public static ConsumerLift<RT, IN, B> SelectMany<RT, IN, A, B>(this Consumer<IN, A> ma, Func<A, Lift<RT, B>> f) where RT : struct, HasCancel<RT> =>
from a in ma.ToConsumerLift<RT>()
from b in f(a).ToConsumerLift<IN>()
select b;
public static ConsumerLift<RT, IN, C> SelectMany<RT, IN, A, B, C>(this Consumer<IN, A> ma, Func<A, Lift<RT, B>> f, Func<A, B, C> project) where RT : struct, HasCancel<RT> =>
from a in ma.ToConsumerLift<RT>()
from b in f(a).ToConsumerLift<IN>()
select project(a, b);
public static ProducerLift<RT, OUT, B> SelectMany<RT, OUT, A, B>(this Producer<OUT, A> ma, Func<A, Lift<RT, B>> f) where RT : struct, HasCancel<RT> =>
from a in ma.ToProducerLift<RT>()
from b in f(a).ToProducerLift<OUT>()
select b;
public static ProducerLift<RT, OUT, C> SelectMany<RT, OUT, A, B, C>(this Producer<OUT, A> ma, Func<A, Lift<RT, B>> f, Func<A, B, C> project) where RT : struct, HasCancel<RT> =>
from a in ma.ToProducerLift<RT>()
from b in f(a).ToProducerLift<OUT>()
select project(a, b);
public static Pipe<RT, IN, OUT, B> SelectMany<RT, IN, OUT, A, B>(this Pipe<IN, OUT, A> ma, Func<A, Lift<RT, B>> f) where RT : struct, HasCancel<RT> =>
from a in ma
from b in f(a).ToPipe<IN, OUT>()
select b;
public static Pipe<RT, IN, OUT, C> SelectMany<RT, IN, OUT, A, B, C>(this Pipe<IN, OUT, A> ma, Func<A, Lift<RT, B>> f, Func<A, B, C> project) where RT : struct, HasCancel<RT> =>
from a in ma
from b in f(a).ToPipe<IN, OUT>()
select project(a, b);
public static Consumer<RT, IN, B> SelectMany<RT, IN, A, B>(this Lift<RT, A> ma, Func<A, Consumer<RT, IN, B>> f) where RT : struct, HasCancel<RT> =>
from a in ma.ToConsumer<IN>()
from b in f(a)
select b;
public static Consumer<RT, IN, C> SelectMany<RT, IN, A, B, C>(this Lift<RT, A> ma, Func<A, Consumer<RT, IN, B>> f, Func<A, B, C> project) where RT : struct, HasCancel<RT> =>
from a in ma.ToConsumer<IN>()
from b in f(a)
select project(a, b);
public static Producer<RT, OUT, B> SelectMany<RT, OUT, A, B>(this Lift<RT, A> ma, Func<A, Producer<RT, OUT, B>> f) where RT : struct, HasCancel<RT> =>
from a in ma.ToProducer<OUT>()
from b in f(a)
select b;
public static Producer<RT, OUT, C> SelectMany<RT, OUT, A, B, C>(this Lift<RT, A> ma, Func<A, Producer<RT, OUT, B>> f, Func<A, B, C> project) where RT : struct, HasCancel<RT> =>
from a in ma.ToProducer<OUT>()
from b in f(a)
select project(a, b);
public static Pipe<RT, IN, OUT, B> SelectMany<RT, IN, OUT, A, B>(this Lift<RT, A> ma, Func<A, Pipe<RT, IN, OUT, B>> f) where RT : struct, HasCancel<RT> =>
from a in ma.ToPipe<IN, OUT>()
from b in f(a)
select b;
public static Pipe<RT, IN, OUT, C> SelectMany<RT, IN, OUT, A, B, C>(this Lift<RT, A> ma, Func<A, Pipe<RT, IN, OUT, B>> f, Func<A, B, C> project) where RT : struct, HasCancel<RT> =>
from a in ma.ToPipe<IN, OUT>()
from b in f(a)
select project(a, b);
public static Consumer<RT, IN, B> SelectMany<RT, IN, A, B>(this Consumer<RT, IN, A> ma, Func<A, Lift<RT, B>> f) where RT : struct, HasCancel<RT> =>
from a in ma
from b in f(a)
select b;
public static Consumer<RT, IN, C> SelectMany<RT, IN, A, B, C>(this Consumer<RT, IN, A> ma, Func<A, Lift<RT, B>> f, Func<A, B, C> project) where RT : struct, HasCancel<RT> =>
from a in ma
from b in f(a).ToConsumer<IN>()
select project(a, b);
public static Producer<RT, OUT, B> SelectMany<RT, OUT, A, B>(this Producer<RT, OUT, A> ma, Func<A, Lift<RT, B>> f) where RT : struct, HasCancel<RT> =>
from a in ma
from b in f(a).ToProducer<OUT>()
select b;
public static Producer<RT, OUT, C> SelectMany<RT, OUT, A, B, C>(this Producer<RT, OUT, A> ma, Func<A, Lift<RT, B>> f, Func<A, B, C> project) where RT : struct, HasCancel<RT> =>
from a in ma
from b in f(a).ToProducer<OUT>()
select project(a, b);
public static Pipe<RT, IN, OUT, B> SelectMany<RT, IN, OUT, A, B>(this Pipe<RT, IN, OUT, A> ma, Func<A, Lift<RT, B>> f) where RT : struct, HasCancel<RT> =>
from a in ma
from b in f(a).ToPipe<IN, OUT>()
select b;
public static Pipe<RT, IN, OUT, C> SelectMany<RT, IN, OUT, A, B, C>(this Pipe<RT, IN, OUT, A> ma, Func<A, Lift<RT, B>> f, Func<A, B, C> project) where RT : struct, HasCancel<RT> =>
from a in ma
from b in f(a).ToPipe<IN, OUT>()
select project(a, b);
}
}
| |
using System;
using BigMath;
using NUnit.Framework;
using Raksha.Crypto;
using Raksha.Crypto.Generators;
using Raksha.Crypto.Parameters;
using Raksha.Math;
using Raksha.Security;
using Raksha.Tests.Utilities;
namespace Raksha.Tests.Crypto
{
internal class DHTestKeyParameters
: DHKeyParameters
{
public DHTestKeyParameters(
bool isPrivate,
DHParameters parameters)
: base(isPrivate, parameters)
{
}
}
internal class ElGamalTestKeyParameters
: ElGamalKeyParameters
{
public ElGamalTestKeyParameters(
bool isPrivate,
ElGamalParameters parameters)
: base(isPrivate, parameters)
{
}
}
[TestFixture]
public class EqualsHashCodeTest
: SimpleTest
{
private static object Other = new object();
public override string Name
{
get { return "EqualsHashCode"; }
}
private void doTest(
object a,
object equalsA,
object notEqualsA)
{
if (a.Equals(null))
{
Fail("a equaled null");
}
if (!a.Equals(equalsA) || !equalsA.Equals(a))
{
Fail("equality failed");
}
if (a.Equals(Other))
{
Fail("other inequality failed");
}
if (a.Equals(notEqualsA) || notEqualsA.Equals(a))
{
Fail("inequality failed");
}
if (a.GetHashCode() != equalsA.GetHashCode())
{
Fail("hashCode equality failed");
}
}
[Test]
public void TestDH()
{
BigInteger g512 = new BigInteger("153d5d6172adb43045b68ae8e1de1070b6137005686d29d3d73a7749199681ee5b212c9b96bfdcfa5b20cd5e3fd2044895d609cf9b410b7a0f12ca1cb9a428cc", 16);
BigInteger p512 = new BigInteger("9494fec095f3b85ee286542b3836fc81a5dd0a0349b4c239dd38744d488cf8e31db8bcb7d33b41abb9e5a33cca9144b1cef332c94bf0573bf047a3aca98cdf3b", 16);
DHParameters dhParams = new DHParameters(p512, g512);
DHKeyGenerationParameters parameters = new DHKeyGenerationParameters(new SecureRandom(), dhParams);
DHKeyPairGenerator kpGen = new DHKeyPairGenerator();
kpGen.Init(parameters);
AsymmetricCipherKeyPair pair = kpGen.GenerateKeyPair();
DHPublicKeyParameters pu1 = (DHPublicKeyParameters)pair.Public;
DHPrivateKeyParameters pv1 = (DHPrivateKeyParameters)pair.Private;
DHPublicKeyParameters pu2 = new DHPublicKeyParameters(pu1.Y, pu1.Parameters);
DHPrivateKeyParameters pv2 = new DHPrivateKeyParameters(pv1.X, pv1.Parameters);
DHPublicKeyParameters pu3 = new DHPublicKeyParameters(pv1.X, pu1.Parameters);
DHPrivateKeyParameters pv3 = new DHPrivateKeyParameters(pu1.Y, pu1.Parameters);
doTest(pu1, pu2, pu3);
doTest(pv1, pv2, pv3);
DHParameters pr1 = pu1.Parameters;
DHParameters pr2 = new DHParameters(
pr1.P, pr1.G, pr1.Q, pr1.M, pr1.L, pr1.J, pr1.ValidationParameters);
DHParameters pr3 = new DHParameters(
pr1.P.Add(BigInteger.Two), pr1.G, pr1.Q, pr1.M, pr1.L, pr1.J, pr1.ValidationParameters);
doTest(pr1, pr2, pr3);
pr3 = new DHParameters(
pr1.P, pr1.G.Add(BigInteger.One), pr1.Q, pr1.M, pr1.L, pr1.J, pr1.ValidationParameters);
doTest(pr1, pr2, pr3);
pu2 = new DHPublicKeyParameters(pu1.Y, pr2);
pv2 = new DHPrivateKeyParameters(pv1.X, pr2);
doTest(pu1, pu2, pu3);
doTest(pv1, pv2, pv3);
DHValidationParameters vp1 = new DHValidationParameters(new byte[20], 1024);
DHValidationParameters vp2 = new DHValidationParameters(new byte[20], 1024);
DHValidationParameters vp3 = new DHValidationParameters(new byte[24], 1024);
doTest(vp1, vp1, vp3);
doTest(vp1, vp2, vp3);
byte[] bytes = new byte[20];
bytes[0] = 1;
vp3 = new DHValidationParameters(bytes, 1024);
doTest(vp1, vp2, vp3);
vp3 = new DHValidationParameters(new byte[20], 2048);
doTest(vp1, vp2, vp3);
DHTestKeyParameters k1 = new DHTestKeyParameters(false, null);
DHTestKeyParameters k2 = new DHTestKeyParameters(false, null);
DHTestKeyParameters k3 = new DHTestKeyParameters(false, pu1.Parameters);
doTest(k1, k2, k3);
}
[Test]
public void TestElGamal()
{
BigInteger g512 = new BigInteger("153d5d6172adb43045b68ae8e1de1070b6137005686d29d3d73a7749199681ee5b212c9b96bfdcfa5b20cd5e3fd2044895d609cf9b410b7a0f12ca1cb9a428cc", 16);
BigInteger p512 = new BigInteger("9494fec095f3b85ee286542b3836fc81a5dd0a0349b4c239dd38744d488cf8e31db8bcb7d33b41abb9e5a33cca9144b1cef332c94bf0573bf047a3aca98cdf3b", 16);
ElGamalParameters dhParams = new ElGamalParameters(p512, g512);
ElGamalKeyGenerationParameters parameters = new ElGamalKeyGenerationParameters(new SecureRandom(), dhParams);
ElGamalKeyPairGenerator kpGen = new ElGamalKeyPairGenerator();
kpGen.Init(parameters);
AsymmetricCipherKeyPair pair = kpGen.GenerateKeyPair();
ElGamalPublicKeyParameters pu1 = (ElGamalPublicKeyParameters)pair.Public;
ElGamalPrivateKeyParameters pv1 = (ElGamalPrivateKeyParameters)pair.Private;
ElGamalPublicKeyParameters pu2 = new ElGamalPublicKeyParameters(pu1.Y, pu1.Parameters);
ElGamalPrivateKeyParameters pv2 = new ElGamalPrivateKeyParameters(pv1.X, pv1.Parameters);
ElGamalPublicKeyParameters pu3 = new ElGamalPublicKeyParameters(pv1.X, pu1.Parameters);
ElGamalPrivateKeyParameters pv3 = new ElGamalPrivateKeyParameters(pu1.Y, pu1.Parameters);
doTest(pu1, pu2, pu3);
doTest(pv1, pv2, pv3);
ElGamalParameters pr1 = pu1.Parameters;
ElGamalParameters pr2 = new ElGamalParameters(pr1.P, pr1.G);
ElGamalParameters pr3 = new ElGamalParameters(pr1.G, pr1.P);
doTest(pr1, pr2, pr3);
pu2 = new ElGamalPublicKeyParameters(pu1.Y, pr2);
pv2 = new ElGamalPrivateKeyParameters(pv1.X, pr2);
doTest(pu1, pu2, pu3);
doTest(pv1, pv2, pv3);
ElGamalTestKeyParameters k1 = new ElGamalTestKeyParameters(false, null);
ElGamalTestKeyParameters k2 = new ElGamalTestKeyParameters(false, null);
ElGamalTestKeyParameters k3 = new ElGamalTestKeyParameters(false, pu1.Parameters);
doTest(k1, k2, k3);
}
[Test]
public void TestDsa()
{
BigInteger a = BigInteger.ValueOf(1), b = BigInteger.ValueOf(2), c = BigInteger.ValueOf(3);
DsaParameters dsaP1 = new DsaParameters(a, b, c);
DsaParameters dsaP2 = new DsaParameters(a, b, c);
DsaParameters dsaP3 = new DsaParameters(b, c, a);
doTest(dsaP1, dsaP2, dsaP3);
DsaValidationParameters vp1 = new DsaValidationParameters(new byte[20], 1024);
DsaValidationParameters vp2 = new DsaValidationParameters(new byte[20], 1024);
DsaValidationParameters vp3 = new DsaValidationParameters(new byte[24], 1024);
doTest(vp1, vp1, vp3);
doTest(vp1, vp2, vp3);
byte[] bytes = new byte[20];
bytes[0] = 1;
vp3 = new DsaValidationParameters(bytes, 1024);
doTest(vp1, vp2, vp3);
vp3 = new DsaValidationParameters(new byte[20], 2048);
doTest(vp1, vp2, vp3);
}
[Test]
public void TestGost3410()
{
BigInteger a = BigInteger.ValueOf(1), b = BigInteger.ValueOf(2), c = BigInteger.ValueOf(3);
Gost3410Parameters g1 = new Gost3410Parameters(a, b, c);
Gost3410Parameters g2 = new Gost3410Parameters(a, b, c);
Gost3410Parameters g3 = new Gost3410Parameters(a, c, c);
doTest(g1, g2, g3);
Gost3410ValidationParameters v1 = new Gost3410ValidationParameters(100, 1);
Gost3410ValidationParameters v2 = new Gost3410ValidationParameters(100, 1);
Gost3410ValidationParameters v3 = new Gost3410ValidationParameters(101, 1);
doTest(v1, v2, v3);
v3 = new Gost3410ValidationParameters(100, 2);
doTest(v1, v2, v3);
v1 = new Gost3410ValidationParameters(100L, 1L);
v2 = new Gost3410ValidationParameters(100L, 1L);
v3 = new Gost3410ValidationParameters(101L, 1L);
doTest(v1, v2, v3);
v3 = new Gost3410ValidationParameters(100L, 2L);
doTest(v1, v2, v3);
}
public override void PerformTest()
{
TestDH();
TestElGamal();
TestGost3410();
TestDsa();
}
public static void Main(
string[] args)
{
RunTest(new EqualsHashCodeTest());
}
}
}
| |
// Copyright 2018 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
namespace Google.Cloud.Datastore.V1.Tests
{
using Google.Api.Gax;
using Google.Api.Gax.Grpc;
using apis = Google.Cloud.Datastore.V1;
using Google.Protobuf;
using Google.Protobuf.WellKnownTypes;
using Grpc.Core;
using Moq;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Threading;
using System.Threading.Tasks;
using Xunit;
/// <summary>Generated unit tests</summary>
public class GeneratedDatastoreClientTest
{
[Fact]
public void Lookup()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
LookupRequest expectedRequest = new LookupRequest
{
ProjectId = "projectId-1969970175",
ReadOptions = new ReadOptions(),
Keys = { },
};
LookupResponse expectedResponse = new LookupResponse();
mockGrpcClient.Setup(x => x.Lookup(expectedRequest, It.IsAny<CallOptions>()))
.Returns(expectedResponse);
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
string projectId = "projectId-1969970175";
ReadOptions readOptions = new ReadOptions();
IEnumerable<Key> keys = new List<Key>();
LookupResponse response = client.Lookup(projectId, readOptions, keys);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public async Task LookupAsync()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
LookupRequest expectedRequest = new LookupRequest
{
ProjectId = "projectId-1969970175",
ReadOptions = new ReadOptions(),
Keys = { },
};
LookupResponse expectedResponse = new LookupResponse();
mockGrpcClient.Setup(x => x.LookupAsync(expectedRequest, It.IsAny<CallOptions>()))
.Returns(new Grpc.Core.AsyncUnaryCall<LookupResponse>(Task.FromResult(expectedResponse), null, null, null, null));
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
string projectId = "projectId-1969970175";
ReadOptions readOptions = new ReadOptions();
IEnumerable<Key> keys = new List<Key>();
LookupResponse response = await client.LookupAsync(projectId, readOptions, keys);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public void Lookup2()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
LookupRequest request = new LookupRequest
{
ProjectId = "projectId-1969970175",
Keys = { },
};
LookupResponse expectedResponse = new LookupResponse();
mockGrpcClient.Setup(x => x.Lookup(request, It.IsAny<CallOptions>()))
.Returns(expectedResponse);
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
LookupResponse response = client.Lookup(request);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public async Task LookupAsync2()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
LookupRequest request = new LookupRequest
{
ProjectId = "projectId-1969970175",
Keys = { },
};
LookupResponse expectedResponse = new LookupResponse();
mockGrpcClient.Setup(x => x.LookupAsync(request, It.IsAny<CallOptions>()))
.Returns(new Grpc.Core.AsyncUnaryCall<LookupResponse>(Task.FromResult(expectedResponse), null, null, null, null));
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
LookupResponse response = await client.LookupAsync(request);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public void RunQuery()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
RunQueryRequest request = new RunQueryRequest
{
ProjectId = "projectId-1969970175",
PartitionId = new PartitionId(),
};
RunQueryResponse expectedResponse = new RunQueryResponse();
mockGrpcClient.Setup(x => x.RunQuery(request, It.IsAny<CallOptions>()))
.Returns(expectedResponse);
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
RunQueryResponse response = client.RunQuery(request);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public async Task RunQueryAsync()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
RunQueryRequest request = new RunQueryRequest
{
ProjectId = "projectId-1969970175",
PartitionId = new PartitionId(),
};
RunQueryResponse expectedResponse = new RunQueryResponse();
mockGrpcClient.Setup(x => x.RunQueryAsync(request, It.IsAny<CallOptions>()))
.Returns(new Grpc.Core.AsyncUnaryCall<RunQueryResponse>(Task.FromResult(expectedResponse), null, null, null, null));
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
RunQueryResponse response = await client.RunQueryAsync(request);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public void BeginTransaction()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
BeginTransactionRequest expectedRequest = new BeginTransactionRequest
{
ProjectId = "projectId-1969970175",
};
BeginTransactionResponse expectedResponse = new BeginTransactionResponse
{
Transaction = ByteString.CopyFromUtf8("-34"),
};
mockGrpcClient.Setup(x => x.BeginTransaction(expectedRequest, It.IsAny<CallOptions>()))
.Returns(expectedResponse);
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
string projectId = "projectId-1969970175";
BeginTransactionResponse response = client.BeginTransaction(projectId);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public async Task BeginTransactionAsync()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
BeginTransactionRequest expectedRequest = new BeginTransactionRequest
{
ProjectId = "projectId-1969970175",
};
BeginTransactionResponse expectedResponse = new BeginTransactionResponse
{
Transaction = ByteString.CopyFromUtf8("-34"),
};
mockGrpcClient.Setup(x => x.BeginTransactionAsync(expectedRequest, It.IsAny<CallOptions>()))
.Returns(new Grpc.Core.AsyncUnaryCall<BeginTransactionResponse>(Task.FromResult(expectedResponse), null, null, null, null));
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
string projectId = "projectId-1969970175";
BeginTransactionResponse response = await client.BeginTransactionAsync(projectId);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public void BeginTransaction2()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
BeginTransactionRequest request = new BeginTransactionRequest
{
ProjectId = "projectId-1969970175",
};
BeginTransactionResponse expectedResponse = new BeginTransactionResponse
{
Transaction = ByteString.CopyFromUtf8("-34"),
};
mockGrpcClient.Setup(x => x.BeginTransaction(request, It.IsAny<CallOptions>()))
.Returns(expectedResponse);
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
BeginTransactionResponse response = client.BeginTransaction(request);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public async Task BeginTransactionAsync2()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
BeginTransactionRequest request = new BeginTransactionRequest
{
ProjectId = "projectId-1969970175",
};
BeginTransactionResponse expectedResponse = new BeginTransactionResponse
{
Transaction = ByteString.CopyFromUtf8("-34"),
};
mockGrpcClient.Setup(x => x.BeginTransactionAsync(request, It.IsAny<CallOptions>()))
.Returns(new Grpc.Core.AsyncUnaryCall<BeginTransactionResponse>(Task.FromResult(expectedResponse), null, null, null, null));
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
BeginTransactionResponse response = await client.BeginTransactionAsync(request);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public void Commit()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
CommitRequest expectedRequest = new CommitRequest
{
ProjectId = "projectId-1969970175",
Mode = CommitRequest.Types.Mode.Unspecified,
Transaction = ByteString.CopyFromUtf8("-34"),
Mutations = { },
};
CommitResponse expectedResponse = new CommitResponse
{
IndexUpdates = 1425228195,
};
mockGrpcClient.Setup(x => x.Commit(expectedRequest, It.IsAny<CallOptions>()))
.Returns(expectedResponse);
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
string projectId = "projectId-1969970175";
CommitRequest.Types.Mode mode = CommitRequest.Types.Mode.Unspecified;
ByteString transaction = ByteString.CopyFromUtf8("-34");
IEnumerable<Mutation> mutations = new List<Mutation>();
CommitResponse response = client.Commit(projectId, mode, transaction, mutations);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public async Task CommitAsync()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
CommitRequest expectedRequest = new CommitRequest
{
ProjectId = "projectId-1969970175",
Mode = CommitRequest.Types.Mode.Unspecified,
Transaction = ByteString.CopyFromUtf8("-34"),
Mutations = { },
};
CommitResponse expectedResponse = new CommitResponse
{
IndexUpdates = 1425228195,
};
mockGrpcClient.Setup(x => x.CommitAsync(expectedRequest, It.IsAny<CallOptions>()))
.Returns(new Grpc.Core.AsyncUnaryCall<CommitResponse>(Task.FromResult(expectedResponse), null, null, null, null));
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
string projectId = "projectId-1969970175";
CommitRequest.Types.Mode mode = CommitRequest.Types.Mode.Unspecified;
ByteString transaction = ByteString.CopyFromUtf8("-34");
IEnumerable<Mutation> mutations = new List<Mutation>();
CommitResponse response = await client.CommitAsync(projectId, mode, transaction, mutations);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public void Commit2()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
CommitRequest expectedRequest = new CommitRequest
{
ProjectId = "projectId-1969970175",
Mode = CommitRequest.Types.Mode.Unspecified,
Mutations = { },
};
CommitResponse expectedResponse = new CommitResponse
{
IndexUpdates = 1425228195,
};
mockGrpcClient.Setup(x => x.Commit(expectedRequest, It.IsAny<CallOptions>()))
.Returns(expectedResponse);
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
string projectId = "projectId-1969970175";
CommitRequest.Types.Mode mode = CommitRequest.Types.Mode.Unspecified;
IEnumerable<Mutation> mutations = new List<Mutation>();
CommitResponse response = client.Commit(projectId, mode, mutations);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public async Task CommitAsync2()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
CommitRequest expectedRequest = new CommitRequest
{
ProjectId = "projectId-1969970175",
Mode = CommitRequest.Types.Mode.Unspecified,
Mutations = { },
};
CommitResponse expectedResponse = new CommitResponse
{
IndexUpdates = 1425228195,
};
mockGrpcClient.Setup(x => x.CommitAsync(expectedRequest, It.IsAny<CallOptions>()))
.Returns(new Grpc.Core.AsyncUnaryCall<CommitResponse>(Task.FromResult(expectedResponse), null, null, null, null));
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
string projectId = "projectId-1969970175";
CommitRequest.Types.Mode mode = CommitRequest.Types.Mode.Unspecified;
IEnumerable<Mutation> mutations = new List<Mutation>();
CommitResponse response = await client.CommitAsync(projectId, mode, mutations);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public void Commit3()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
CommitRequest request = new CommitRequest
{
ProjectId = "projectId-1969970175",
Mode = CommitRequest.Types.Mode.Unspecified,
Mutations = { },
};
CommitResponse expectedResponse = new CommitResponse
{
IndexUpdates = 1425228195,
};
mockGrpcClient.Setup(x => x.Commit(request, It.IsAny<CallOptions>()))
.Returns(expectedResponse);
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
CommitResponse response = client.Commit(request);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public async Task CommitAsync3()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
CommitRequest request = new CommitRequest
{
ProjectId = "projectId-1969970175",
Mode = CommitRequest.Types.Mode.Unspecified,
Mutations = { },
};
CommitResponse expectedResponse = new CommitResponse
{
IndexUpdates = 1425228195,
};
mockGrpcClient.Setup(x => x.CommitAsync(request, It.IsAny<CallOptions>()))
.Returns(new Grpc.Core.AsyncUnaryCall<CommitResponse>(Task.FromResult(expectedResponse), null, null, null, null));
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
CommitResponse response = await client.CommitAsync(request);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public void Rollback()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
RollbackRequest expectedRequest = new RollbackRequest
{
ProjectId = "projectId-1969970175",
Transaction = ByteString.CopyFromUtf8("-34"),
};
RollbackResponse expectedResponse = new RollbackResponse();
mockGrpcClient.Setup(x => x.Rollback(expectedRequest, It.IsAny<CallOptions>()))
.Returns(expectedResponse);
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
string projectId = "projectId-1969970175";
ByteString transaction = ByteString.CopyFromUtf8("-34");
RollbackResponse response = client.Rollback(projectId, transaction);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public async Task RollbackAsync()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
RollbackRequest expectedRequest = new RollbackRequest
{
ProjectId = "projectId-1969970175",
Transaction = ByteString.CopyFromUtf8("-34"),
};
RollbackResponse expectedResponse = new RollbackResponse();
mockGrpcClient.Setup(x => x.RollbackAsync(expectedRequest, It.IsAny<CallOptions>()))
.Returns(new Grpc.Core.AsyncUnaryCall<RollbackResponse>(Task.FromResult(expectedResponse), null, null, null, null));
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
string projectId = "projectId-1969970175";
ByteString transaction = ByteString.CopyFromUtf8("-34");
RollbackResponse response = await client.RollbackAsync(projectId, transaction);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public void Rollback2()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
RollbackRequest request = new RollbackRequest
{
ProjectId = "projectId-1969970175",
Transaction = ByteString.CopyFromUtf8("-34"),
};
RollbackResponse expectedResponse = new RollbackResponse();
mockGrpcClient.Setup(x => x.Rollback(request, It.IsAny<CallOptions>()))
.Returns(expectedResponse);
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
RollbackResponse response = client.Rollback(request);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public async Task RollbackAsync2()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
RollbackRequest request = new RollbackRequest
{
ProjectId = "projectId-1969970175",
Transaction = ByteString.CopyFromUtf8("-34"),
};
RollbackResponse expectedResponse = new RollbackResponse();
mockGrpcClient.Setup(x => x.RollbackAsync(request, It.IsAny<CallOptions>()))
.Returns(new Grpc.Core.AsyncUnaryCall<RollbackResponse>(Task.FromResult(expectedResponse), null, null, null, null));
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
RollbackResponse response = await client.RollbackAsync(request);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public void AllocateIds()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
AllocateIdsRequest expectedRequest = new AllocateIdsRequest
{
ProjectId = "projectId-1969970175",
Keys = { },
};
AllocateIdsResponse expectedResponse = new AllocateIdsResponse();
mockGrpcClient.Setup(x => x.AllocateIds(expectedRequest, It.IsAny<CallOptions>()))
.Returns(expectedResponse);
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
string projectId = "projectId-1969970175";
IEnumerable<Key> keys = new List<Key>();
AllocateIdsResponse response = client.AllocateIds(projectId, keys);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public async Task AllocateIdsAsync()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
AllocateIdsRequest expectedRequest = new AllocateIdsRequest
{
ProjectId = "projectId-1969970175",
Keys = { },
};
AllocateIdsResponse expectedResponse = new AllocateIdsResponse();
mockGrpcClient.Setup(x => x.AllocateIdsAsync(expectedRequest, It.IsAny<CallOptions>()))
.Returns(new Grpc.Core.AsyncUnaryCall<AllocateIdsResponse>(Task.FromResult(expectedResponse), null, null, null, null));
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
string projectId = "projectId-1969970175";
IEnumerable<Key> keys = new List<Key>();
AllocateIdsResponse response = await client.AllocateIdsAsync(projectId, keys);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public void AllocateIds2()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
AllocateIdsRequest request = new AllocateIdsRequest
{
ProjectId = "projectId-1969970175",
Keys = { },
};
AllocateIdsResponse expectedResponse = new AllocateIdsResponse();
mockGrpcClient.Setup(x => x.AllocateIds(request, It.IsAny<CallOptions>()))
.Returns(expectedResponse);
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
AllocateIdsResponse response = client.AllocateIds(request);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public async Task AllocateIdsAsync2()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
AllocateIdsRequest request = new AllocateIdsRequest
{
ProjectId = "projectId-1969970175",
Keys = { },
};
AllocateIdsResponse expectedResponse = new AllocateIdsResponse();
mockGrpcClient.Setup(x => x.AllocateIdsAsync(request, It.IsAny<CallOptions>()))
.Returns(new Grpc.Core.AsyncUnaryCall<AllocateIdsResponse>(Task.FromResult(expectedResponse), null, null, null, null));
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
AllocateIdsResponse response = await client.AllocateIdsAsync(request);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public void ReserveIds()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
ReserveIdsRequest expectedRequest = new ReserveIdsRequest
{
ProjectId = "projectId-1969970175",
Keys = { },
};
ReserveIdsResponse expectedResponse = new ReserveIdsResponse();
mockGrpcClient.Setup(x => x.ReserveIds(expectedRequest, It.IsAny<CallOptions>()))
.Returns(expectedResponse);
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
string projectId = "projectId-1969970175";
IEnumerable<Key> keys = new List<Key>();
ReserveIdsResponse response = client.ReserveIds(projectId, keys);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public async Task ReserveIdsAsync()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
ReserveIdsRequest expectedRequest = new ReserveIdsRequest
{
ProjectId = "projectId-1969970175",
Keys = { },
};
ReserveIdsResponse expectedResponse = new ReserveIdsResponse();
mockGrpcClient.Setup(x => x.ReserveIdsAsync(expectedRequest, It.IsAny<CallOptions>()))
.Returns(new Grpc.Core.AsyncUnaryCall<ReserveIdsResponse>(Task.FromResult(expectedResponse), null, null, null, null));
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
string projectId = "projectId-1969970175";
IEnumerable<Key> keys = new List<Key>();
ReserveIdsResponse response = await client.ReserveIdsAsync(projectId, keys);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public void ReserveIds2()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
ReserveIdsRequest request = new ReserveIdsRequest
{
ProjectId = "projectId-1969970175",
Keys = { },
};
ReserveIdsResponse expectedResponse = new ReserveIdsResponse();
mockGrpcClient.Setup(x => x.ReserveIds(request, It.IsAny<CallOptions>()))
.Returns(expectedResponse);
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
ReserveIdsResponse response = client.ReserveIds(request);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
[Fact]
public async Task ReserveIdsAsync2()
{
Mock<Datastore.DatastoreClient> mockGrpcClient = new Mock<Datastore.DatastoreClient>(MockBehavior.Strict);
ReserveIdsRequest request = new ReserveIdsRequest
{
ProjectId = "projectId-1969970175",
Keys = { },
};
ReserveIdsResponse expectedResponse = new ReserveIdsResponse();
mockGrpcClient.Setup(x => x.ReserveIdsAsync(request, It.IsAny<CallOptions>()))
.Returns(new Grpc.Core.AsyncUnaryCall<ReserveIdsResponse>(Task.FromResult(expectedResponse), null, null, null, null));
DatastoreClient client = new DatastoreClientImpl(mockGrpcClient.Object, null);
ReserveIdsResponse response = await client.ReserveIdsAsync(request);
Assert.Same(expectedResponse, response);
mockGrpcClient.VerifyAll();
}
}
}
| |
//------------------------------------------------------------------------------
// <license file="XMLLib.cs">
//
// The use and distribution terms for this software are contained in the file
// named 'LICENSE', which can be found in the resources directory of this
// distribution.
//
// By using this software in any fashion, you are agreeing to be bound by the
// terms of this license.
//
// </license>
//------------------------------------------------------------------------------
using System;
using System.Xml;
using System.Text;
namespace EcmaScript.NET.Types.E4X
{
class XMLLib
{
private IScriptable globalScope;
public IScriptable GlobalScope
{
get { return globalScope; }
}
private XMLLib (IScriptable globalScope)
{
this.globalScope = globalScope;
}
// Environment settings
internal bool ignoreComments;
internal bool ignoreProcessingInstructions;
internal bool ignoreWhitespace;
internal bool prettyPrinting;
internal int prettyIndent;
// prototypes
internal XML xmlPrototype;
internal XMLList xmlListPrototype;
internal Namespace namespacePrototype;
internal QName qnamePrototype;
internal void SetDefaultSettings ()
{
ignoreComments = true;
ignoreProcessingInstructions = true;
ignoreWhitespace = true;
prettyPrinting = true;
prettyIndent = 2;
}
private static readonly object XML_LIB_KEY = new object ();
public static XMLLib ExtractFromScopeOrNull (IScriptable scope)
{
ScriptableObject so = ScriptRuntime.getLibraryScopeOrNull (scope);
if (so == null) {
return null;
}
return (XMLLib)so.GetAssociatedValue (XML_LIB_KEY);
}
public static XMLLib ExtractFromScope (IScriptable scope)
{
XMLLib lib = ExtractFromScopeOrNull (scope);
if (lib != null) {
return lib;
}
string msg = ScriptRuntime.GetMessage ("msg.XML.not.available");
throw Context.ReportRuntimeError (msg);
}
internal XMLLib BindToScope (IScriptable scope)
{
ScriptableObject so = ScriptRuntime.getLibraryScopeOrNull (scope);
if (so == null) {
// standard library should be initialized at this point
throw new ApplicationException ();
}
return (XMLLib)so.AssociateValue (XML_LIB_KEY, this);
}
public static void Init (IScriptable scope, bool zealed)
{
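// Create the E4X library instance with default settings, bind it to the scope, then export
// the XML, XMLList, QName and Namespace prototypes as JS classes.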
XMLLib impl = new XMLLib (scope);
impl.SetDefaultSettings ();
impl.BindToScope (scope);
impl.xmlPrototype = XML.CreateEmptyXml (impl);
impl.xmlPrototype.ExportAsJSClass (zealed);
impl.xmlListPrototype = new XMLList (impl);
impl.xmlListPrototype.ExportAsJSClass (zealed);
impl.qnamePrototype = new QName (impl);
impl.qnamePrototype.ExportAsJSClass (zealed);
impl.namespacePrototype = new Namespace (impl);
impl.namespacePrototype.ExportAsJSClass (zealed);
}
/**
* See E4X 13.1.2.1.
*/
public bool IsXMLName (Context cx, object value)
{
string name;
try {
name = ScriptConvert.ToString (value);
}
catch (EcmaScriptError ee) {
if ("TypeError".Equals (ee.Name)) {
return false;
}
throw;
}
// See http://w3.org/TR/xml-names11/#NT-NCName
int length = name.Length;
if (length != 0) {
if (IsNCNameStartChar (name [0])) {
for (int i = 1; i != length; ++i) {
if (!IsNCNameChar (name [i])) {
return false;
}
}
return true;
}
}
return false;
}
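// True if the code point may start an NCName: ASCII letters and '_', plus the
// non-ASCII start ranges from the XML Namespaces spec referenced above.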
private static bool IsNCNameStartChar (int c)
{
if ((c & ~0x7F) == 0) {
// Optimize for ASCII and use A..Z < _ < a..z
if (c >= 'a') {
return c <= 'z';
}
else if (c >= 'A') {
if (c <= 'Z') {
return true;
}
return c == '_';
}
}
else if ((c & ~0x1FFF) == 0) {
return (0xC0 <= c && c <= 0xD6)
|| (0xD8 <= c && c <= 0xF6)
|| (0xF8 <= c && c <= 0x2FF)
|| (0x370 <= c && c <= 0x37D)
|| 0x37F <= c;
}
return (0x200C <= c && c <= 0x200D)
|| (0x2070 <= c && c <= 0x218F)
|| (0x2C00 <= c && c <= 0x2FEF)
|| (0x3001 <= c && c <= 0xD7FF)
|| (0xF900 <= c && c <= 0xFDCF)
|| (0xFDF0 <= c && c <= 0xFFFD)
|| (0x10000 <= c && c <= 0xEFFFF);
}
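// True if the code point may appear after the first character of an NCName:
// start characters plus digits, '-', '.', the middle dot (0xB7) and combining marks.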
private static bool IsNCNameChar (int c)
{
if ((c & ~0x7F) == 0) {
// Optimize for ASCII and use - < . < 0..9 < A..Z < _ < a..z
if (c >= 'a') {
return c <= 'z';
}
else if (c >= 'A') {
if (c <= 'Z') {
return true;
}
return c == '_';
}
else if (c >= '0') {
return c <= '9';
}
else {
return c == '-' || c == '.';
}
}
else if ((c & ~0x1FFF) == 0) {
return IsNCNameStartChar (c) || c == 0xB7
|| (0x300 <= c && c <= 0x36F);
}
return IsNCNameStartChar (c) || (0x203F <= c && c <= 0x2040);
}
public String GetDefaultNamespaceURI (Context cx)
{
String uri = "";
if (cx == null) {
cx = Context.CurrentContext;
}
if (cx != null) {
Object ns = ScriptRuntime.searchDefaultNamespace (cx);
if (ns != null) {
if (ns is Namespace) {
uri = ((Namespace)ns).Uri;
}
else {
// Should not happen but for now it could
// due to bad searchDefaultNamespace implementation.
}
}
}
return uri;
}
public Namespace GetDefaultNamespace (Context cx)
{
if (cx == null) {
cx = Context.CurrentContext;
if (cx == null) {
return namespacePrototype;
}
}
Namespace result;
Object ns = ScriptRuntime.searchDefaultNamespace (cx);
if (ns == null) {
result = namespacePrototype;
}
else {
if (ns is Namespace) {
result = (Namespace)ns;
}
else {
// Should not happen but for now it could
// due to bad searchDefaultNamespace implementation.
result = namespacePrototype;
}
}
return result;
}
public IRef NameRef (Context cx, object name, IScriptable scope, int memberTypeFlags)
{
XMLName nameRef = XMLName.Parse (this, cx, name);
if (nameRef == null)
return null;
return nameRef;
}
public IRef NameRef (Context cx, object ns, object name, IScriptable scope, int memberTypeFlags)
{
throw new NotImplementedException ();
}
public string EscapeAttributeValue (object value)
{
throw new NotImplementedException ();
}
public string EscapeTextValue (object value)
{
throw new NotImplementedException ();
}
public object ToDefaultXmlNamespace (Context cx, object uriValue)
{
return Namespace.Parse (this, cx, uriValue);
}
internal static EcmaScriptError BadXMLName (object value)
{
String msg;
if (CliHelper.IsNumber (value)) {
msg = "Can not construct XML name from number: ";
}
else if (value is Boolean) {
msg = "Can not construct XML name from boolean: ";
}
else if (value == Undefined.Value || value == null) {
msg = "Can not construct XML name from ";
}
else {
throw new ArgumentException (value.ToString ());
}
return ScriptRuntime.TypeError (msg + ScriptConvert.ToString (value));
}
internal XMLName toQualifiedName (Context cx, Object namespaceValue,
Object nameValue)
{
// This is duplication of constructQName(cx, namespaceValue, nameValue)
// but for XMLName
String uri;
String localName;
if (nameValue is QName) {
QName qname = (QName)nameValue;
localName = qname.LocalName;
}
else {
localName = ScriptConvert.ToString (nameValue);
}
Namespace ns;
if (namespaceValue == Undefined.Value) {
if ("*".Equals (localName)) {
ns = null;
}
else {
ns = GetDefaultNamespace (cx);
}
}
else if (namespaceValue == null) {
ns = null;
}
else if (namespaceValue is Namespace) {
ns = (Namespace)namespaceValue;
}
else {
ns = Namespace.Parse (this, cx, namespaceValue);
}
if (ns == null) {
uri = null;
}
else {
uri = ns.Uri;
}
return XMLName.FormProperty (uri, localName);
}
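// Converts a value to an XMLList: null/undefined, single XML objects and raw XmlNode
// instances yield null; an existing XMLList is returned as-is; anything else is wrapped.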
internal XMLList ToXMLList (object value)
{
if (value == null || value is Undefined)
return null;
if (value is XMLList)
return (XMLList)value;
if (value is XML)
return null;
if (value is XmlNode)
return null;
return new XMLList (this, value);
}
internal XML ToXML (object value)
{
if (value == null || value is Undefined)
return null;
if (value is XML)
return (XML)value;
if (value is XMLList)
return null;
if (value is XmlNode)
return new XML (this, (XmlNode)value);
return XML.CreateFromJS (this, value);
}
}
}
| |
using System;
using System.CodeDom.Compiler;
using System.Collections.Generic;
namespace EduHub.Data.Entities
{
/// <summary>
/// General Ledger Initiatives
/// </summary>
[GeneratedCode("EduHub Data", "0.9")]
public sealed partial class KGLINIT : EduHubEntity
{
#region Foreign Navigation Properties
private IReadOnlyList<ARF> Cache_INITIATIVE_ARF_INITIATIVE;
private IReadOnlyList<CRF> Cache_INITIATIVE_CRF_INITIATIVE;
private IReadOnlyList<CRPR> Cache_INITIATIVE_CRPR_INITIATIVE;
private IReadOnlyList<DFF> Cache_INITIATIVE_DFF_INITIATIVE;
private IReadOnlyList<DRF> Cache_INITIATIVE_DRF_INITIATIVE;
private IReadOnlyList<GLBUDG> Cache_INITIATIVE_GLBUDG_INITIATIVE;
private IReadOnlyList<GLCF> Cache_INITIATIVE_GLCF_INITIATIVE;
private IReadOnlyList<GLCFPREV> Cache_INITIATIVE_GLCFPREV_INITIATIVE;
private IReadOnlyList<GLF> Cache_INITIATIVE_GLF_INITIATIVE;
private IReadOnlyList<GLFPREV> Cache_INITIATIVE_GLFPREV_INITIATIVE;
private IReadOnlyList<PC> Cache_INITIATIVE_PC_INITIATIVE;
private IReadOnlyList<PD> Cache_INITIATIVE_PD_INITIATIVE;
private IReadOnlyList<PEF> Cache_INITIATIVE_PEF_INITIATIVE;
private IReadOnlyList<PEFH> Cache_INITIATIVE_PEFH_INITIATIVE;
private IReadOnlyList<PEPS> Cache_INITIATIVE_PEPS_INITIATIVE;
private IReadOnlyList<PEPU> Cache_INITIATIVE_PEPU_INITIATIVE;
private IReadOnlyList<PEPUH> Cache_INITIATIVE_PEPUH_INITIATIVE;
private IReadOnlyList<PI> Cache_INITIATIVE_PI_INITIATIVE;
private IReadOnlyList<PN> Cache_INITIATIVE_PN_INITIATIVE;
private IReadOnlyList<RQGL> Cache_INITIATIVE_RQGL_INITIATIVE;
private IReadOnlyList<RQT> Cache_INITIATIVE_RQT_INITIATIVE;
private IReadOnlyList<SA> Cache_INITIATIVE_SA_INITIATIVE;
private IReadOnlyList<SDFC> Cache_INITIATIVE_SDFC_INITIATIVE;
private IReadOnlyList<SGFC> Cache_INITIATIVE_SGFC_INITIATIVE;
#endregion
/// <inheritdoc />
public override DateTime? EntityLastModified
{
get
{
return LW_DATE;
}
}
#region Field Properties
/// <summary>
/// Type key, eg I
/// [Uppercase Alphanumeric (3)]
/// </summary>
public string INITIATIVE { get; internal set; }
/// <summary>
/// eg, INCOME
/// [Alphanumeric (50)]
/// </summary>
public string TITLE { get; internal set; }
/// <summary>
/// Allow account to be used(Y/N)
/// [Uppercase Alphanumeric (1)]
/// </summary>
public string ACTIVE { get; internal set; }
/// <summary>
/// Last write date
/// </summary>
public DateTime? LW_DATE { get; internal set; }
/// <summary>
/// Last write time
/// </summary>
public short? LW_TIME { get; internal set; }
/// <summary>
/// Last operator
/// [Uppercase Alphanumeric (128)]
/// </summary>
public string LW_USER { get; internal set; }
#endregion
#region Foreign Navigation Properties
/// <summary>
/// ARF (Asset Financial Transactions) related entities by [KGLINIT.INITIATIVE]->[ARF.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<ARF> INITIATIVE_ARF_INITIATIVE
{
get
{
if (Cache_INITIATIVE_ARF_INITIATIVE == null &&
!Context.ARF.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_ARF_INITIATIVE))
{
Cache_INITIATIVE_ARF_INITIATIVE = new List<ARF>().AsReadOnly();
}
return Cache_INITIATIVE_ARF_INITIATIVE;
}
}
/// <summary>
/// CRF (Creditor Financial Transaction) related entities by [KGLINIT.INITIATIVE]->[CRF.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<CRF> INITIATIVE_CRF_INITIATIVE
{
get
{
if (Cache_INITIATIVE_CRF_INITIATIVE == null &&
!Context.CRF.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_CRF_INITIATIVE))
{
Cache_INITIATIVE_CRF_INITIATIVE = new List<CRF>().AsReadOnly();
}
return Cache_INITIATIVE_CRF_INITIATIVE;
}
}
/// <summary>
/// CRPR (Creditor Purchase Requisitions) related entities by [KGLINIT.INITIATIVE]->[CRPR.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<CRPR> INITIATIVE_CRPR_INITIATIVE
{
get
{
if (Cache_INITIATIVE_CRPR_INITIATIVE == null &&
!Context.CRPR.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_CRPR_INITIATIVE))
{
Cache_INITIATIVE_CRPR_INITIATIVE = new List<CRPR>().AsReadOnly();
}
return Cache_INITIATIVE_CRPR_INITIATIVE;
}
}
/// <summary>
/// DFF (Family Financial Transactions) related entities by [KGLINIT.INITIATIVE]->[DFF.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<DFF> INITIATIVE_DFF_INITIATIVE
{
get
{
if (Cache_INITIATIVE_DFF_INITIATIVE == null &&
!Context.DFF.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_DFF_INITIATIVE))
{
Cache_INITIATIVE_DFF_INITIATIVE = new List<DFF>().AsReadOnly();
}
return Cache_INITIATIVE_DFF_INITIATIVE;
}
}
/// <summary>
/// DRF (DR Transactions) related entities by [KGLINIT.INITIATIVE]->[DRF.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<DRF> INITIATIVE_DRF_INITIATIVE
{
get
{
if (Cache_INITIATIVE_DRF_INITIATIVE == null &&
!Context.DRF.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_DRF_INITIATIVE))
{
Cache_INITIATIVE_DRF_INITIATIVE = new List<DRF>().AsReadOnly();
}
return Cache_INITIATIVE_DRF_INITIATIVE;
}
}
/// <summary>
/// GLBUDG (General Ledger Budgets) related entities by [KGLINIT.INITIATIVE]->[GLBUDG.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<GLBUDG> INITIATIVE_GLBUDG_INITIATIVE
{
get
{
if (Cache_INITIATIVE_GLBUDG_INITIATIVE == null &&
!Context.GLBUDG.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_GLBUDG_INITIATIVE))
{
Cache_INITIATIVE_GLBUDG_INITIATIVE = new List<GLBUDG>().AsReadOnly();
}
return Cache_INITIATIVE_GLBUDG_INITIATIVE;
}
}
/// <summary>
/// GLCF (GL Combined Financial Trans) related entities by [KGLINIT.INITIATIVE]->[GLCF.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<GLCF> INITIATIVE_GLCF_INITIATIVE
{
get
{
if (Cache_INITIATIVE_GLCF_INITIATIVE == null &&
!Context.GLCF.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_GLCF_INITIATIVE))
{
Cache_INITIATIVE_GLCF_INITIATIVE = new List<GLCF>().AsReadOnly();
}
return Cache_INITIATIVE_GLCF_INITIATIVE;
}
}
/// <summary>
/// GLCFPREV (Last Years GL Combined Financial Trans) related entities by [KGLINIT.INITIATIVE]->[GLCFPREV.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<GLCFPREV> INITIATIVE_GLCFPREV_INITIATIVE
{
get
{
if (Cache_INITIATIVE_GLCFPREV_INITIATIVE == null &&
!Context.GLCFPREV.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_GLCFPREV_INITIATIVE))
{
Cache_INITIATIVE_GLCFPREV_INITIATIVE = new List<GLCFPREV>().AsReadOnly();
}
return Cache_INITIATIVE_GLCFPREV_INITIATIVE;
}
}
/// <summary>
/// GLF (General Ledger Transactions) related entities by [KGLINIT.INITIATIVE]->[GLF.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<GLF> INITIATIVE_GLF_INITIATIVE
{
get
{
if (Cache_INITIATIVE_GLF_INITIATIVE == null &&
!Context.GLF.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_GLF_INITIATIVE))
{
Cache_INITIATIVE_GLF_INITIATIVE = new List<GLF>().AsReadOnly();
}
return Cache_INITIATIVE_GLF_INITIATIVE;
}
}
/// <summary>
/// GLFPREV (Last Years GL Financial Trans) related entities by [KGLINIT.INITIATIVE]->[GLFPREV.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<GLFPREV> INITIATIVE_GLFPREV_INITIATIVE
{
get
{
if (Cache_INITIATIVE_GLFPREV_INITIATIVE == null &&
!Context.GLFPREV.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_GLFPREV_INITIATIVE))
{
Cache_INITIATIVE_GLFPREV_INITIATIVE = new List<GLFPREV>().AsReadOnly();
}
return Cache_INITIATIVE_GLFPREV_INITIATIVE;
}
}
/// <summary>
/// PC (Cost Centres) related entities by [KGLINIT.INITIATIVE]->[PC.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<PC> INITIATIVE_PC_INITIATIVE
{
get
{
if (Cache_INITIATIVE_PC_INITIATIVE == null &&
!Context.PC.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_PC_INITIATIVE))
{
Cache_INITIATIVE_PC_INITIATIVE = new List<PC>().AsReadOnly();
}
return Cache_INITIATIVE_PC_INITIATIVE;
}
}
/// <summary>
/// PD (Departments) related entities by [KGLINIT.INITIATIVE]->[PD.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<PD> INITIATIVE_PD_INITIATIVE
{
get
{
if (Cache_INITIATIVE_PD_INITIATIVE == null &&
!Context.PD.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_PD_INITIATIVE))
{
Cache_INITIATIVE_PD_INITIATIVE = new List<PD>().AsReadOnly();
}
return Cache_INITIATIVE_PD_INITIATIVE;
}
}
/// <summary>
/// PEF (Payroll Transactions) related entities by [KGLINIT.INITIATIVE]->[PEF.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<PEF> INITIATIVE_PEF_INITIATIVE
{
get
{
if (Cache_INITIATIVE_PEF_INITIATIVE == null &&
!Context.PEF.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_PEF_INITIATIVE))
{
Cache_INITIATIVE_PEF_INITIATIVE = new List<PEF>().AsReadOnly();
}
return Cache_INITIATIVE_PEF_INITIATIVE;
}
}
/// <summary>
/// PEFH (Payroll Transaction History) related entities by [KGLINIT.INITIATIVE]->[PEFH.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<PEFH> INITIATIVE_PEFH_INITIATIVE
{
get
{
if (Cache_INITIATIVE_PEFH_INITIATIVE == null &&
!Context.PEFH.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_PEFH_INITIATIVE))
{
Cache_INITIATIVE_PEFH_INITIATIVE = new List<PEFH>().AsReadOnly();
}
return Cache_INITIATIVE_PEFH_INITIATIVE;
}
}
/// <summary>
/// PEPS (Standard and Last Pays) related entities by [KGLINIT.INITIATIVE]->[PEPS.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<PEPS> INITIATIVE_PEPS_INITIATIVE
{
get
{
if (Cache_INITIATIVE_PEPS_INITIATIVE == null &&
!Context.PEPS.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_PEPS_INITIATIVE))
{
Cache_INITIATIVE_PEPS_INITIATIVE = new List<PEPS>().AsReadOnly();
}
return Cache_INITIATIVE_PEPS_INITIATIVE;
}
}
/// <summary>
/// PEPU (Super (SGL and Employee) YTD Transactions) related entities by [KGLINIT.INITIATIVE]->[PEPU.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<PEPU> INITIATIVE_PEPU_INITIATIVE
{
get
{
if (Cache_INITIATIVE_PEPU_INITIATIVE == null &&
!Context.PEPU.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_PEPU_INITIATIVE))
{
Cache_INITIATIVE_PEPU_INITIATIVE = new List<PEPU>().AsReadOnly();
}
return Cache_INITIATIVE_PEPU_INITIATIVE;
}
}
/// <summary>
/// PEPUH (Super (SGL and Employee) History YTD Transactions) related entities by [KGLINIT.INITIATIVE]->[PEPUH.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<PEPUH> INITIATIVE_PEPUH_INITIATIVE
{
get
{
if (Cache_INITIATIVE_PEPUH_INITIATIVE == null &&
!Context.PEPUH.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_PEPUH_INITIATIVE))
{
Cache_INITIATIVE_PEPUH_INITIATIVE = new List<PEPUH>().AsReadOnly();
}
return Cache_INITIATIVE_PEPUH_INITIATIVE;
}
}
/// <summary>
/// PI (Pay Items) related entities by [KGLINIT.INITIATIVE]->[PI.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<PI> INITIATIVE_PI_INITIATIVE
{
get
{
if (Cache_INITIATIVE_PI_INITIATIVE == null &&
!Context.PI.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_PI_INITIATIVE))
{
Cache_INITIATIVE_PI_INITIATIVE = new List<PI>().AsReadOnly();
}
return Cache_INITIATIVE_PI_INITIATIVE;
}
}
/// <summary>
/// PN (Payroll Groups) related entities by [KGLINIT.INITIATIVE]->[PN.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<PN> INITIATIVE_PN_INITIATIVE
{
get
{
if (Cache_INITIATIVE_PN_INITIATIVE == null &&
!Context.PN.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_PN_INITIATIVE))
{
Cache_INITIATIVE_PN_INITIATIVE = new List<PN>().AsReadOnly();
}
return Cache_INITIATIVE_PN_INITIATIVE;
}
}
/// <summary>
/// RQGL (Purchasing Group GL Codes) related entities by [KGLINIT.INITIATIVE]->[RQGL.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<RQGL> INITIATIVE_RQGL_INITIATIVE
{
get
{
if (Cache_INITIATIVE_RQGL_INITIATIVE == null &&
!Context.RQGL.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_RQGL_INITIATIVE))
{
Cache_INITIATIVE_RQGL_INITIATIVE = new List<RQGL>().AsReadOnly();
}
return Cache_INITIATIVE_RQGL_INITIATIVE;
}
}
/// <summary>
/// RQT (Purchase Requisition Transaction) related entities by [KGLINIT.INITIATIVE]->[RQT.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<RQT> INITIATIVE_RQT_INITIATIVE
{
get
{
if (Cache_INITIATIVE_RQT_INITIATIVE == null &&
!Context.RQT.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_RQT_INITIATIVE))
{
Cache_INITIATIVE_RQT_INITIATIVE = new List<RQT>().AsReadOnly();
}
return Cache_INITIATIVE_RQT_INITIATIVE;
}
}
/// <summary>
/// SA (Fees) related entities by [KGLINIT.INITIATIVE]->[SA.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<SA> INITIATIVE_SA_INITIATIVE
{
get
{
if (Cache_INITIATIVE_SA_INITIATIVE == null &&
!Context.SA.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_SA_INITIATIVE))
{
Cache_INITIATIVE_SA_INITIATIVE = new List<SA>().AsReadOnly();
}
return Cache_INITIATIVE_SA_INITIATIVE;
}
}
/// <summary>
/// SDFC (Sundry Debtor Fees) related entities by [KGLINIT.INITIATIVE]->[SDFC.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<SDFC> INITIATIVE_SDFC_INITIATIVE
{
get
{
if (Cache_INITIATIVE_SDFC_INITIATIVE == null &&
!Context.SDFC.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_SDFC_INITIATIVE))
{
Cache_INITIATIVE_SDFC_INITIATIVE = new List<SDFC>().AsReadOnly();
}
return Cache_INITIATIVE_SDFC_INITIATIVE;
}
}
/// <summary>
/// SGFC (General Ledger Fees) related entities by [KGLINIT.INITIATIVE]->[SGFC.INITIATIVE]
/// Type key, eg I
/// </summary>
public IReadOnlyList<SGFC> INITIATIVE_SGFC_INITIATIVE
{
get
{
if (Cache_INITIATIVE_SGFC_INITIATIVE == null &&
!Context.SGFC.TryFindByINITIATIVE(INITIATIVE, out Cache_INITIATIVE_SGFC_INITIATIVE))
{
Cache_INITIATIVE_SGFC_INITIATIVE = new List<SGFC>().AsReadOnly();
}
return Cache_INITIATIVE_SGFC_INITIATIVE;
}
}
#endregion
}
}
| |
// <copyright file="MlkBiCgStabTest.cs" company="Math.NET">
// Math.NET Numerics, part of the Math.NET Project
// http://numerics.mathdotnet.com
// http://github.com/mathnet/mathnet-numerics
// http://mathnetnumerics.codeplex.com
//
// Copyright (c) 2009-2014 Math.NET
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
// </copyright>
using System;
using MathNet.Numerics.LinearAlgebra;
using MathNet.Numerics.LinearAlgebra.Single;
using MathNet.Numerics.LinearAlgebra.Single.Solvers;
using MathNet.Numerics.LinearAlgebra.Solvers;
using NUnit.Framework;
namespace MathNet.Numerics.UnitTests.LinearAlgebraTests.Single.Solvers.Iterative
{
/// <summary>
/// Tests for Multiple-Lanczos Bi-Conjugate Gradient stabilized iterative matrix solver.
/// </summary>
[TestFixture, Category("LASolver")]
public class MlkBiCgStabTest
{
/// <summary>
/// Convergence boundary.
/// </summary>
const float ConvergenceBoundary = 1e-5f;
/// <summary>
/// Maximum iterations.
/// </summary>
const int MaximumIterations = 1000;
/// <summary>
/// Solve wide matrix throws <c>ArgumentException</c>.
/// </summary>
[Test]
public void SolveWideMatrixThrowsArgumentException()
{
var matrix = new SparseMatrix(2, 3);
var input = new DenseVector(2);
var solver = new MlkBiCgStab();
Assert.That(() => matrix.SolveIterative(input, solver), Throws.ArgumentException);
}
/// <summary>
/// Solve long matrix throws <c>ArgumentException</c>.
/// </summary>
[Test]
public void SolveLongMatrixThrowsArgumentException()
{
var matrix = new SparseMatrix(3, 2);
var input = new DenseVector(3);
var solver = new MlkBiCgStab();
Assert.That(() => matrix.SolveIterative(input, solver), Throws.ArgumentException);
}
/// <summary>
/// Solve unit matrix and back multiply.
/// </summary>
[Test]
public void SolveUnitMatrixAndBackMultiply()
{
// Create the identity matrix
var matrix = SparseMatrix.CreateIdentity(100);
// Create the y vector
var y = Vector<float>.Build.Dense(matrix.RowCount, 1);
// Create an iteration monitor which will keep track of iterative convergence
var monitor = new Iterator<float>(
new IterationCountStopCriterion<float>(MaximumIterations),
new ResidualStopCriterion<float>(ConvergenceBoundary),
new DivergenceStopCriterion<float>(),
new FailureStopCriterion<float>());
var solver = new MlkBiCgStab();
// Solve equation Ax = y
var x = matrix.SolveIterative(y, solver, monitor);
// Now compare the results
Assert.IsNotNull(x, "#02");
Assert.AreEqual(y.Count, x.Count, "#03");
// Back multiply the vector
var z = matrix.Multiply(x);
// Check that the solution converged
Assert.IsTrue(monitor.Status == IterationStatus.Converged, "#04");
// Now compare the vectors
for (var i = 0; i < y.Count; i++)
{
Assert.GreaterOrEqual(ConvergenceBoundary, Math.Abs(y[i] - z[i]), "#05-" + i);
}
}
/// <summary>
/// Solve scaled unit matrix and back multiply.
/// </summary>
[Test]
public void SolveScaledUnitMatrixAndBackMultiply()
{
// Create the identity matrix
var matrix = SparseMatrix.CreateIdentity(100);
// Scale it with a funny number
matrix.Multiply((float)Math.PI, matrix);
// Create the y vector
var y = Vector<float>.Build.Dense(matrix.RowCount, 1);
// Create an iteration monitor which will keep track of iterative convergence
var monitor = new Iterator<float>(
new IterationCountStopCriterion<float>(MaximumIterations),
new ResidualStopCriterion<float>(ConvergenceBoundary),
new DivergenceStopCriterion<float>(),
new FailureStopCriterion<float>());
var solver = new MlkBiCgStab();
// Solve equation Ax = y
var x = matrix.SolveIterative(y, solver, monitor);
// Now compare the results
Assert.IsNotNull(x, "#02");
Assert.AreEqual(y.Count, x.Count, "#03");
// Back multiply the vector
var z = matrix.Multiply(x);
// Check that the solution converged
Assert.IsTrue(monitor.Status == IterationStatus.Converged, "#04");
// Now compare the vectors
for (var i = 0; i < y.Count; i++)
{
Assert.GreaterOrEqual(ConvergenceBoundary, Math.Abs(y[i] - z[i]), "#05-" + i);
}
}
/// <summary>
/// Solve poisson matrix and back multiply.
/// </summary>
[Test]
public void SolvePoissonMatrixAndBackMultiply()
{
// Create the matrix
var matrix = new SparseMatrix(25);
// Assemble the matrix. We assume we're solving the Poisson equation
// on a rectangular 5 x 5 grid
const int GridSize = 5;
// The pattern is:
// 0 .... 0 -1 0 0 0 0 0 0 0 0 -1 4 -1 0 0 0 0 0 0 0 0 -1 0 0 ... 0
for (var i = 0; i < matrix.RowCount; i++)
{
// Insert the first set of -1's
if (i > (GridSize - 1))
{
matrix[i, i - GridSize] = -1;
}
// Insert the second set of -1's
if (i > 0)
{
matrix[i, i - 1] = -1;
}
// Insert the centerline values
matrix[i, i] = 4;
// Insert the first trailing set of -1's
if (i < matrix.RowCount - 1)
{
matrix[i, i + 1] = -1;
}
// Insert the second trailing set of -1's
if (i < matrix.RowCount - GridSize)
{
matrix[i, i + GridSize] = -1;
}
}
// Create the y vector
var y = Vector<float>.Build.Dense(matrix.RowCount, 1);
// Due to the "float" datatype the solution may fail to converge for specific random starting vectors.
// That's why we allow up to 4 attempts.
for (var iteration = 0; iteration <= 3; iteration++)
{
// Create an iteration monitor which will keep track of iterative convergence
var monitor = new Iterator<float>(
new IterationCountStopCriterion<float>(MaximumIterations),
new ResidualStopCriterion<float>(ConvergenceBoundary),
new DivergenceStopCriterion<float>(),
new FailureStopCriterion<float>());
var solver = new MlkBiCgStab();
// Solve equation Ax = y
Vector<float> x;
try
{
x = matrix.SolveIterative(y, solver, monitor);
}
catch (Exception)
{
continue;
}
if (monitor.Status != IterationStatus.Converged)
{
continue;
}
// Now compare the results
Assert.IsNotNull(x, "#02");
Assert.AreEqual(y.Count, x.Count, "#03");
// Back multiply the vector
var z = matrix.Multiply(x);
// Now compare the vectors
for (var i = 0; i < y.Count; i++)
{
Assert.GreaterOrEqual(ConvergenceBoundary, Math.Abs(y[i] - z[i]), "#05-" + i);
}
return;
}
}
/// <summary>
/// Can solve for a random vector.
/// </summary>
/// <param name="order">Matrix order.</param>
[TestCase(5)]
public void CanSolveForRandomVector(int order)
{
// Due to the "float" datatype the solution may fail to converge for a specific random matrix.
// That's why we make up to 3 attempts, relaxing the stop criterion each time.
for (var iteration = 6; iteration > 3; iteration--)
{
var matrixA = Matrix<float>.Build.Random(order, order, 1);
var vectorb = Vector<float>.Build.Random(order, 1);
var monitor = new Iterator<float>(
new IterationCountStopCriterion<float>(MaximumIterations),
new ResidualStopCriterion<float>(Math.Pow(1.0/10.0, iteration)));
var solver = new MlkBiCgStab();
var resultx = matrixA.SolveIterative(vectorb, solver, monitor);
if (monitor.Status != IterationStatus.Converged)
{
// Solution was not found, try again downgrading convergence boundary
continue;
}
Assert.AreEqual(matrixA.ColumnCount, resultx.Count);
var matrixBReconstruct = matrixA*resultx;
// Check the reconstruction.
for (var i = 0; i < order; i++)
{
Assert.AreEqual(vectorb[i], matrixBReconstruct[i], (float)Math.Pow(1.0/10.0, iteration - 3));
}
return;
}
Assert.Fail("Solution was not found in 3 tries");
}
/// <summary>
/// Can solve for a random matrix.
/// </summary>
/// <param name="order">Matrix order.</param>
[TestCase(5)]
public void CanSolveForRandomMatrix(int order)
{
// Due to the "float" datatype the solution may fail to converge for a specific random matrix.
// That's why we make up to 3 attempts, relaxing the stop criterion each time.
for (var iteration = 6; iteration > 3; iteration--)
{
var matrixA = Matrix<float>.Build.Random(order, order, 1);
var matrixB = Matrix<float>.Build.Random(order, order, 1);
var monitor = new Iterator<float>(
new IterationCountStopCriterion<float>(MaximumIterations),
new ResidualStopCriterion<float>(Math.Pow(1.0/10.0, iteration)));
var solver = new MlkBiCgStab();
var matrixX = matrixA.SolveIterative(matrixB, solver, monitor);
if (monitor.Status != IterationStatus.Converged)
{
// Solution was not found, try again downgrading convergence boundary
continue;
}
// The solution X row dimension is equal to the column dimension of A
Assert.AreEqual(matrixA.ColumnCount, matrixX.RowCount);
// The solution X has the same number of columns as B
Assert.AreEqual(matrixB.ColumnCount, matrixX.ColumnCount);
var matrixBReconstruct = matrixA*matrixX;
// Check the reconstruction.
for (var i = 0; i < matrixB.RowCount; i++)
{
for (var j = 0; j < matrixB.ColumnCount; j++)
{
Assert.AreEqual(matrixB[i, j], matrixBReconstruct[i, j], (float)Math.Pow(1.0/10.0, iteration - 3));
}
}
return;
}
Assert.Fail("Solution was not found in 3 tries");
}
}
}
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.IO;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
namespace OpenSim.Region.CoreModules.World.Terrain.FileLoaders
{
public class LLRAW : ITerrainLoader
{
public struct HeightmapLookupValue : IComparable<HeightmapLookupValue>
{
public ushort Index;
public float Value;
public HeightmapLookupValue(ushort index, float value)
{
Index = index;
Value = value;
}
public int CompareTo(HeightmapLookupValue val)
{
return Value.CompareTo(val.Value);
}
}
/// <summary>Lookup table to speed up terrain exports</summary>
HeightmapLookupValue[] LookupHeightTable;
public LLRAW()
{
}
private void BuildLookupHeightTable()
{
LookupHeightTable = new HeightmapLookupValue[256 * 256];
for (int i = 0; i < 256; i++)
{
for (int j = 0; j < 256; j++)
{
LookupHeightTable[i + (j * 256)] = new HeightmapLookupValue((ushort)(i + (j * 256)), (float)((double)i * ((double)j / 128.0d)));
}
}
Array.Sort<HeightmapLookupValue>(LookupHeightTable);
}
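// Note on the table built above: entry (i + j * 256) pairs the packed byte combination
// (red = i, green = j) with the height it encodes, i * (j / 128.0). Sorting by height lets
// SaveStream() find the closest representable height for any terrain value with a single
// Array.BinarySearch instead of scanning all 65536 combinations per cell.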
#region ITerrainLoader Members
public ITerrainChannel LoadFile(string filename)
{
FileInfo file = new FileInfo(filename);
ITerrainChannel channel;
using (FileStream s = file.Open(FileMode.Open, FileAccess.Read))
channel = LoadStream(s);
return channel;
}
public ITerrainChannel LoadFile(string filename, int offsetX, int offsetY, int fileWidth, int fileHeight, int sectionWidth, int sectionHeight)
{
TerrainChannel retval = new TerrainChannel(sectionWidth, sectionHeight);
FileInfo file = new FileInfo(filename);
using (FileStream s = file.Open(FileMode.Open, FileAccess.Read))
using (BinaryReader bs = new BinaryReader(s))
{
int currFileYOffset = fileHeight - 1;
// if our region isn't on the first Y section of the areas to be landscaped, then
// advance to our section of the file
while (currFileYOffset > offsetY)
{
// read a whole strip of regions
int heightsToRead = sectionHeight * (fileWidth * sectionWidth);
bs.ReadBytes(heightsToRead * 13); // because there are 13 fun channels
currFileYOffset--;
}
// got to the Y start offset within the file of our region
// so read the file bits associated with our region
int y;
// for each Y within our Y offset
for (y = sectionHeight - 1; y >= 0; y--)
{
int currFileXOffset = 0;
// if our region isn't the first X section of the areas to be landscaped, then
// advance the stream to the X start pos of our section in the file
// i.e. eat X up to where we start
while (currFileXOffset < offsetX)
{
bs.ReadBytes(sectionWidth * 13);
currFileXOffset++;
}
// got to our X offset, so write our regions X line
int x;
for (x = 0; x < sectionWidth; x++)
{
// Read a strip and continue
retval[x, y] = bs.ReadByte() * (bs.ReadByte() / 128.0);
bs.ReadBytes(11);
}
// record that we wrote it
currFileXOffset++;
// if our region isn't the last X section of the areas to be landscaped, then
// advance the stream to the end of this Y column
while (currFileXOffset < fileWidth)
{
// eat the next regions x line
bs.ReadBytes(sectionWidth * 13); //The 13 channels again
currFileXOffset++;
}
}
}
return retval;
}
public ITerrainChannel LoadStream(Stream s)
{
// The raw format doesn't contain any dimension information.
// Guess the square dimensions by using the length of the raw file.
double dimension = Math.Sqrt((double)(s.Length / 13));
// Regions are always multiples of 256.
int trimmedDimension = (int)dimension - ((int)dimension % (int)Constants.RegionSize);
if (trimmedDimension < Constants.RegionSize)
trimmedDimension = (int)Constants.RegionSize;
TerrainChannel retval = new TerrainChannel(trimmedDimension, trimmedDimension);
using (BinaryReader bs = new BinaryReader(s))
{
int y;
for (y = 0; y < retval.Height; y++)
{
int x;
for (x = 0; x < retval.Width; x++)
{
retval[x, (retval.Height - 1) - y] = bs.ReadByte() * (bs.ReadByte() / 128.0);
bs.ReadBytes(11); // Advance the stream to next bytes.
}
}
}
return retval;
}
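// Rough sketch of the 13-byte cell layout consumed by LoadStream/SaveStream above
// (only the first two bytes carry the height; "cellBytes" and "reader" are hypothetical names):
//
//   byte[] cellBytes = reader.ReadBytes(13);
//   double height = cellBytes[0] * (cellBytes[1] / 128.0);  // red * (green / 128)
//   // cellBytes[2..12] hold the remaining channels and are skipped on load.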
public void SaveFile(string filename, ITerrainChannel map)
{
FileInfo file = new FileInfo(filename);
using (FileStream s = file.Open(FileMode.CreateNew, FileAccess.Write))
SaveStream(s, map);
}
public void SaveStream(Stream s, ITerrainChannel map)
{
if (LookupHeightTable == null)
BuildLookupHeightTable();
using (BinaryWriter binStream = new BinaryWriter(s))
{
// Output the calculated raw
for (int y = 0; y < map.Height; y++)
{
for (int x = 0; x < map.Width; x++)
{
double t = map[x, (map.Height - 1) - y];
//if height is less than 0, set it to 0 as
//can't save -ve values in a LLRAW file
if (t < 0d)
{
t = 0d;
}
int index = 0;
// The lookup table is pre-sorted, so we either find an exact match or
// the next closest (smaller) match with a binary search
index = Array.BinarySearch<HeightmapLookupValue>(LookupHeightTable, new HeightmapLookupValue(0, (float)t));
if (index < 0)
index = ~index - 1;
index = LookupHeightTable[index].Index;
byte red = (byte) (index & 0xFF);
byte green = (byte) ((index >> 8) & 0xFF);
const byte blue = 20;
const byte alpha1 = 0;
const byte alpha2 = 0;
const byte alpha3 = 0;
const byte alpha4 = 0;
const byte alpha5 = 255;
const byte alpha6 = 255;
const byte alpha7 = 255;
const byte alpha8 = 255;
byte alpha9 = red;
byte alpha10 = green;
binStream.Write(red);
binStream.Write(green);
binStream.Write(blue);
binStream.Write(alpha1);
binStream.Write(alpha2);
binStream.Write(alpha3);
binStream.Write(alpha4);
binStream.Write(alpha5);
binStream.Write(alpha6);
binStream.Write(alpha7);
binStream.Write(alpha8);
binStream.Write(alpha9);
binStream.Write(alpha10);
}
}
}
LookupHeightTable = null;
}
public string FileExtension
{
get { return ".raw"; }
}
public virtual void SaveFile(ITerrainChannel m_channel, string filename,
int offsetX, int offsetY,
int fileWidth, int fileHeight,
int regionSizeX, int regionSizeY)
{
throw new System.Exception("Not Implemented");
}
#endregion
public override string ToString()
{
return "LL/SL RAW";
}
//Returns true if this extension is supported for terrain save-tile
public bool SupportsTileSave()
{
return false;
}
}
}
| |
// ------------------------------------------------------------------
// DirectX.Capture
//
// History:
// 2003-Jan-24 BL - created
//
// Copyright (c) 2003 Brian Low
// ------------------------------------------------------------------
using System;
using System.Diagnostics;
using System.Collections;
using System.Runtime.InteropServices;
#if DSHOWNET
using DShowNET;
#else
using DirectShowLib;
#endif
namespace DirectX.Capture
{
/// <summary>
/// A collection of sources (or physical connectors) on an
/// audio or video device. This is used by the <see cref="Capture"/>
/// class to provide a list of available sources on the currently
/// selected audio and video devices. This class cannot be created
/// directly. This class assumes there is only 1 video and 1 audio
/// crossbar and all input pins route to a single output pin on each
/// crossbar.
/// </summary>
public class SourceCollection : CollectionBase, IDisposable
{
// ------------------ Constructors/Destructors -----------------------
/// <summary> Initialize collection with no sources. </summary>
internal SourceCollection()
{
InnerList.Capacity = 1;
}
/// <summary> Initialize collection with sources from graph. </summary>
internal SourceCollection(ICaptureGraphBuilder2 graphBuilder, IBaseFilter deviceFilter, bool isVideoDevice)
{
addFromGraph( graphBuilder, deviceFilter, isVideoDevice );
}
/// <summary> Destructor. Release unmanaged resources. </summary>
~SourceCollection()
{
Dispose();
}
// -------------------- Public Properties -----------------------
/// <summary> Get the source at the specified index. </summary>
public Source this[int index]
{
get { return( (Source) InnerList[index] ); }
}
/// <summary>
/// Gets or sets the source/physical connector currently in use.
/// This is marked internal so that the Capture class can control
/// how and when the source is changed.
/// </summary>
internal Source CurrentSource
{
get
{
// Loop through each source and find the first
// enabled source.
foreach ( Source s in InnerList )
{
if ( s.Enabled )
return( s );
}
return ( null );
}
set
{
if ( value == null )
{
// Disable all sources
foreach ( Source s in InnerList )
s.Enabled = false;
}
else if ( value is CrossbarSource )
{
// Enable this source
// (this will automatically disable all other sources)
value.Enabled = true;
}
else
{
// Disable all sources
// Enable selected source
foreach ( Source s in InnerList )
s.Enabled = false;
value.Enabled = true;
}
}
}
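// Hypothetical usage sketch: the Capture class is expected to expose this collection
// (e.g. as a VideoSources property; the property names here are assumptions) and to
// route selection through the internal CurrentSource setter:
//
//   SourceCollection sources = capture.VideoSources;
//   if ( sources.Count > 0 )
//       capture.VideoSource = sources[0];  // enables that pin, disables the rest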
// -------------------- Public methods -----------------------
/// <summary> Empty the collection. </summary>
public new void Clear()
{
for ( int c = 0; c < InnerList.Count; c++ )
this[c].Dispose();
InnerList.Clear();
}
/// <summary> Release unmanaged resources. </summary>
public void Dispose()
{
Clear();
InnerList.Capacity = 1;
}
// -------------------- Protected Methods -----------------------
//#if NEWCODE
/// <summary> Populate the collection from a filter graph. </summary>
public void addFromGraph(ICaptureGraphBuilder2 graphBuilder, IBaseFilter deviceFilter, bool isVideoDevice)
//#else
// protected void addFromGraph ( ICaptureGraphBuilder2 graphBuilder, IBaseFilter deviceFilter, bool isVideoDevice )
//#endif
{
Trace.Assert( graphBuilder != null );
ArrayList crossbars = findCrossbars( graphBuilder, deviceFilter );
foreach ( IAMCrossbar crossbar in crossbars )
{
ArrayList sources = findCrossbarSources( graphBuilder, crossbar, isVideoDevice );
InnerList.AddRange( sources );
}
if ( !isVideoDevice )
{
if ( InnerList.Count == 0 )
{
ArrayList sources = findAudioSources( graphBuilder, deviceFilter );
InnerList.AddRange( sources );
}
}
}
/// <summary>
/// Retrieve a list of crossbar filters in the graph.
/// Most hardware devices should have a maximum of 2 crossbars,
/// one for video and another for audio.
/// </summary>
protected ArrayList findCrossbars(ICaptureGraphBuilder2 graphBuilder, IBaseFilter deviceFilter)
{
ArrayList crossbars = new ArrayList();
Guid category = FindDirection.UpstreamOnly;
Guid type = new Guid();
Guid riid = typeof(IAMCrossbar).GUID;
int hr;
object comObj = null;
object comObjNext = null;
// Find the first interface, look upstream from the selected device
#if DSHOWNET
hr = graphBuilder.FindInterface( ref category, ref type, deviceFilter, ref riid, out comObj );
#else
hr = graphBuilder.FindInterface( category, type, deviceFilter, riid, out comObj );
#endif
while ( (hr == 0) && (comObj != null) )
{
// If found, add to the list
if ( comObj is IAMCrossbar )
{
crossbars.Add( comObj as IAMCrossbar );
// Find the second interface, look upstream from the next found crossbar
#if DSHOWNET
hr = graphBuilder.FindInterface( ref category, ref type, comObj as IBaseFilter, ref riid, out comObjNext );
#else
hr = graphBuilder.FindInterface( category, type, comObj as IBaseFilter, riid, out comObjNext );
#endif
comObj = comObjNext;
}
else
comObj = null;
}
return( crossbars );
}
/// <summary>
/// Populate the internal InnerList with sources/physical connectors
/// found on the crossbars. Each instance of this class is limited
/// to video only or audio only sources ( specified by the isVideoDevice
/// parameter on the constructor) so we check each source before adding
/// it to the list.
/// </summary>
protected ArrayList findCrossbarSources(ICaptureGraphBuilder2 graphBuilder, IAMCrossbar crossbar, bool isVideoDevice)
{
ArrayList sources = new ArrayList();
int hr;
int numOutPins;
int numInPins;
hr = crossbar.get_PinCounts( out numOutPins, out numInPins );
if ( hr < 0 )
Marshal.ThrowExceptionForHR( hr );
// We loop through every combination of output and input pin
// to see which combinations match.
// Loop through output pins
for ( int cOut = 0; cOut < numOutPins; cOut++ )
{
// Loop through input pins
for ( int cIn = 0; cIn < numInPins; cIn++ )
{
// Can this combination be routed?
hr = crossbar.CanRoute( cOut, cIn );
if ( hr == 0 )
{
// Yes, this can be routed
int relatedInputPin;
PhysicalConnectorType connectorType;
hr = crossbar.get_CrossbarPinInfo( true, cIn, out relatedInputPin, out connectorType );
if ( hr < 0 )
Marshal.ThrowExceptionForHR( hr );
// Add it to the list
CrossbarSource source = new CrossbarSource( crossbar, cOut, cIn, relatedInputPin, connectorType );
sources.Add( source );
}
}
}
// Some silly drivers (*cough* Nvidia *cough*) add crossbars
// with no real choices. Every input can only be routed to
// one output. Loop through every Source and see if there is
// at least one other Source with the same output pin.
int refIndex = 0;
while ( refIndex < sources.Count )
{
bool found = false;
CrossbarSource refSource = (CrossbarSource) sources[refIndex];
for ( int c = 0; c < sources.Count; c++ )
{
CrossbarSource s = (CrossbarSource) sources[c];
if ( ( refSource.OutputPin == s.OutputPin ) && ( refIndex != c ) )
{
found = true;
break;
}
}
if ( found )
refIndex++;
else
sources.RemoveAt( refIndex );
}
// Some of the video input pins have related audio pins
// that should be connected at the same time. We noted the pin number
// in the CrossbarSource.RelatedInputPin. Now that we have all
// the sources, lookup the CrossbarSource object associated with
// that pin
foreach ( CrossbarSource source in sources )
{
if ( source.RelatedInputPin != -1 )
{
foreach( CrossbarSource related in sources )
{
if ( source.RelatedInputPin == related.InputPin )
source.RelatedInputSource = related;
}
}
}
// Remove any sources that are not of the correct type
for (int c=0; c<sources.Count; c++)
{
if ( ((CrossbarSource)sources[c]).ConnectorType < PhysicalConnectorType.Audio_Tuner )
{
if ( !isVideoDevice )
{
sources.RemoveAt( c );
c--;
}
}
else
{
if ( isVideoDevice )
{
sources.RemoveAt( c );
c--;
}
}
}
return( sources );
}
protected ArrayList findAudioSources(ICaptureGraphBuilder2 graphBuilder, IBaseFilter deviceFilter)
{
ArrayList sources = new ArrayList();
IAMAudioInputMixer audioInputMixer = deviceFilter as IAMAudioInputMixer;
if ( audioInputMixer != null )
{
// Get a pin enumerator off the filter
IEnumPins pinEnum;
int hr = deviceFilter.EnumPins( out pinEnum );
pinEnum.Reset();
if( (hr == 0) && (pinEnum != null) )
{
// Loop through each pin
IPin[] pins = new IPin[1];
#if VS2003 || DSHOWNET
int f;
#else
IntPtr f = IntPtr.Zero;
#endif
do
{
// Get the next pin
#if VS2003 || DSHOWNET
hr = pinEnum.Next( 1, pins, out f );
#else
hr = pinEnum.Next(1, pins, f);
#endif
if( (hr == 0) && (pins[0] != null) )
{
// Is this an input pin?
PinDirection dir = PinDirection.Output;
hr = pins[0].QueryDirection( out dir );
if( (hr == 0) && (dir == (PinDirection.Input)) )
{
// Add the input pin to the sources list
AudioSource source = new AudioSource( pins[0] );
sources.Add( source );
}
pins[0] = null;
}
}
while( hr == 0 );
Marshal.ReleaseComObject( pinEnum ); pinEnum = null;
}
}
// If there is only one source, don't return it
// because there is nothing for the user to choose.
// (Hopefully that single source is already enabled).
if ( sources.Count == 1 )
sources.Clear();
return( sources );
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections;
using System.Collections.Generic;
using Xunit;
namespace System.Net.Primitives.Functional.Tests
{
public static class CredentialCacheTest
{
private static readonly Uri uriPrefix1 = new Uri("http://microsoft:80");
private static readonly Uri uriPrefix2 = new Uri("http://softmicro:80");
private static readonly string host1 = "host1";
private static readonly string host2 = "host2";
private static readonly int port1 = 500;
private static readonly int port2 = 700;
private static readonly string authenticationType1 = "authenticationType1";
private static readonly string authenticationType2 = "authenticationType2";
private static readonly NetworkCredential credential1 = new NetworkCredential("username1", "password");
private static readonly NetworkCredential credential2 = new NetworkCredential("username2", "password");
private static readonly NetworkCredential credential3 = new NetworkCredential("username3", "password");
private static readonly NetworkCredential credential4 = new NetworkCredential("username4", "password");
private static readonly NetworkCredential credential5 = new NetworkCredential("username5", "password");
private static readonly NetworkCredential credential6 = new NetworkCredential("username6", "password");
private static readonly NetworkCredential credential7 = new NetworkCredential("username7", "password");
private static readonly NetworkCredential credential8 = new NetworkCredential("username8", "password");
private static CredentialCache UriAuthenticationTypeCredentialCache()
{
CredentialCache cc = new CredentialCache();
cc.Add(uriPrefix1, authenticationType1, credential1);
cc.Add(uriPrefix1, authenticationType2, credential2);
cc.Add(uriPrefix2, authenticationType1, credential3);
cc.Add(uriPrefix2, authenticationType2, credential4);
return cc;
}
private static CredentialCache HostPortAuthenticationTypeCredentialCache()
{
CredentialCache cc = new CredentialCache();
cc.Add(host1, port1, authenticationType1, credential1);
cc.Add(host1, port1, authenticationType2, credential2);
cc.Add(host1, port2, authenticationType1, credential3);
cc.Add(host1, port2, authenticationType2, credential4);
cc.Add(host2, port1, authenticationType1, credential5);
cc.Add(host2, port1, authenticationType2, credential6);
cc.Add(host2, port2, authenticationType1, credential7);
cc.Add(host2, port2, authenticationType2, credential8);
return cc;
}
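// Usage sketch for context (not exercised by these tests): a populated cache is typically
// attached to a request so the credential is selected by prefix and authentication type.
//
//   WebRequest request = WebRequest.Create("http://microsoft:80/resource");
//   request.Credentials = UriAuthenticationTypeCredentialCache();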
[Fact]
public static void Ctor_Empty_Success()
{
CredentialCache cc = new CredentialCache();
}
[Fact]
public static void Add_UriAuthenticationTypeCredential_Success()
{
CredentialCache cc = UriAuthenticationTypeCredentialCache();
Assert.Equal(credential1, cc.GetCredential(uriPrefix1, authenticationType1));
Assert.Equal(credential2, cc.GetCredential(uriPrefix1, authenticationType2));
Assert.Equal(credential3, cc.GetCredential(uriPrefix2, authenticationType1));
Assert.Equal(credential4, cc.GetCredential(uriPrefix2, authenticationType2));
}
[Fact]
public static void Add_UriAuthenticationTypeCredential_Invalid()
{
CredentialCache cc = UriAuthenticationTypeCredentialCache();
Assert.Null(cc.GetCredential(new Uri("http://invalid.uri"), authenticationType1)); //No such uriPrefix
Assert.Null(cc.GetCredential(uriPrefix1, "invalid-authentication-type")); //No such authenticationType
Assert.Throws<ArgumentNullException>(() => cc.Add(null, "some", new NetworkCredential())); //Null uriPrefix
Assert.Throws<ArgumentNullException>(() => cc.Add(new Uri("http://microsoft:80"), null, new NetworkCredential())); //Null authenticationType
}
[Fact]
public static void Add_HostPortAuthenticationTypeCredential_Success()
{
CredentialCache cc = HostPortAuthenticationTypeCredentialCache();
Assert.Equal(credential1, cc.GetCredential(host1, port1, authenticationType1));
Assert.Equal(credential2, cc.GetCredential(host1, port1, authenticationType2));
Assert.Equal(credential3, cc.GetCredential(host1, port2, authenticationType1));
Assert.Equal(credential4, cc.GetCredential(host1, port2, authenticationType2));
Assert.Equal(credential5, cc.GetCredential(host2, port1, authenticationType1));
Assert.Equal(credential6, cc.GetCredential(host2, port1, authenticationType2));
Assert.Equal(credential7, cc.GetCredential(host2, port2, authenticationType1));
Assert.Equal(credential8, cc.GetCredential(host2, port2, authenticationType2));
}
[Fact]
public static void Add_HostPortAuthenticationTypeCredential_Invalid()
{
CredentialCache cc = HostPortAuthenticationTypeCredentialCache();
Assert.Null(cc.GetCredential("invalid-host", port1, authenticationType1)); //No such host
Assert.Null(cc.GetCredential(host1, 900, authenticationType1)); //No such port
Assert.Null(cc.GetCredential(host1, port1, "invalid-authentication-type")); //No such authenticationType
Assert.Throws<ArgumentNullException>(() => cc.Add(null, 500, "authenticationType", new NetworkCredential())); //Null host
Assert.Throws<ArgumentNullException>(() => cc.Add("host", 500, null, new NetworkCredential())); //Null authenticationType
Assert.Throws<ArgumentException>(() => cc.Add("", 500, "authenticationType", new NetworkCredential())); //Empty host
Assert.Throws<ArgumentOutOfRangeException>(() => cc.Add("host", -1, "authenticationType", new NetworkCredential())); //Port < 0
}
[Fact]
public static void Remove_UriAuthenticationType_Success()
{
CredentialCache cc = UriAuthenticationTypeCredentialCache();
cc.Remove(uriPrefix1, authenticationType1);
Assert.Null(cc.GetCredential(uriPrefix1, authenticationType1));
}
[Fact]
public static void Remove_UriAuthenticationType_Invalid()
{
CredentialCache cc = new CredentialCache();
//Doesn't throw, just returns
cc.Remove(null, "authenticationType");
cc.Remove(new Uri("http://some.com"), null);
cc.Remove(new Uri("http://some.com"), "authenticationType");
}
[Fact]
public static void Remove_HostPortAuthenticationType_Success()
{
CredentialCache cc = HostPortAuthenticationTypeCredentialCache();
cc.Remove(host1, port1, authenticationType1);
Assert.Null(cc.GetCredential(host1, port1, authenticationType1));
}
[Fact]
public static void Remove_HostPortAuthenticationType_Invalid()
{
CredentialCache cc = new CredentialCache();
//Doesn't throw, just returns
cc.Remove(null, 500, "authenticationType");
cc.Remove("host", 500, null);
cc.Remove("host", -1, "authenticationType");
cc.Remove("host", 500, "authenticationType");
}
[Fact]
public static void GetCredential_SimilarUriAuthenticationType_GetLongestUriPrefix()
{
CredentialCache cc = new CredentialCache();
cc.Add(new Uri("http://microsoft:80/greaterpath"), authenticationType1, credential2);
cc.Add(new Uri("http://microsoft:80/"), authenticationType1, credential1);
NetworkCredential nc = cc.GetCredential(new Uri("http://microsoft:80"), authenticationType1);
Assert.Equal(nc, credential2);
}
[Fact]
public static void GetCredential_UriAuthenticationType_Invalid()
{
CredentialCache cc = new CredentialCache();
Assert.Throws<ArgumentNullException>(() => cc.GetCredential(null, "authenticationType")); //Null uriPrefix
Assert.Throws<ArgumentNullException>(() => cc.GetCredential(new Uri("http://microsoft:80"), null)); //Null authenticationType
}
[Fact]
public static void GetCredential_HostPortAuthenticationType_Invalid()
{
CredentialCache cc = new CredentialCache();
Assert.Throws<ArgumentNullException>(() => cc.GetCredential(null, 500, "authenticationType")); //Null host
Assert.Throws<ArgumentNullException>(() => cc.GetCredential("host", 500, null)); //Null authenticationType
Assert.Throws<ArgumentException>(() => cc.GetCredential("", 500, "authenticationType")); //Empty host
Assert.Throws<ArgumentOutOfRangeException>(() => cc.GetCredential("host", -1, "authenticationType")); //Port < 0
}
[Fact]
public static void GetEnumerator_Enumerate_Success()
{
CredentialCache cc = HostPortAuthenticationTypeCredentialCache();
IEnumerator enumerator = cc.GetEnumerator();
Assert.NotNull(enumerator);
while (enumerator.MoveNext())
{
object item = enumerator.Current;
Assert.NotNull(item);
}
}
[Fact]
public static void GetEnumerator_MoveNextSynchronization_Invalid()
{
//An InvalidOperationException is thrown when moving the enumerator
//when a credential is added to the cache after getting the enumerator
CredentialCache cc = HostPortAuthenticationTypeCredentialCache();
IEnumerator enumerator = cc.GetEnumerator();
cc.Add(uriPrefix1, authenticationType1, credential1);
Assert.Throws<InvalidOperationException>(() => enumerator.MoveNext());
}
[Fact]
public static void GetEnumerator_CurrentSynchronization_Invalid()
{
//An InvalidOperationException is thrown when getting the current enumerated object
//when a credential is added to the cache after getting the enumerator
CredentialCache cc = HostPortAuthenticationTypeCredentialCache();
IEnumerator enumerator = cc.GetEnumerator();
enumerator.MoveNext();
cc.Add(uriPrefix1, authenticationType1, credential1);
Assert.Throws<InvalidOperationException>(() => enumerator.Current);
}
[Fact]
public static void GetEnumerator_ResetIndexGetCurrent_Invalid()
{
CredentialCache cc = new CredentialCache();
IEnumerator enumerator = cc.GetEnumerator();
enumerator.Reset();
Assert.Throws<InvalidOperationException>(() => enumerator.Current);
}
[Fact]
public static void GetEnumerator_MoveNextIndex_Invalid()
{
CredentialCache cc = new CredentialCache();
IEnumerator enumerator = cc.GetEnumerator();
enumerator.MoveNext();
Assert.Throws<InvalidOperationException>(() => enumerator.Current);
}
[Fact]
public static void DefaultCredentials_Get_Success()
{
NetworkCredential c = CredentialCache.DefaultCredentials as NetworkCredential;
Assert.NotNull(c);
Assert.Equal(String.Empty, c.UserName);
Assert.Equal(String.Empty, c.Password);
Assert.Equal(String.Empty, c.Domain);
}
[Fact]
public static void AddRemove_UriAuthenticationTypeDefaultCredentials_Success()
{
NetworkCredential nc = CredentialCache.DefaultNetworkCredentials as NetworkCredential;
CredentialCache cc = new CredentialCache();
cc.Add(uriPrefix1, authenticationType1, nc);
Assert.Equal(nc, cc.GetCredential(uriPrefix1, authenticationType1));
cc.Remove(uriPrefix1, authenticationType1);
Assert.Null(cc.GetCredential(uriPrefix1, authenticationType1));
}
[Fact]
public static void AddRemove_HostPortAuthenticationTypeDefaultCredentials_Success()
{
NetworkCredential nc = CredentialCache.DefaultNetworkCredentials as NetworkCredential;
CredentialCache cc = new CredentialCache();
cc.Add(host1, port1, authenticationType1, nc);
Assert.Equal(nc, cc.GetCredential(host1, port1, authenticationType1));
cc.Remove(host1, port1, authenticationType1);
Assert.Null(cc.GetCredential(host1, port1, authenticationType1));
}
[Fact]
public static void DefaultNetworkCredentials_Get_Success()
{
NetworkCredential nc = CredentialCache.DefaultNetworkCredentials as NetworkCredential;
Assert.NotNull(nc);
Assert.Equal(String.Empty, nc.UserName);
Assert.Equal(String.Empty, nc.Password);
Assert.Equal(String.Empty, nc.Domain);
}
}
}
| |
#region Header
// --------------------------------------------------------------------------
// Tethys.Silverlight
// ==========================================================================
//
// This library contains common code for WPF, Silverlight, Windows Phone and
// Windows 8 projects.
//
// ===========================================================================
//
// <copyright file="CRC16.cs" company="Tethys">
// Copyright 2010-2015 by Thomas Graf
// All rights reserved.
// Licensed under the Apache License, Version 2.0.
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied.
// </copyright>
//
// System ... Microsoft .Net Framework 4.5
// Tools .... Microsoft Visual Studio 2013
//
// ---------------------------------------------------------------------------
#endregion
namespace Tethys.Silverlight.Cryptography
{
using System;
using System.IO;
using System.Runtime.InteropServices.WindowsRuntime;
using Windows.Storage.Streams;
/// <summary>
/// The class CRC16 implements a fast CRC-16 checksum algorithm,
/// that uses the following polynomial:<br/>
/// <code>
/// x**16 + x**15 + x**2 + 1 ( = 0x8005)
/// </code>
/// Default initial value = 0x0000,<br/>
/// default final XOR value = 0x0000.
/// </summary>
public sealed class CRC16
{
// ===================================================================
// CHECKSUM RESULTS
// ===================================================================
// TEST1 is the three character sequence "ABC".
// TEST2 is the three character sequence "CBA".
// TEST3 is the eight character sequence of "12345678"
// TEST4 is the 1024 character sequence of "12345678"
// repeated 128 times.
//
// Value byte[0] byte[1]
// --------- ------- -------
// CRC-16(TEST1) = 4521 0x21 0x45
// CRC-16(TEST2) = 4401 0x01 0x44
// CRC-16(TEST3) = 3C9D 0x9D 0x3C
// CRC-16(TEST4) = EBF3 0xF3 0xEB
// ===================================================================
/*****************************************************************************
Note This algorithm is specified by W. D. Schwaderer in his book
---- "C Programmer's Guide to NetBIOS" Howard W. Sams & Company
First Edition 1988.
The implementation of the minimized-table-4-bit variant was designed
by Marcellus Buchheit after the guidelines of the CRC-16 algorithm
in the book above.
==============================================================================
Explanation of the CRC algorithm intermediate remainder register motion.
The V(x) positions are insertion points of the polynom, x is the power.
Please note, that the orientation of the bits in the remainder register is
switched from left to right: The left bit is bit 0, the right bit is bit 15.
This solves the problem that the incoming data bytes start with the lowest bit
and not (as stored in the PC memory) with the highest bit. This diagram is
an expansion and a correction (!) of the [Schwaderer] tables at page 190
and 191.
CRC-16 V(0) V(2) V(15) <- insert positions
------ 16 15 14 13 12 11 10 09 08 07 06 05 04 03 02 01
-- -- -- -- -- -- -- -- -- -- -- -- -- -- -- --
Cycle 1 V1 V1 V1
Cycle 2 V1 V1 V1 V1 V1
V2 V2 V2
Cycle 3 V1 V1 V2 V1 V1 V1
V2 V2 V3 V2 V2
V3 V3
Cycle 4 V1 V1 V3 V2 V1 V1 V1
V2 V2 V4 V3 V2 V2
V3 V3 V3
V4 V4
Cycle 5 V1 V1 V4 V3 V2 V1 V1 V1
V2 V2 V5 V4 V3 V2 V2
V3 V3 V3
V4 V4 V4
V5 V5
Cycle 6 V1 V1 V5 V4 V3 V2 V1 V1 V1
V2 V2 V6 V5 V4 V3 V2 V2
V3 V3 V3
V4 V4 V4
V5 V5 V5
V6 V6
Cycle 7 V1 V1 V6 V5 V4 V3 V2 V1 V1 V1
V2 V2 V7 V6 V5 V4 V3 V2 V2
V3 V3 V3
V4 V4 V4
V5 V5 V5
V6 V6 V6
V7 V7
Cycle 8 V1 V1 V7 V6 V5 V4 V3 V2 V1 V1 V1
V2 V2 V8 V7 V6 V5 V4 V3 V2 V2
V3 V3 V3
V4 V4 V4
V5 V5 V5
V6 V6 V6
V7 V7 V7
V8 V8
*****************************************************************************/
/// <summary>
/// Constants used to compute the CRC-16 checksum.
/// (Table for the CRC-16 polynomial 0x8005, i.e. the reflected 0xA001 table.)
/// </summary>
private static readonly uint[] Tab16 =
{
0x0000, 0xC0C1, 0xC181, 0x0140, 0xC301, 0x03C0, 0x0280, 0xC241,
0xC601, 0x06C0, 0x0780, 0xC741, 0x0500, 0xC5C1, 0xC481, 0x0440,
0xCC01, 0x0CC0, 0x0D80, 0xCD41, 0x0F00, 0xCFC1, 0xCE81, 0x0E40,
0x0A00, 0xCAC1, 0xCB81, 0x0B40, 0xC901, 0x09C0, 0x0880, 0xC841,
0xD801, 0x18C0, 0x1980, 0xD941, 0x1B00, 0xDBC1, 0xDA81, 0x1A40,
0x1E00, 0xDEC1, 0xDF81, 0x1F40, 0xDD01, 0x1DC0, 0x1C80, 0xDC41,
0x1400, 0xD4C1, 0xD581, 0x1540, 0xD701, 0x17C0, 0x1680, 0xD641,
0xD201, 0x12C0, 0x1380, 0xD341, 0x1100, 0xD1C1, 0xD081, 0x1040,
0xF001, 0x30C0, 0x3180, 0xF141, 0x3300, 0xF3C1, 0xF281, 0x3240,
0x3600, 0xF6C1, 0xF781, 0x3740, 0xF501, 0x35C0, 0x3480, 0xF441,
0x3C00, 0xFCC1, 0xFD81, 0x3D40, 0xFF01, 0x3FC0, 0x3E80, 0xFE41,
0xFA01, 0x3AC0, 0x3B80, 0xFB41, 0x3900, 0xF9C1, 0xF881, 0x3840,
0x2800, 0xE8C1, 0xE981, 0x2940, 0xEB01, 0x2BC0, 0x2A80, 0xEA41,
0xEE01, 0x2EC0, 0x2F80, 0xEF41, 0x2D00, 0xEDC1, 0xEC81, 0x2C40,
0xE401, 0x24C0, 0x2580, 0xE541, 0x2700, 0xE7C1, 0xE681, 0x2640,
0x2200, 0xE2C1, 0xE381, 0x2340, 0xE101, 0x21C0, 0x2080, 0xE041,
0xA001, 0x60C0, 0x6180, 0xA141, 0x6300, 0xA3C1, 0xA281, 0x6240,
0x6600, 0xA6C1, 0xA781, 0x6740, 0xA501, 0x65C0, 0x6480, 0xA441,
0x6C00, 0xACC1, 0xAD81, 0x6D40, 0xAF01, 0x6FC0, 0x6E80, 0xAE41,
0xAA01, 0x6AC0, 0x6B80, 0xAB41, 0x6900, 0xA9C1, 0xA881, 0x6840,
0x7800, 0xB8C1, 0xB981, 0x7940, 0xBB01, 0x7BC0, 0x7A80, 0xBA41,
0xBE01, 0x7EC0, 0x7F80, 0xBF41, 0x7D00, 0xBDC1, 0xBC81, 0x7C40,
0xB401, 0x74C0, 0x7580, 0xB541, 0x7700, 0xB7C1, 0xB681, 0x7640,
0x7200, 0xB2C1, 0xB381, 0x7340, 0xB101, 0x71C0, 0x7080, 0xB041,
0x5000, 0x90C1, 0x9181, 0x5140, 0x9301, 0x53C0, 0x5280, 0x9241,
0x9601, 0x56C0, 0x5780, 0x9741, 0x5500, 0x95C1, 0x9481, 0x5440,
0x9C01, 0x5CC0, 0x5D80, 0x9D41, 0x5F00, 0x9FC1, 0x9E81, 0x5E40,
0x5A00, 0x9AC1, 0x9B81, 0x5B40, 0x9901, 0x59C0, 0x5880, 0x9841,
0x8801, 0x48C0, 0x4980, 0x8941, 0x4B00, 0x8BC1, 0x8A81, 0x4A40,
0x4E00, 0x8EC1, 0x8F81, 0x4F40, 0x8D01, 0x4DC0, 0x4C80, 0x8C41,
0x4400, 0x84C1, 0x8581, 0x4540, 0x8701, 0x47C0, 0x4680, 0x8641,
0x8201, 0x42C0, 0x4380, 0x8341, 0x4100, 0x81C1, 0x8081, 0x4040
}; // Tab16[]
/// <summary>
/// Hash algorithm name.
/// </summary>
private const string AlgorithmNameCrc16 = "CRC16";
/// <summary>
/// Hash size in bytes.
/// </summary>
private const int HashSizeBytesCrc16 = 2;
/// <summary>
/// Default initial value.
/// </summary>
public const ushort DefaultInit = 0x0000;
/// <summary>
/// Default final XOR value.
/// </summary>
public const ushort DefaultXor = 0x0000;
/// <summary>
/// Initial value.
/// </summary>
private ushort initValue;
/// <summary>
/// Final XOR value.
/// </summary>
private ushort xorValue;
/// <summary>
/// 16 bit CRC value.
/// </summary>
private ushort crc;
#region PUBLIC HASH ALGORITHM METHODS
/// <summary>
/// Gets or sets the initial value.
/// </summary>
public ushort InitValue
{
get
{
return this.initValue;
}
set
{
this.initValue = value;
this.Initialize();
}
} // InitValue
/// <summary>
/// Gets or sets the final XOR value.
/// </summary>
public ushort XorValue
{
get { return this.xorValue; }
set { this.xorValue = value; }
} // XorValue
/// <summary>
/// Gets the name of the open hash algorithm.
/// </summary>
public string AlgorithmName
{
get
{
return AlgorithmNameCrc16;
}
} // AlgorithmName
/// <summary>
/// Gets the length, in bytes, of the hash.
/// </summary>
public int HashLength
{
get
{
return HashSizeBytesCrc16;
}
} // HashLength
/// <summary>
/// Initializes a new instance of the <see cref="CRC16"/> class.
/// </summary>
public CRC16()
{
this.initValue = DefaultInit;
this.xorValue = DefaultXor;
this.Initialize();
} // CRC16()
/// <summary>
/// Initializes an implementation of HashAlgorithm.
/// </summary>
public void Initialize()
{
this.crc = this.initValue;
} // Initialize()
/// <summary>
/// Hashes the data.
/// </summary>
/// <param name="data">Data to be hashed.</param>
/// <returns>Hashed data.</returns>
public byte[] HashData(IBuffer data)
{
if (data == null)
{
throw new ArgumentNullException("data");
} // if
this.Initialize();
// cast IBuffer to something we can use
Stream stream = data.AsStream();
for (int o = 0; o < data.Length; o++)
{
// calculate CRC for next byte
// using table with 256 entries
int i = (this.crc ^ stream.ReadByte()) & 0x00FF;
this.crc = (ushort)(((this.crc >> 8) & 0x00FF) ^ Tab16[i]);
} // for
return this.HashFinal();
} // HashData()
/// <summary>
/// Hashes the data.
/// </summary>
/// <param name="buffer">The input for which to compute the hash code. </param>
/// <param name="offset">The offset into the byte array from which to begin using data. </param>
/// <param name="count">The number of bytes in the byte array to use as data.</param>
/// <returns>Hashed data.</returns>
public byte[] HashData(byte[] buffer, int offset, int count)
{
if (buffer == null)
{
throw new ArgumentNullException("buffer");
} // if
this.Initialize();
this.HashCore(buffer, offset, count);
return this.HashFinal();
} // HashData()
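// Usage sketch (expected value taken from the checksum table at the top of this class):
//
//   var crc16 = new CRC16();
//   byte[] hash = crc16.HashData(System.Text.Encoding.ASCII.GetBytes("ABC"), 0, 3);
//   // HashFinal() stores the high byte first, so:
//   ushort value = (ushort)((hash[0] << 8) | hash[1]);   // 0x4521 for "ABC"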
#endregion // PUBLIC HASH ALGORITHM METHODS
#region PROTECTED HASH ALGORITHM METHODS
/// <summary>
/// Routes data written to the object into the hash
/// algorithm for computing the hash.<br/>
/// This function calculates the CRC-16-checksum via the specified partial block.
/// The CRC-16 value of the previous calculation is updated with the specified block.<br/>
/// </summary>
/// <param name="buffer">The input for which to compute the hash code. </param>
/// <param name="offset">The offset into the byte array from which to begin using data. </param>
/// <param name="count">The number of bytes in the byte array to use as data.</param>
/// <remarks>
/// Before the CRC-16 for a complete sequence is calculated, the CRC value must
/// be set to 0. After calculation the complete sequence, the result is stored
/// in the little-endian form (low byte/high byte) after the sequence. To check
/// the complete sequence including this value, the same values are used and the
/// final result must be 0, otherwise one or more bits in the complete sequence
/// is wrong.
/// It uses the following generator polynomial:
/// x**16 + x**15 + x**2 + 1 ( = 0x8005)
/// </remarks>
private void HashCore(byte[] buffer, int offset, int count)
{
for (; count != 0; count--, offset++)
{
// calculate CRC for next byte
// using table with 256 entries
int i = (this.crc ^ buffer[offset]) & 0x00FF;
this.crc = (ushort)(((this.crc >> 8) & 0x00FF) ^ Tab16[i]);
} // for
} // HashCore()
/// <summary>
/// Finalizes the hash computation after the last data is processed
/// by the cryptographic stream object.
/// </summary>
/// <returns>The computed hash code.</returns>
private byte[] HashFinal()
{
ushort crcRet = (ushort)(this.crc ^ this.xorValue);
// save new calculated value
byte[] hashValue = new byte[HashSizeBytesCrc16];
hashValue[0] = (byte)((crcRet & 0xff00) >> 8);
hashValue[1] = (byte)(crcRet & 0x00ff);
return hashValue;
} // HashFinal()
#endregion // PROTECTED HASH ALGORITHM METHODS
} // CRC16
} // Tethys.Silverlight.Cryptography
| |
#region -- License Terms --
//
// MessagePack for CLI
//
// Copyright (C) 2010-2014 FUJIWARA, Yusuke
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
#endregion -- License Terms --
#if UNITY_STANDALONE || UNITY_WEBPLAYER || UNITY_WII || UNITY_IPHONE || UNITY_ANDROID || UNITY_PS3 || UNITY_XBOX360 || UNITY_FLASH || UNITY_BKACKBERRY || UNITY_WINRT
#define UNITY
#endif
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
#if !UNITY
using System.Diagnostics.Contracts;
#endif // !UNITY
namespace MsgPack
{
partial class MessagePackObjectDictionary
{
/// <summary>
/// Represents the collection of values in a <see cref="MessagePackObjectDictionary"/>.
/// </summary>
#if !SILVERLIGHT && !NETFX_CORE
[Serializable]
#endif
[DebuggerDisplay( "Count={Count}" )]
[DebuggerTypeProxy( typeof( CollectionDebuggerProxy<> ) )]
[SuppressMessage( "Microsoft.Design", "CA1034:NestedTypesShouldNotBeVisible", Justification = "ICollection implementing dictionary should return ICollection implementing values." )]
public sealed partial class ValueCollection : ICollection<MessagePackObject>, ICollection
{
private readonly MessagePackObjectDictionary _dictionary;
/// <summary>
/// Gets the number of elements contained in the <see cref="T:System.Collections.Generic.ICollection`1"/>.
/// </summary>
/// <returns>
/// The number of elements contained in the <see cref="T:System.Collections.Generic.ICollection`1"/>.
/// </returns>
public int Count
{
get { return this._dictionary.Count; }
}
bool ICollection<MessagePackObject>.IsReadOnly
{
get { return true; }
}
bool ICollection.IsSynchronized
{
get { return false; }
}
object ICollection.SyncRoot
{
get { return this; }
}
internal ValueCollection( MessagePackObjectDictionary dictionary )
{
#if !UNITY
Contract.Assert( dictionary != null );
#endif // !UNITY
this._dictionary = dictionary;
}
/// <summary>
/// Copies the entire collection to a compatible one-dimensional array, starting at the beginning of the target array.
/// </summary>
/// <param name="array">
/// The one-dimensional <see cref="Array"/> that is the destination of the elements copied from this dictionary.
/// The <see cref="Array"/> must have zero-based indexing.
/// </param>
public void CopyTo( MessagePackObject[] array )
{
if ( array == null )
{
throw new ArgumentNullException( "array" );
}
#if !UNITY
Contract.EndContractBlock();
#endif // !UNITY
CollectionOperation.CopyTo( this, this.Count, 0, array, 0, this.Count );
}
/// <summary>
/// Copies the entire collection to a compatible one-dimensional array,
/// starting at the specified index of the target array.
/// </summary>
/// <param name="array">
/// The one-dimensional <see cref="Array"/> that is the destination of the elements copied from this dictionary.
/// The <see cref="Array"/> must have zero-based indexing.
/// </param>
/// <param name="arrayIndex">
/// The zero-based index in <paramref name="array"/> at which copying begins.
/// </param>
public void CopyTo( MessagePackObject[] array, int arrayIndex )
{
CollectionOperation.CopyTo( this, this.Count, 0, array, arrayIndex, this.Count );
}
/// <summary>
/// Copies a range of elements from this collection to a compatible one-dimensional array,
/// starting at the specified index of the target array.
/// </summary>
/// <param name="index">
/// The zero-based index in the source dictionary at which copying begins.
/// </param>
/// <param name="array">
/// The one-dimensional <see cref="Array"/> that is the destination of the elements copied from this dictionary.
/// The <see cref="Array"/> must have zero-based indexing.
/// </param>
/// <param name="arrayIndex">
/// The zero-based index in <paramref name="array"/> at which copying begins.
/// </param>
/// <param name="count">
/// The number of elements to copy.
/// </param>
public void CopyTo( int index, MessagePackObject[] array, int arrayIndex, int count )
{
if ( array == null )
{
throw new ArgumentNullException( "array" );
}
if ( index < 0 )
{
throw new ArgumentOutOfRangeException( "index" );
}
if ( 0 < this.Count && this.Count <= index )
{
throw new ArgumentException( "Specified array is too small to complete copy operation.", "array" );
}
if ( arrayIndex < 0 )
{
throw new ArgumentOutOfRangeException( "arrayIndex" );
}
if ( count < 0 )
{
throw new ArgumentOutOfRangeException( "count" );
}
if ( array.Length - count <= arrayIndex )
{
throw new ArgumentException( "Specified array is too small to complete copy operation.", "array" );
}
#if !UNITY
Contract.EndContractBlock();
#endif // !UNITY
CollectionOperation.CopyTo( this, this.Count, index, array, arrayIndex, count );
}
void ICollection.CopyTo( Array array, int arrayIndex )
{
CollectionOperation.CopyTo( this, this.Count, array, arrayIndex );
}
/// <summary>
/// Determines whether this collection contains a specific value.
/// </summary>
/// <param name="item">
/// The object to locate in this collection.</param>
/// <returns>
/// <c>true</c> if <paramref name="item"/> is found in this collection; otherwise, <c>false</c>.
/// </returns>
bool ICollection<MessagePackObject>.Contains( MessagePackObject item )
{
return this._dictionary.ContainsValue( item );
}
void ICollection<MessagePackObject>.Add( MessagePackObject item )
{
throw new NotSupportedException();
}
void ICollection<MessagePackObject>.Clear()
{
throw new NotSupportedException();
}
bool ICollection<MessagePackObject>.Remove( MessagePackObject item )
{
throw new NotSupportedException();
}
/// <summary>
/// Returns an enumerator that iterates through this collection.
/// </summary>
/// <returns>
/// Returns an enumerator that iterates through this collection.
/// </returns>
public Enumerator GetEnumerator()
{
return new Enumerator( this._dictionary );
}
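// Enumeration sketch, assuming the usual IDictionary<,> surface of
// MessagePackObjectDictionary (parameterless constructor, indexer and a Values
// property returning this collection; treat these names as assumptions):
//
//   var dictionary = new MessagePackObjectDictionary();
//   dictionary[ "answer" ] = 42;
//   foreach ( MessagePackObject value in dictionary.Values )
//   {
//       Console.WriteLine( value );
//   }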
IEnumerator<MessagePackObject> IEnumerable<MessagePackObject>.GetEnumerator()
{
return this.GetEnumerator();
}
IEnumerator IEnumerable.GetEnumerator()
{
return this.GetEnumerator();
}
}
}
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
using NPOI.OpenXmlFormats.Spreadsheet;
using System;
using System.Xml;
using NPOI.SS.UserModel;
using NPOI.XSSF.UserModel.Extensions;
using NPOI.XSSF.Model;
namespace NPOI.XSSF.UserModel
{
/**
*
* High level representation of the possible formatting information for the contents of the cells on a sheet in a
* SpreadsheetML document.
*
* @see NPOI.xssf.usermodel.XSSFWorkbook#CreateCellStyle()
* @see NPOI.xssf.usermodel.XSSFWorkbook#getCellStyleAt(short)
* @see NPOI.xssf.usermodel.XSSFCell#setCellStyle(NPOI.ss.usermodel.CellStyle)
*/
public class XSSFCellStyle : ICellStyle
{
private int _cellXfId;
private StylesTable _stylesSource;
private CT_Xf _cellXf;
private CT_Xf _cellStyleXf;
private XSSFFont _font;
private XSSFCellAlignment _cellAlignment;
private ThemesTable _theme;
/**
* Creates a Cell Style from the supplied parts
* @param cellXfId The main XF for the cell. Must be a valid 0-based index into the XF table
* @param cellStyleXfId Optional, style xf. A value of <code>-1</code> means no xf.
* @param stylesSource Styles Source to work off
*/
public XSSFCellStyle(int cellXfId, int cellStyleXfId, StylesTable stylesSource, ThemesTable theme)
{
_cellXfId = cellXfId;
_stylesSource = stylesSource;
_cellXf = stylesSource.GetCellXfAt(this._cellXfId);
_cellStyleXf = cellStyleXfId == -1 ? null : stylesSource.GetCellStyleXfAt(cellStyleXfId);
_theme = theme;
}
/**
* Used so that StylesSource can figure out our location
*/
public CT_Xf GetCoreXf()
{
return _cellXf;
}
/**
* Used so that StylesSource can figure out our location
*/
public CT_Xf GetStyleXf()
{
return _cellStyleXf;
}
/// <summary>
/// Creates an empty Cell Style
/// </summary>
/// <param name="stylesSource"></param>
public XSSFCellStyle(StylesTable stylesSource)
{
_stylesSource = stylesSource;
// We need a new CT_Xf for the main styles
// TODO decide on a style ctxf
_cellXf = new CT_Xf();
_cellStyleXf = null;
}
/**
* Verifies that this style belongs to the supplied Workbook
* Styles Source.
* Will throw an exception if it belongs to a different one.
* This is normally called when trying to assign a style to a
* cell, to ensure the cell and the style are from the same
* workbook (if they're not, it won't work)
* @throws ArgumentException if there's a workbook mis-match
*/
public void VerifyBelongsToStylesSource(StylesTable src)
{
if (this._stylesSource != src)
{
throw new ArgumentException("This Style does not belong to the supplied Workbook Styles Source. Are you trying to assign a style from one workbook to the cell of a different workbook?");
}
}
/**
* Clones all the style information from another
* XSSFCellStyle, onto this one. This
* XSSFCellStyle will then have all the same
* properties as the source, but the two may
* be edited independently.
* Any stylings on this XSSFCellStyle will be lost!
*
* The source XSSFCellStyle could be from another
* XSSFWorkbook if you like. This allows you to
* copy styles from one XSSFWorkbook to another.
*/
public void CloneStyleFrom(ICellStyle source)
{
if (source is XSSFCellStyle)
{
XSSFCellStyle src = (XSSFCellStyle)source;
// Is it on our Workbook?
if (src._stylesSource == _stylesSource)
{
// Nice and easy
_cellXf = src.GetCoreXf().Copy();
_cellStyleXf = src.GetStyleXf().Copy();
}
else
{
// Copy the style
try
{
// Remove any children off the current style, to
// avoid orphaned nodes
if (_cellXf.IsSetAlignment())
_cellXf.UnsetAlignment();
if (_cellXf.IsSetExtLst())
_cellXf.UnsetExtLst();
// Create a new Xf with the same contents
_cellXf =
src.GetCoreXf().Copy();
// bug 56295: ensure that the fill is available and set correctly
CT_Fill fill = CT_Fill.Parse(src.GetCTFill().ToString());
AddFill(fill);
// Swap it over
_stylesSource.ReplaceCellXfAt(_cellXfId, _cellXf);
}
catch (XmlException e)
{
throw new POIXMLException(e);
}
// Copy the format
String fmt = src.GetDataFormatString();
DataFormat = (
(new XSSFDataFormat(_stylesSource)).GetFormat(fmt)
);
// Copy the font
try
{
CT_Font ctFont =
src.GetFont().GetCTFont().Clone();
XSSFFont font = new XSSFFont(ctFont);
font.RegisterTo(_stylesSource);
SetFont(font);
}
catch (XmlException e)
{
throw new POIXMLException(e);
}
}
// Clear out cached details
_font = null;
_cellAlignment = null;
}
else
{
throw new ArgumentException("Can only clone from one XSSFCellStyle to another, not between HSSFCellStyle and XSSFCellStyle");
}
}
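// Hedged usage sketch following the contract described above (workbook variables and the
// style index are illustrative only):
//
//   ICellStyle target = destinationWorkbook.CreateCellStyle();
//   target.CloneStyleFrom(sourceWorkbook.GetCellStyleAt(0));
//   destinationCell.CellStyle = target;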
private void AddFill(CT_Fill fill)
{
int idx = _stylesSource.PutFill(new XSSFCellFill(fill));
_cellXf.fillId = (uint)(idx);
_cellXf.applyFill = (true);
}
public HorizontalAlignment Alignment
{
get
{
return GetAlignmentEnum();
}
set
{
GetCellAlignment().Horizontal = value;
}
}
/// <summary>
/// Get the type of horizontal alignment for the cell
/// </summary>
/// <returns>the type of alignment</returns>
internal HorizontalAlignment GetAlignmentEnum()
{
CT_CellAlignment align = _cellXf.alignment;
if (align != null && align.IsSetHorizontal())
{
return (HorizontalAlignment)align.horizontal;
}
return HorizontalAlignment.General;
}
public BorderStyle BorderBottom
{
get
{
if (!_cellXf.applyBorder) return BorderStyle.None;
int idx = (int)_cellXf.borderId;
CT_Border ct = _stylesSource.GetBorderAt(idx).GetCTBorder();
if (!ct.IsSetBottom())
{
return BorderStyle.None;
}
else
{
return (BorderStyle)ct.bottom.style;
}
}
set
{
CT_Border ct = GetCTBorder();
CT_BorderPr pr = ct.IsSetBottom() ? ct.bottom : ct.AddNewBottom();
if (value == BorderStyle.None) ct.unsetBottom();
else pr.style = (ST_BorderStyle)value;
int idx = _stylesSource.PutBorder(new XSSFCellBorder(ct, _theme));
_cellXf.borderId = (uint)idx;
_cellXf.applyBorder = (true);
}
}
public BorderStyle BorderLeft
{
get
{
if (!_cellXf.applyBorder) return BorderStyle.None;
int idx = (int)_cellXf.borderId;
CT_Border ct = _stylesSource.GetBorderAt(idx).GetCTBorder();
if (!ct.IsSetLeft())
{
return BorderStyle.None;
}
else
{
return (BorderStyle)ct.left.style;
}
}
set
{
CT_Border ct = GetCTBorder();
CT_BorderPr pr = ct.IsSetLeft() ? ct.left : ct.AddNewLeft();
if (value == BorderStyle.None) ct.unsetLeft();
else pr.style = (ST_BorderStyle)value;
int idx = _stylesSource.PutBorder(new XSSFCellBorder(ct, _theme));
_cellXf.borderId = (uint)idx;
_cellXf.applyBorder = (true);
}
}
/// <summary>
/// Get the type of border to use for the right border of the cell
/// </summary>
public BorderStyle BorderRight
{
get
{
if (!_cellXf.applyBorder) return BorderStyle.None;
int idx = (int)_cellXf.borderId;
CT_Border ct = _stylesSource.GetBorderAt(idx).GetCTBorder();
if (!ct.IsSetRight())
{
return BorderStyle.None;
}
else
{
return (BorderStyle)ct.right.style;
}
}
set
{
CT_Border ct = GetCTBorder();
CT_BorderPr pr = ct.IsSetRight() ? ct.right : ct.AddNewRight();
if (value == BorderStyle.None) ct.unsetRight();
else pr.style = (ST_BorderStyle)value;
int idx = _stylesSource.PutBorder(new XSSFCellBorder(ct, _theme));
_cellXf.borderId = (uint)idx;
_cellXf.applyBorder = (true);
}
}
public BorderStyle BorderTop
{
get
{
if (!_cellXf.applyBorder) return BorderStyle.None;
int idx = (int)_cellXf.borderId;
CT_Border ct = _stylesSource.GetBorderAt(idx).GetCTBorder();
if (!ct.IsSetTop())
{
return BorderStyle.None;
}
else
{
return (BorderStyle)ct.top.style;
}
}
set
{
CT_Border ct = GetCTBorder();
CT_BorderPr pr = ct.IsSetTop() ? ct.top : ct.AddNewTop();
if (value == BorderStyle.None) ct.unsetTop();
else pr.style = (ST_BorderStyle)value;
int idx = _stylesSource.PutBorder(new XSSFCellBorder(ct, _theme));
_cellXf.borderId = (uint)idx;
_cellXf.applyBorder = (true);
}
}
/**
* Get the color to use for the bottom border
* Color is optional. When missing, IndexedColors.Automatic is implied.
* @return the index of the color definition, default value is {@link NPOI.ss.usermodel.IndexedColors#AUTOMATIC}
* @see NPOI.ss.usermodel.IndexedColors
*/
public short BottomBorderColor
{
get
{
XSSFColor clr = BottomBorderXSSFColor;
return clr == null ? IndexedColors.Black.Index : clr.Indexed;
}
set
{
XSSFColor clr = new XSSFColor();
clr.Indexed = value;
SetBottomBorderColor(clr);
}
}
/**
* Get the color to use for the bottom border as a {@link XSSFColor}
*
* @return the used color or <code>null</code> if not Set
*/
public XSSFColor BottomBorderXSSFColor
{
get
{
if (!_cellXf.applyBorder) return null;
int idx = (int)_cellXf.borderId;
XSSFCellBorder border = _stylesSource.GetBorderAt(idx);
return border.GetBorderColor(BorderSide.BOTTOM);
}
}
/**
* Get the index of the number format (numFmt) record used by this cell format.
*
* @return the index of the number format
*/
public short DataFormat
{
get
{
return (short)_cellXf.numFmtId;
}
set
{
// XSSF supports >32,767 formats
SetDataFormat(value & 0xFFFF);
}
}
/**
* Set the index of a data format
*
* @param fmt the index of a data format
*/
public void SetDataFormat(int fmt)
{
_cellXf.applyNumberFormat = (true);
_cellXf.numFmtId = (uint)(fmt);
}
/**
* Get the contents of the format string, by looking up
* the StylesSource
*
* @return the number format string
*/
public String GetDataFormatString()
{
int idx = DataFormat;
return new XSSFDataFormat(_stylesSource).GetFormat((short)idx);
}
/// <summary>
/// Get the background fill color.
/// Note - many cells are actually filled with a foreground fill, not a background fill
/// </summary>
public short FillBackgroundColor
{
get
{
XSSFColor clr = (XSSFColor)this.FillBackgroundColorColor;
return clr == null ? IndexedColors.Automatic.Index : clr.Indexed;
}
set
{
XSSFColor clr = new XSSFColor();
clr.Indexed = (value);
SetFillBackgroundColor(clr);
}
}
/**
* Get the background fill color.
* <p>
* Note - many cells are actually Filled with a foreground
* Fill, not a background fill - see {@link #getFillForegroundColor()}
* </p>
* @see NPOI.xssf.usermodel.XSSFColor#getRgb()
* @return XSSFColor - fill color or <code>null</code> if not Set
*/
public IColor FillBackgroundColorColor
{
get
{
return this.FillBackgroundXSSFColor;
}
set
{
this.FillBackgroundXSSFColor = (XSSFColor)value;
}
}
public XSSFColor FillBackgroundXSSFColor
{
get
{
// bug 56295: handle missing applyFill attribute as "true" because Excel does as well
if (_cellXf.IsSetApplyFill() && !_cellXf.applyFill) return null;
int fillIndex = (int)_cellXf.fillId;
XSSFCellFill fg = _stylesSource.GetFillAt(fillIndex);
XSSFColor fillBackgroundColor = fg.GetFillBackgroundColor();
if (fillBackgroundColor != null && _theme != null)
{
_theme.InheritFromThemeAsRequired(fillBackgroundColor);
}
return fillBackgroundColor;
}
set
{
CT_Fill ct = GetCTFill();
CT_PatternFill ptrn = ct.patternFill;
if (value == null)
{
if (ptrn != null) ptrn.UnsetBgColor();
}
else
{
if (ptrn == null) ptrn = ct.AddNewPatternFill();
ptrn.bgColor = (value.GetCTColor());
}
AddFill(ct);
}
}
/**
* Get the foreground fill color.
* <p>
* Many cells are Filled with this, instead of a
* background color ({@link #getFillBackgroundColor()})
* </p>
* @see IndexedColors
* @return fill color, default value is {@link NPOI.ss.usermodel.IndexedColors#AUTOMATIC}
*/
public short FillForegroundColor
{
get
{
XSSFColor clr = (XSSFColor)this.FillForegroundColorColor;
return clr == null ? IndexedColors.Automatic.Index : clr.Indexed;
}
set
{
XSSFColor clr = new XSSFColor();
clr.Indexed = (value);
SetFillForegroundColor(clr);
}
}
/// <summary>
/// Get the foreground fill color.
/// </summary>
public IColor FillForegroundColorColor
{
get
{
return this.FillForegroundXSSFColor;
}
set
{
this.FillForegroundXSSFColor = (XSSFColor)value;
}
}
/// <summary>
/// Get the foreground fill color.
/// </summary>
public XSSFColor FillForegroundXSSFColor
{
get
{
// bug 56295: handle missing applyFill attribute as "true" because Excel does as well
if (_cellXf.IsSetApplyFill() && !_cellXf.applyFill) return null;
int fillIndex = (int)_cellXf.fillId;
XSSFCellFill fg = _stylesSource.GetFillAt(fillIndex);
XSSFColor fillForegroundColor = fg.GetFillForegroundColor();
if (fillForegroundColor != null && _theme != null)
{
_theme.InheritFromThemeAsRequired(fillForegroundColor);
}
return fillForegroundColor;
}
set
{
CT_Fill ct = GetCTFill();
CT_PatternFill ptrn = ct.patternFill;
if (value == null)
{
if (ptrn != null) ptrn.UnsetFgColor();
}
else
{
if (ptrn == null) ptrn = ct.AddNewPatternFill();
ptrn.fgColor = (value.GetCTColor());
}
AddFill(ct);
}
}
public FillPattern FillPattern
{
get
{
// bug 56295: handle missing applyFill attribute as "true" because Excel does as well
if (_cellXf.IsSetApplyFill() && !_cellXf.applyFill) return 0;
int FillIndex = (int)_cellXf.fillId;
XSSFCellFill fill = _stylesSource.GetFillAt(FillIndex);
ST_PatternType ptrn = fill.GetPatternType();
if(ptrn == ST_PatternType.none) return FillPattern.NoFill;
return (FillPattern)((int)ptrn);
}
set
{
CT_Fill ct = GetCTFill();
CT_PatternFill ptrn = ct.IsSetPatternFill() ? ct.GetPatternFill() : ct.AddNewPatternFill();
if (value == FillPattern.NoFill && ptrn.IsSetPatternType())
ptrn.UnsetPatternType();
else ptrn.patternType = (ST_PatternType)(value);
AddFill(ct);
}
}
/**
* Gets the font for this style
* @return Font - font
*/
public XSSFFont GetFont()
{
if (_font == null)
{
_font = _stylesSource.GetFontAt(FontId);
}
return _font;
}
/**
* Gets the index of the font for this style
*
* @return short - font index
* @see NPOI.xssf.usermodel.XSSFWorkbook#getFontAt(short)
*/
public short FontIndex
{
get
{
return (short)FontId;
}
}
/**
* Get whether the cells using this style are to be hidden
*
* @return bool - whether the cells using this style are hidden
*/
public bool IsHidden
{
get
{
if (!_cellXf.IsSetProtection() || !_cellXf.protection.IsSetHidden())
{
return false;
}
return _cellXf.protection.hidden;
}
set
{
if (!_cellXf.IsSetProtection())
{
_cellXf.AddNewProtection();
}
_cellXf.protection.hidden = (value);
}
}
/**
* Get the number of spaces to indent the text in the cell
*
* @return indent - number of spaces
*/
public short Indention
{
get
{
CT_CellAlignment align = _cellXf.alignment;
return (short)(align == null ? 0 : align.indent);
}
set
{
GetCellAlignment().Indent = value;
}
}
/**
* Get the index within the StylesTable (sequence within the collection of CT_Xf elements)
*
* @return unique index number of the underlying record this style represents
*/
public short Index
{
get
{
return (short)this._cellXfId;
}
}
protected internal int UIndex
{
get
{
return this._cellXfId;
}
}
/**
* Get the color to use for the left border
*
* @return the index of the color definition, default value is {@link NPOI.ss.usermodel.IndexedColors#BLACK}
* @see NPOI.ss.usermodel.IndexedColors
*/
public short LeftBorderColor
{
get
{
XSSFColor clr = LeftBorderXSSFColor;
return clr == null ? IndexedColors.Black.Index : clr.Indexed;
}
set
{
XSSFColor clr = new XSSFColor();
clr.Indexed = (value);
SetLeftBorderColor(clr);
}
}
public XSSFColor DiagonalBorderXSSFColor
{
get
{
if (!_cellXf.applyBorder) return null;
int idx = (int)_cellXf.borderId;
XSSFCellBorder border = _stylesSource.GetBorderAt(idx);
return border.GetBorderColor(BorderSide.DIAGONAL);
}
}
/**
* Get the color to use for the left border
*
* @return the index of the color definition or <code>null</code> if not Set
* @see NPOI.ss.usermodel.IndexedColors
*/
public XSSFColor LeftBorderXSSFColor
{
get
{
if (!_cellXf.applyBorder) return null;
int idx = (int)_cellXf.borderId;
XSSFCellBorder border = _stylesSource.GetBorderAt(idx);
return border.GetBorderColor(BorderSide.LEFT);
}
}
/// <summary>
/// Get whether the cells using this style are locked
/// </summary>
public bool IsLocked
{
get
{
if (!_cellXf.IsSetProtection())
{
return true;
}
return _cellXf.protection.locked;
}
set
{
if (!_cellXf.IsSetProtection())
{
_cellXf.AddNewProtection();
}
_cellXf.protection.locked = value;
}
}
/// <summary>
/// Get the color to use for the right border
/// </summary>
public short RightBorderColor
{
get
{
XSSFColor clr = RightBorderXSSFColor;
return clr == null ? IndexedColors.Black.Index : clr.Indexed;
}
set
{
XSSFColor clr = new XSSFColor();
clr.Indexed = (value);
SetRightBorderColor(clr);
}
}
/// <summary>
/// Get the color to use for the right border
/// </summary>
/// <returns></returns>
public XSSFColor RightBorderXSSFColor
{
get
{
if (!_cellXf.applyBorder) return null;
int idx = (int)_cellXf.borderId;
XSSFCellBorder border = _stylesSource.GetBorderAt(idx);
return border.GetBorderColor(BorderSide.RIGHT);
}
}
/// <summary>
/// Get the degree of rotation (between 0 and 180 degrees) for the text in the cell
/// </summary>
/// <example>
/// Expressed in degrees. Values range from 0 to 180. The first letter of
/// the text is considered the center-point of the arc.
/// For 0 - 90, the value represents degrees above horizon. For 91-180 the degrees below the horizon is calculated as:
/// <code>[degrees below horizon] = 90 - textRotation.</code>
/// </example>
public short Rotation
{
get
{
CT_CellAlignment align = _cellXf.alignment;
return (short)(align == null ? 0 : align.textRotation);
}
set
{
GetCellAlignment().TextRotation = value;
}
}
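// Worked example for the mapping above: Rotation = 45 is 45 degrees above the horizon,
// while Rotation = 135 gives 90 - 135 = -45, i.e. 45 degrees below the horizon.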
/**
* Get the color to use for the top border
*
* @return the index of the color definition, default value is {@link NPOI.ss.usermodel.IndexedColors#BLACK}
* @see NPOI.ss.usermodel.IndexedColors
*/
public short TopBorderColor
{
get
{
XSSFColor clr = TopBorderXSSFColor;
return clr == null ? IndexedColors.Black.Index : clr.Indexed;
}
set
{
XSSFColor clr = new XSSFColor();
clr.Indexed = (value);
SetTopBorderColor(clr);
}
}
/// <summary>
/// Get the color to use for the top border
/// </summary>
/// <returns></returns>
public XSSFColor TopBorderXSSFColor
{
get
{
if (!_cellXf.applyBorder) return null;
int idx = (int)_cellXf.borderId;
XSSFCellBorder border = _stylesSource.GetBorderAt(idx);
return border.GetBorderColor(BorderSide.TOP);
}
}
/// <summary>
/// Get the type of vertical alignment for the cell
/// </summary>
public VerticalAlignment VerticalAlignment
{
get
{
return GetVerticalAlignmentEnum();
}
set
{
GetCellAlignment().Vertical = value;
}
}
/// <summary>
/// Get the type of vertical alignment for the cell
/// </summary>
/// <returns></returns>
internal VerticalAlignment GetVerticalAlignmentEnum()
{
CT_CellAlignment align = _cellXf.alignment;
if (align != null && align.IsSetVertical())
{
return (VerticalAlignment)align.vertical;
}
return VerticalAlignment.Bottom;
}
/// <summary>
/// Whether the text in a cell should be line-wrapped within the cell.
/// </summary>
public bool WrapText
{
get
{
CT_CellAlignment align = _cellXf.alignment;
return align != null && align.wrapText;
}
set
{
GetCellAlignment().WrapText = value;
}
}
/**
* Set the color to use for the bottom border
*
* @param color the color to use, null means no color
*/
public void SetBottomBorderColor(XSSFColor color)
{
CT_Border ct = GetCTBorder();
if (color == null && !ct.IsSetBottom()) return;
CT_BorderPr pr = ct.IsSetBottom() ? ct.bottom : ct.AddNewBottom();
if (color != null) pr.SetColor(color.GetCTColor());
else pr.UnsetColor();
int idx = _stylesSource.PutBorder(new XSSFCellBorder(ct, _theme));
_cellXf.borderId = (uint)idx;
_cellXf.applyBorder = (true);
}
/**
* Set the background fill color represented as a {@link XSSFColor} value.
* <p>
* For example:
* <pre>
* cs.SetFillPattern(XSSFCellStyle.FINE_DOTS );
* cs.SetFillBackgroundXSSFColor(new XSSFColor(java.awt.Color.RED));
* </pre>
* optionally a Foreground and background fill can be applied:
* <i>Note: Ensure Foreground color is set prior to background</i>
* <pre>
* cs.SetFillPattern(XSSFCellStyle.FINE_DOTS );
* cs.SetFillForegroundColor(new XSSFColor(java.awt.Color.BLUE));
* cs.SetFillBackgroundColor(new XSSFColor(java.awt.Color.GREEN));
* </pre>
* or, for the special case of SOLID_FILL:
* <pre>
* cs.SetFillPattern(XSSFCellStyle.SOLID_FOREGROUND );
* cs.SetFillForegroundColor(new XSSFColor(java.awt.Color.GREEN));
* </pre>
* It is necessary to set the fill style in order
* for the color to be shown in the cell.
*
* @param color - the color to use
*/
public void SetFillBackgroundColor(XSSFColor color)
{
CT_Fill ct = GetCTFill();
CT_PatternFill ptrn = ct.GetPatternFill();
if (color == null)
{
if (ptrn != null) ptrn.UnsetBgColor();
}
else
{
if (ptrn == null) ptrn = ct.AddNewPatternFill();
ptrn.bgColor = color.GetCTColor();
}
int idx = _stylesSource.PutFill(new XSSFCellFill(ct));
_cellXf.fillId = (uint)idx;
_cellXf.applyFill = (true);
}
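/*
* Illustrative C# sketch of the pattern described above (the javadoc example still shows the Java POI
* constants such as XSSFCellStyle.FINE_DOTS and java.awt.Color). The "workbook" variable and the RGB
* values are assumptions for the example:
*
* XSSFCellStyle style = (XSSFCellStyle)workbook.CreateCellStyle();
* style.FillPattern = FillPattern.FineDots;
* style.SetFillForegroundColor(new XSSFColor(new byte[] { 0, 0, 255 })); // foreground first (blue)
* style.SetFillBackgroundColor(new XSSFColor(new byte[] { 0, 255, 0 })); // then background (green)
*
* // or, for the common solid fill:
* style.FillPattern = FillPattern.SolidForeground;
* style.SetFillForegroundColor(new XSSFColor(new byte[] { 0, 255, 0 }));
*/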
/**
* Set the foreground fill color represented as a {@link XSSFColor} value.
* <br/>
* <i>Note: Ensure Foreground color is Set prior to background color.</i>
* @param color the color to use
* @see #setFillBackgroundColor(NPOI.xssf.usermodel.XSSFColor) )
*/
public void SetFillForegroundColor(XSSFColor color)
{
CT_Fill ct = GetCTFill();
CT_PatternFill ptrn = ct.GetPatternFill();
if (color == null)
{
if (ptrn != null) ptrn.UnsetFgColor();
}
else
{
if (ptrn == null) ptrn = ct.AddNewPatternFill();
ptrn.fgColor = (color.GetCTColor());
}
int idx = _stylesSource.PutFill(new XSSFCellFill(ct));
_cellXf.fillId = (uint)idx;
_cellXf.applyFill = (true);
}
/**
* Get a <b>copy</b> of the currently used CT_Fill, if none is used, return a new instance.
*/
public CT_Fill GetCTFill()
{
CT_Fill ct;
// bug 56295: handle missing applyFill attribute as "true" because Excel does as well
if (!_cellXf.IsSetApplyFill() || _cellXf.applyFill)
{
int FillIndex = (int)_cellXf.fillId;
XSSFCellFill cf = _stylesSource.GetFillAt(FillIndex);
ct = (CT_Fill)cf.GetCTFill().Copy();
}
else
{
ct = new CT_Fill();
}
return ct;
}
/**
* Get a <b>copy</b> of the currently used CT_Border, if none is used, return a new instance.
*/
public CT_Border GetCTBorder()
{
CT_Border ctBorder;
if (_cellXf.applyBorder)
{
int idx = (int)_cellXf.borderId;
XSSFCellBorder cf = _stylesSource.GetBorderAt(idx);
ctBorder = (CT_Border)cf.GetCTBorder().Copy();
}
else
{
ctBorder = new CT_Border();
ctBorder.AddNewLeft();
ctBorder.AddNewRight();
ctBorder.AddNewTop();
ctBorder.AddNewBottom();
ctBorder.AddNewDiagonal();
}
return ctBorder;
}
/**
* Set the font for this style
*
* @param font a font object created or retrieved from the XSSFWorkbook object
* @see NPOI.xssf.usermodel.XSSFWorkbook#CreateFont()
* @see NPOI.xssf.usermodel.XSSFWorkbook#getFontAt(short)
*/
public void SetFont(IFont font)
{
if (font != null)
{
long index = font.Index;
this._cellXf.fontId = (uint)index;
this._cellXf.fontIdSpecified = true;
this._cellXf.applyFont = (true);
}
else
{
this._cellXf.applyFont = (false);
}
}
public void SetDiagonalBorderColor(XSSFColor color)
{
CT_Border ct = GetCTBorder();
if (color == null && !ct.IsSetDiagonal()) return;
CT_BorderPr pr = ct.IsSetDiagonal() ? ct.diagonal : ct.AddNewDiagonal();
if (color != null) pr.color = (color.GetCTColor());
else pr.UnsetColor();
int idx = _stylesSource.PutBorder(new XSSFCellBorder(ct, _theme));
_cellXf.borderId = (uint)idx;
_cellXf.applyBorder = (true);
}
/**
* Set the color to use for the left border as a {@link XSSFColor} value
*
* @param color the color to use
*/
public void SetLeftBorderColor(XSSFColor color)
{
CT_Border ct = GetCTBorder();
if (color == null && !ct.IsSetLeft()) return;
CT_BorderPr pr = ct.IsSetLeft() ? ct.left : ct.AddNewLeft();
if (color != null) pr.color = (color.GetCTColor());
else pr.UnsetColor();
int idx = _stylesSource.PutBorder(new XSSFCellBorder(ct, _theme));
_cellXf.borderId = (uint)idx;
_cellXf.applyBorder = (true);
}
/**
* Set the color to use for the right border as a {@link XSSFColor} value
*
* @param color the color to use
*/
public void SetRightBorderColor(XSSFColor color)
{
CT_Border ct = GetCTBorder();
if (color == null && !ct.IsSetRight()) return;
CT_BorderPr pr = ct.IsSetRight() ? ct.right : ct.AddNewRight();
if (color != null) pr.color = (color.GetCTColor());
else pr.UnsetColor();
int idx = _stylesSource.PutBorder(new XSSFCellBorder(ct, _theme));
_cellXf.borderId = (uint)(idx);
_cellXf.applyBorder = (true);
}
/**
* Set the color to use for the top border as a {@link XSSFColor} value
*
* @param color the color to use
*/
public void SetTopBorderColor(XSSFColor color)
{
CT_Border ct = GetCTBorder();
if (color == null && !ct.IsSetTop()) return;
CT_BorderPr pr = ct.IsSetTop() ? ct.top : ct.AddNewTop();
if (color != null) pr.color = color.GetCTColor();
else pr.UnsetColor();
int idx = _stylesSource.PutBorder(new XSSFCellBorder(ct, _theme));
_cellXf.borderId = (uint)idx;
_cellXf.applyBorder = (true);
}
/**
* Set the type of vertical alignment for the cell
*
* @param align the type of vertical alignment
* @see NPOI.ss.usermodel.CellStyle#VERTICAL_TOP
* @see NPOI.ss.usermodel.CellStyle#VERTICAL_CENTER
* @see NPOI.ss.usermodel.CellStyle#VERTICAL_BOTTOM
* @see NPOI.ss.usermodel.CellStyle#VERTICAL_JUSTIFY
* @see NPOI.ss.usermodel.VerticalAlignment
*/
public void SetVerticalAlignment(short align)
{
GetCellAlignment().Vertical = (VerticalAlignment)align;
}
/**
* Gets border color
*
* @param side the border side
* @return the used color
*/
public XSSFColor GetBorderColor(BorderSide side)
{
switch (side)
{
case BorderSide.BOTTOM:
return BottomBorderXSSFColor;
case BorderSide.RIGHT:
return RightBorderXSSFColor;
case BorderSide.TOP:
return TopBorderXSSFColor;
case BorderSide.LEFT:
return LeftBorderXSSFColor;
default:
throw new ArgumentException("Unknown border: " + side);
}
}
/**
* Set the color to use for the selected border
*
* @param side - where to apply the color definition
* @param color - the color to use
*/
public void SetBorderColor(BorderSide side, XSSFColor color)
{
switch (side)
{
case BorderSide.BOTTOM:
SetBottomBorderColor(color);
break;
case BorderSide.RIGHT:
SetRightBorderColor(color);
break;
case BorderSide.TOP:
SetTopBorderColor(color);
break;
case BorderSide.LEFT:
SetLeftBorderColor(color);
break;
}
}
private int FontId
{
get
{
if (_cellXf.IsSetFontId())
{
return (int)_cellXf.fontId;
}
return (int)_cellStyleXf.fontId;
}
}
/**
* Get the cellAlignment object to use for manage alignment
* @return XSSFCellAlignment - cell alignment
*/
internal XSSFCellAlignment GetCellAlignment()
{
if (this._cellAlignment == null)
{
this._cellAlignment = new XSSFCellAlignment(GetCTCellAlignment());
}
return this._cellAlignment;
}
/**
* Return the CT_CellAlignment instance for alignment
*
* @return CT_CellAlignment
*/
internal CT_CellAlignment GetCTCellAlignment()
{
if (_cellXf.alignment == null)
{
_cellXf.alignment = new CT_CellAlignment();
}
return _cellXf.alignment;
}
/**
* Returns a hash code value for the object. The hash is derived from the underlying CT_Xf bean.
*
* @return the hash code value for this style
*/
public override int GetHashCode()
{
return _cellXf.ToString().GetHashCode();
}
/**
* Checks if the supplied style is equal to this style
*
* @param o the style to check
* @return true if the supplied style is equal to this style
*/
public override bool Equals(Object o)
{
if (o == null || !(o is XSSFCellStyle)) return false;
XSSFCellStyle cf = (XSSFCellStyle)o;
return _cellXf.ToString().Equals(cf.GetCoreXf().ToString());
}
/**
* Make a copy of this style. The underlying CT_Xf bean is Cloned,
* the references to Fills and borders remain.
*
* @return a copy of this style
*/
public Object Clone()
{
CT_Xf xf = (CT_Xf)_cellXf.Copy();
int xfSize = _stylesSource.StyleXfsSize;
int indexXf = _stylesSource.PutCellXf(xf);
return new XSSFCellStyle(indexXf - 1, xfSize - 1, _stylesSource, _theme);
}
#region ICellStyle Members
public IFont GetFont(IWorkbook parentWorkbook)
{
return this.GetFont();
}
public bool ShrinkToFit
{
get
{
CT_CellAlignment align = _cellXf.alignment;
return align != null && align.shrinkToFit;
}
set
{
GetCTCellAlignment().shrinkToFit = value;
}
}
public short BorderDiagonalColor
{
get
{
XSSFColor clr = DiagonalBorderXSSFColor;
return clr == null ? IndexedColors.Black.Index : clr.Indexed;
}
set
{
XSSFColor clr = new XSSFColor();
clr.Indexed = (value);
SetDiagonalBorderColor(clr);
}
}
public BorderStyle BorderDiagonalLineStyle
{
get
{
if (!_cellXf.applyBorder) return BorderStyle.None;
int idx = (int)_cellXf.borderId;
CT_Border ct = _stylesSource.GetBorderAt(idx).GetCTBorder();
if (!ct.IsSetDiagonal())
{
return BorderStyle.None;
}
else
{
return (BorderStyle)ct.diagonal.style;
}
}
set
{
CT_Border ct = GetCTBorder();
CT_BorderPr pr = ct.IsSetDiagonal() ? ct.diagonal : ct.AddNewDiagonal();
if (value == BorderStyle.None)
ct.unsetDiagonal();
else
pr.style = (ST_BorderStyle)value;
int idx = _stylesSource.PutBorder(new XSSFCellBorder(ct, _theme));
_cellXf.borderId = (uint)idx;
_cellXf.applyBorder = (true);
}
}
public BorderDiagonal BorderDiagonal
{
get
{
CT_Border ct = GetCTBorder();
if (ct.diagonalDown == true && ct.diagonalUp == true)
return BorderDiagonal.Both;
else if (ct.diagonalDown == true)
return BorderDiagonal.Backward;
else if (ct.diagonalUp == true)
return BorderDiagonal.Forward;
else
return BorderDiagonal.None;
}
set
{
CT_Border ct = GetCTBorder();
if (value == BorderDiagonal.Both)
{
ct.diagonalDown = true;
ct.diagonalDownSpecified = true;
ct.diagonalUp = true;
ct.diagonalUpSpecified = true;
}
else if (value == BorderDiagonal.Forward)
{
ct.diagonalDown = false;
ct.diagonalDownSpecified = false;
ct.diagonalUp = true;
ct.diagonalUpSpecified = true;
}
else if (value == BorderDiagonal.Backward)
{
ct.diagonalDown = true;
ct.diagonalDownSpecified = true;
ct.diagonalUp = false;
ct.diagonalUpSpecified = false;
}
else
{
ct.unsetDiagonal();
ct.diagonalDown = false;
ct.diagonalDownSpecified = false;
ct.diagonalUp = false;
ct.diagonalUpSpecified = false;
}
}
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/******************************************************************************
* This file is auto-generated from a template file by the GenerateTests.csx *
* script in tests\src\JIT\HardwareIntrinsics\X86\Shared. In order to make *
* changes, please update the corresponding template and run according to the *
* directions listed in the file. *
******************************************************************************/
using System;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Intrinsics;
using System.Runtime.Intrinsics.X86;
namespace JIT.HardwareIntrinsics.X86
{
public static partial class Program
{
private static void AndNotDouble()
{
var test = new SimpleBinaryOpTest__AndNotDouble();
if (test.IsSupported)
{
// Validates basic functionality works, using Unsafe.Read
test.RunBasicScenario_UnsafeRead();
// Validates basic functionality works, using Load
test.RunBasicScenario_Load();
// Validates basic functionality works, using LoadAligned
test.RunBasicScenario_LoadAligned();
// Validates calling via reflection works, using Unsafe.Read
test.RunReflectionScenario_UnsafeRead();
// Validates calling via reflection works, using Load
test.RunReflectionScenario_Load();
// Validates calling via reflection works, using LoadAligned
test.RunReflectionScenario_LoadAligned();
// Validates passing a static member works
test.RunClsVarScenario();
// Validates passing a local works, using Unsafe.Read
test.RunLclVarScenario_UnsafeRead();
// Validates passing a local works, using Load
test.RunLclVarScenario_Load();
// Validates passing a local works, using LoadAligned
test.RunLclVarScenario_LoadAligned();
// Validates passing the field of a local works
test.RunLclFldScenario();
// Validates passing an instance member works
test.RunFldScenario();
}
else
{
// Validates we throw on unsupported hardware
test.RunUnsupportedScenario();
}
if (!test.Succeeded)
{
throw new Exception("One or more scenarios did not complete as expected.");
}
}
}
public sealed unsafe class SimpleBinaryOpTest__AndNotDouble
{
private const int VectorSize = 32;
private const int ElementCount = VectorSize / sizeof(Double);
private static Double[] _data1 = new Double[ElementCount];
private static Double[] _data2 = new Double[ElementCount];
private static Vector256<Double> _clsVar1;
private static Vector256<Double> _clsVar2;
private Vector256<Double> _fld1;
private Vector256<Double> _fld2;
private SimpleBinaryOpTest__DataTable<Double> _dataTable;
static SimpleBinaryOpTest__AndNotDouble()
{
var random = new Random();
for (var i = 0; i < ElementCount; i++) { _data1[i] = (double)(random.NextDouble()); _data2[i] = (double)(random.NextDouble()); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Double>, byte>(ref _clsVar1), ref Unsafe.As<Double, byte>(ref _data2[0]), VectorSize);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Double>, byte>(ref _clsVar2), ref Unsafe.As<Double, byte>(ref _data1[0]), VectorSize);
}
public SimpleBinaryOpTest__AndNotDouble()
{
Succeeded = true;
var random = new Random();
for (var i = 0; i < ElementCount; i++) { _data1[i] = (double)(random.NextDouble()); _data2[i] = (double)(random.NextDouble()); }
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Double>, byte>(ref _fld1), ref Unsafe.As<Double, byte>(ref _data1[0]), VectorSize);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Vector256<Double>, byte>(ref _fld2), ref Unsafe.As<Double, byte>(ref _data2[0]), VectorSize);
for (var i = 0; i < ElementCount; i++) { _data1[i] = (double)(random.NextDouble()); _data2[i] = (double)(random.NextDouble()); }
_dataTable = new SimpleBinaryOpTest__DataTable<Double>(_data1, _data2, new Double[ElementCount], VectorSize);
}
public bool IsSupported => Avx.IsSupported;
public bool Succeeded { get; set; }
public void RunBasicScenario_UnsafeRead()
{
var result = Avx.AndNot(
Unsafe.Read<Vector256<Double>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector256<Double>>(_dataTable.inArray2Ptr)
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_Load()
{
var result = Avx.AndNot(
Avx.LoadVector256((Double*)(_dataTable.inArray1Ptr)),
Avx.LoadVector256((Double*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunBasicScenario_LoadAligned()
{
var result = Avx.AndNot(
Avx.LoadAlignedVector256((Double*)(_dataTable.inArray1Ptr)),
Avx.LoadAlignedVector256((Double*)(_dataTable.inArray2Ptr))
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_UnsafeRead()
{
var result = typeof(Avx).GetMethod(nameof(Avx.AndNot), new Type[] { typeof(Vector256<Double>), typeof(Vector256<Double>) })
.Invoke(null, new object[] {
Unsafe.Read<Vector256<Double>>(_dataTable.inArray1Ptr),
Unsafe.Read<Vector256<Double>>(_dataTable.inArray2Ptr)
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Double>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_Load()
{
var result = typeof(Avx).GetMethod(nameof(Avx.AndNot), new Type[] { typeof(Vector256<Double>), typeof(Vector256<Double>) })
.Invoke(null, new object[] {
Avx.LoadVector256((Double*)(_dataTable.inArray1Ptr)),
Avx.LoadVector256((Double*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Double>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunReflectionScenario_LoadAligned()
{
var result = typeof(Avx).GetMethod(nameof(Avx.AndNot), new Type[] { typeof(Vector256<Double>), typeof(Vector256<Double>) })
.Invoke(null, new object[] {
Avx.LoadAlignedVector256((Double*)(_dataTable.inArray1Ptr)),
Avx.LoadAlignedVector256((Double*)(_dataTable.inArray2Ptr))
});
Unsafe.Write(_dataTable.outArrayPtr, (Vector256<Double>)(result));
ValidateResult(_dataTable.inArray1Ptr, _dataTable.inArray2Ptr, _dataTable.outArrayPtr);
}
public void RunClsVarScenario()
{
var result = Avx.AndNot(
_clsVar1,
_clsVar2
);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_clsVar1, _clsVar2, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_UnsafeRead()
{
var left = Unsafe.Read<Vector256<Double>>(_dataTable.inArray1Ptr);
var right = Unsafe.Read<Vector256<Double>>(_dataTable.inArray2Ptr);
var result = Avx.AndNot(left, right);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(left, right, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_Load()
{
var left = Avx.LoadVector256((Double*)(_dataTable.inArray1Ptr));
var right = Avx.LoadVector256((Double*)(_dataTable.inArray2Ptr));
var result = Avx.AndNot(left, right);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(left, right, _dataTable.outArrayPtr);
}
public void RunLclVarScenario_LoadAligned()
{
var left = Avx.LoadAlignedVector256((Double*)(_dataTable.inArray1Ptr));
var right = Avx.LoadAlignedVector256((Double*)(_dataTable.inArray2Ptr));
var result = Avx.AndNot(left, right);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(left, right, _dataTable.outArrayPtr);
}
public void RunLclFldScenario()
{
var test = new SimpleBinaryOpTest__AndNotDouble();
var result = Avx.AndNot(test._fld1, test._fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(test._fld1, test._fld2, _dataTable.outArrayPtr);
}
public void RunFldScenario()
{
var result = Avx.AndNot(_fld1, _fld2);
Unsafe.Write(_dataTable.outArrayPtr, result);
ValidateResult(_fld1, _fld2, _dataTable.outArrayPtr);
}
public void RunUnsupportedScenario()
{
Succeeded = false;
try
{
RunBasicScenario_UnsafeRead();
}
catch (PlatformNotSupportedException)
{
Succeeded = true;
}
}
private void ValidateResult(Vector256<Double> left, Vector256<Double> right, void* result, [CallerMemberName] string method = "")
{
Double[] inArray1 = new Double[ElementCount];
Double[] inArray2 = new Double[ElementCount];
Double[] outArray = new Double[ElementCount];
Unsafe.Write(Unsafe.AsPointer(ref inArray1[0]), left);
Unsafe.Write(Unsafe.AsPointer(ref inArray2[0]), right);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(void* left, void* right, void* result, [CallerMemberName] string method = "")
{
Double[] inArray1 = new Double[ElementCount];
Double[] inArray2 = new Double[ElementCount];
Double[] outArray = new Double[ElementCount];
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref inArray1[0]), ref Unsafe.AsRef<byte>(left), VectorSize);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref inArray2[0]), ref Unsafe.AsRef<byte>(right), VectorSize);
Unsafe.CopyBlockUnaligned(ref Unsafe.As<Double, byte>(ref outArray[0]), ref Unsafe.AsRef<byte>(result), VectorSize);
ValidateResult(inArray1, inArray2, outArray, method);
}
private void ValidateResult(Double[] left, Double[] right, Double[] result, [CallerMemberName] string method = "")
{
if (((~BitConverter.DoubleToInt64Bits(left[0])) & BitConverter.DoubleToInt64Bits(right[0])) != BitConverter.DoubleToInt64Bits(result[0]))
{
Succeeded = false;
}
else
{
for (var i = 1; i < left.Length; i++)
{
if (((~BitConverter.DoubleToInt64Bits(left[i])) & BitConverter.DoubleToInt64Bits(right[i])) != BitConverter.DoubleToInt64Bits(result[i]))
{
Succeeded = false;
break;
}
}
}
if (!Succeeded)
{
Console.WriteLine($"{nameof(Avx)}.{nameof(Avx.AndNot)}<Double>: {method} failed:");
Console.WriteLine($" left: ({string.Join(", ", left)})");
Console.WriteLine($" right: ({string.Join(", ", right)})");
Console.WriteLine($" result: ({string.Join(", ", result)})");
Console.WriteLine();
}
}
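// Note: Avx.AndNot computes (~left) & right bitwise for each element, which is why the checks above
// reinterpret each double as its Int64 bit pattern before comparing against the produced result.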
}
}
| |
using System.Collections;
using System;
using UnityEngine;
using System.Xml;
/*!
* \brief Manages promoter reactions
* \details This class manages all the promoter reactions.
A promoter reaction represents the behaviour of a promoter and of the transcription that it manages (Device).
The promoter responds to a logic input function that should respect the syntax below.
Input function:
==============
Grammar :
EXPR ::= ANDEXPR [OP_OR OREXPR]
ANDEXPR ::= PAREXPR [OP_AND ANDEXPR]
PAREXPR ::= (NOTEXPR | OP_LPAR OREXPR OP_RPAR)
NOTEXPR ::= [OP_NOT] (OPERANDEXPR | BOOL_EXPR)
BOOL_EXPR :: = (OP_TRUE | OP_FALSE)
OPERANDEXPR ::= CONSTANTEXPR WORD
CONSTANTEXPR ::= OP_LHOOK FLOATNUMBER OP_COMMA FLOATNUMBER OP_RHOOK
WORD ::= CHAR [CHAR | NUMBER]
NUMBER ::= (0|1|2|3|4|5|6|7|8|9) [NUMBER]
CHAR ::= (a-z,A-Z)
FLOATNUMBER ::= NUMBER [OP_DOT NUMBER]
Default Operators:
OP_OR ::= "|"
OP_AND ::= "*"
OP_LPAR ::= "("
OP_RPAR ::= ")"
OP_NOT ::= "!"
OP_TRUE :: = "T"
OP_FALSE ::= "F"
OP_LHOOK ::= "["
OP_RHOOK ::= "]"
OP_COMMA ::= ","
OP_DOT ::= "."
Examples :
- T Always true
- F Always false
- [1.2,1]X Activated when [X] >= 1.2 with steepness = 1
- [1.3,1]X*([2.4,2]Y|[2.5,1]Z) Activated when [X] >= 1.3 with steepness = 1 AND
([Y] >= 2.4 with steepness = 2 OR
[Z] >= 2.5 with steepness = 1)
- !([1.3,2]X|[1.4,1]Y) Activated when [X] <= 1.3 with steepness = 2 AND
[Y] <= 1.4 with steepness = 1
(approximately: the NOT node returns 1 minus its operand, so the response is continuous; see the execution rules below)
- ![0.8,1]LacI*[3.4,2]GFP Activated when [LacI] <= 0.8 with steepness = 1 AND
[GFP] >= 3.4 with steepness = 2
Syntax tree and execution:
==========================
A tree is built from the grammar above.
Example :
- [1.3,2]X*![1.4,1]Y creates the tree below:
            AND
             |
     -----------------
     |               |
     C              NOT
  -------            |
  |     |            C
 1.3    X        ---------
  |              |       |
  2             1.4      Y
                 |
                 1
Execution :
The tree is executed with a left-recursive traversal.
Each kind of node is executed by a specific function, as follows:
- AND (*) node: returns Min(leftNode, rightNode)
- OR (|) node: returns Max(leftNode, rightNode)
- NOT (!) node: returns 1 - leftNode
- constant (C) node: returns hill_function with K parameter = leftNode and n parameter = leftNode of leftNode
- Transcription factor node: returns the concentration of the concerned transcription factor
(X or Y in the tree above)
- Value node (1.3, 2, 1.4 and 1 above): returns the value that it contains.
/!\ hill_function = [X]^n / (K + [X]^n)
This function can be replaced by a step function that corresponds to a Hill function with n = +inf
A Device will transcribe the whole operon and so increase the concentration of the molecules that it produces.
In order to do this, it needs these parameters:
- Beta -> maximal production rate
- Terminator factor -> between 0-1, describes the probability that the terminator stops the transcription
- formula -> the result value of the tree above
- Operon : - The molecule that is transcribed
- The RBS factor (RBSf), between 0-1, corresponds to the RBS affinity with the ribosomes
To see how the computation is done, refer to the react() function of this class.
\attention To understand how to build a PromoterReaction, refer to the PromoterLoader class
*/
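/*
Worked example with illustrative numbers: for the formula [1.3,2]X*![1.4,1]Y with [X] = 2.6 and [Y] = 0.7,
the left constant node gives 2.6^2 / (1.3 + 2.6^2) = 6.76 / 8.06 ~= 0.84, the right branch gives
1 - (0.7 / (1.4 + 0.7)) ~= 0.67, and the AND node returns min(0.84, 0.67) = 0.67.
*/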
using System.Collections.Generic;
public class PromoterReaction : Reaction
{
private float _terminatorFactor;
//! Determines the reliability of the terminator (0-1, corresponding to 0% to 100%)
private TreeNode<PromoterNodeData> _formula;
//! The formula described in the detailed description
protected float _beta;
//! The maximal production of the promoter
public void setBeta (float beta)
{
_beta = beta;
}
public float getBeta ()
{
return _beta;
}
public void setTerminatorFactor (float v)
{
_terminatorFactor = v;
}
public float getTerminatorFactor ()
{
return _terminatorFactor;
}
public void setFormula (TreeNode<PromoterNodeData> tree)
{
_formula = tree;
}
public TreeNode<PromoterNodeData> getFormula ()
{
return _formula;
}
private static PromoterParser _parser = new PromoterParser ();
//!< The Formula Parser
private bool _debug = false;
//! Default Constructor
public PromoterReaction ()
{
}
//! Copy constructor
public PromoterReaction (PromoterReaction r) : base (r)
{
_terminatorFactor = r._terminatorFactor;
_formula = r._formula;
_beta = r._beta;
}
//TODO improve this
private bool formulaEquals (TreeNode<PromoterNodeData> formula1, TreeNode<PromoterNodeData> formula2)
{
string f1 = Logger.ToString<PromoterNodeData> (formula1);
string f2 = Logger.ToString<PromoterNodeData> (formula2);
// Debug.Log(this.GetType() + " formulaEquals (f1==f2)="+(f1==f2)+"f1="+f1+", f2="+f2);
return f1 == f2;
}
/*!
\brief Checks that two reactions have the same PromoterReaction field values.
\param reaction The reaction that will be compared to 'this'.
*/
protected override bool PartialEquals (Reaction reaction)
{
PromoterReaction promoter = reaction as PromoterReaction;
bool bnullProm = (promoter != null);
bool btermFac = (_terminatorFactor == promoter._terminatorFactor);
bool bformula = formulaEquals (_formula, promoter._formula);
bool bbeta = (_beta == promoter._beta);
// Debug.Log(this.GetType() + " PartialEquals"
// +", bnullProm="+bnullProm
// +", btermFac="+btermFac
// +", bformula="+bformula
// +", bbeta="+bbeta
// );
return (promoter != null)
&& base.PartialEquals (reaction)
&& (_terminatorFactor == promoter._terminatorFactor)
//&& _formula.Equals(promoter._formula)
&& formulaEquals (_formula, promoter._formula)
&& (_beta == promoter._beta);
}
/*!
\brief Builds a PromoterReaction from a PromoterProperties instance
\param props The PromoterProperties used to create the reaction
\return The new reaction, or null if it fails.
*/
public static Reaction buildPromoterFromProps (PromoterProperties props)
{
if (props == null)
return null;
PromoterParser parser = new PromoterParser ();
PromoterReaction reaction = new PromoterReaction ();
reaction.setName (props.name);
reaction.setBeta (props.beta);
reaction.setTerminatorFactor (props.terminatorFactor);
reaction.setEnergyCost (props.energyCost);
TreeNode<PromoterNodeData> formula = parser.Parse (props.formula);
reaction.setFormula (formula);
Product newProd;
foreach (Product p in props.products) {
newProd = new Product (p);
reaction.addProduct (newProd);
}
return reaction;
}
/*!
Implementation of a Hill function
\param K Threshold value
\param concentration Quantity of the molecule
\param n Steepness parameter
*/
public static float hillFunc (float K, float concentration, double n)
{
return (float)(Math.Pow (concentration, n) / (K + Math.Pow (concentration, n)));
}
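// Example: hillFunc(1.3f, 2.6f, 2) = 2.6^2 / (1.3 + 2.6^2) ~= 0.84.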
/*!
Implementation of a step function
\param K Threshold value
\param concentration Quantity of the molecule
*/
public static float stepFunc (float K, float concentration)
{
if (concentration > K)
return 1f;
return 0f;
}
/*!
Execute a Node of type : Constant
\param node The node of the tree to execute
\param molecules The list of molecules
\return The result of the hill function.
*/
private float execConstant (TreeNode<PromoterNodeData> node, Dictionary<string, Molecule> molecules)
{
if (node == null)
return 0f;
if (node.getRightNode ().getData ().token == PromoterParser.eNodeType.BOOL)
return execBool (node.getRightNode ());
Molecule mol = execWord (node.getRightNode (), molecules);
float K = execNum (node.getLeftNode (), molecules);
float n = 1f;
if (node.getLeftNode () != null && node.getLeftNode ().getLeftNode () != null)
n = execNum (node.getLeftNode ().getLeftNode (), molecules);
return hillFunc (K, mol.getConcentration (), n);
}
/*!
Execute a Node of type : Word
\param node The node of the tree to execute
\param molecules The list of molecules
\return The molecule referenced by the node.
*/
private Molecule execWord (TreeNode<PromoterNodeData> node, Dictionary<string, Molecule> molecules)
{
if (node == null || molecules == null)
return null;
return ReactionEngine.getMoleculeFromName (node.getData ().value, molecules);
}
/*!
Execute a Node of type : Bool
\param node The node of the tree to execute
\return 1 if the value of the node is True, 0 otherwise
*/
private float execBool (TreeNode<PromoterNodeData> node)
{
if (node == null)
return 0f;
if (node.getData ().value == "T")
return 1f;
return 0f;
}
/*!
Execute a Node of type : Num
\param node The node of the tree to execute
\param molecules The list of molecules
\return The value that contains the node
*/
private float execNum (TreeNode<PromoterNodeData> node, Dictionary<string, Molecule> molecules)
{
if (node == null || molecules == null)
return 0f;
return parseFloat (node.getData ().value);
}
/*!
Execute a Node.
\param node The node of the tree to execute
\param molecules The list of molecules
\return The result of the function
*/
private float execNode (TreeNode<PromoterNodeData> node, Dictionary<string, Molecule> molecules)
{
if (node != null) {
if (node.getData ().token == PromoterParser.eNodeType.OR)
return Math.Max (execNode (node.getLeftNode (), molecules), execNode (node.getRightNode (), molecules));
else if (node.getData ().token == PromoterParser.eNodeType.AND)
return Math.Min (execNode (node.getLeftNode (), molecules), execNode (node.getRightNode (), molecules));
else if (node.getData ().token == PromoterParser.eNodeType.NOT)
return 1f - execNode (node.getLeftNode (), molecules);
else if (node.getData ().token == PromoterParser.eNodeType.CONSTANT)
return execConstant (node, molecules);
else if (node.getData ().token == PromoterParser.eNodeType.BOOL)
return execBool (node);
else if (node.getData ().token == PromoterParser.eNodeType.WORD) {
Molecule mol = ReactionEngine.getMoleculeFromName (node.getData ().value, molecules);
if (mol != null)
return mol.getConcentration ();
} else if (node.getData ().token == PromoterParser.eNodeType.NUM)
return parseFloat (node.getData ().value);
}
return 1.0f;
}
/*!
\brief Execute a promoter reaction as described in the detailed description
\details Once the tree is executed, the result is put in delta and used as follows :
For each Product P in the operon :
[P] += delta * RBSf * TerminatorFactor * beta (maximal production)
\param molecules The list of molecules
*/
public override void react (Dictionary<string, Molecule> molecules)
{
if (!_isActive) {
// if(_debug) Debug.Log(this.GetType() + " react !_isActive");
return;
}
float delta = execNode (_formula, molecules);
float energyCoef;
float energyCostTot;
if (delta > 0f && _energyCost > 0f && enableEnergy) {
energyCostTot = _energyCost * delta;
energyCoef = _medium.getEnergy () / energyCostTot;
if (energyCoef > 1f)
energyCoef = 1f;
_medium.subEnergy (energyCostTot);
} else
energyCoef = 1f;
delta *= energyCoef;
foreach (Product pro in _products) {
// if(_debug) Debug.Log(this.GetType() + " react product="+pro);
Molecule mol = ReactionEngine.getMoleculeFromName (pro.getName (), molecules);
// if( mol == null) Debug.Log("mol is null, pro.getName()="+pro.getName()+", molecules="+molecules.ToString());
// if( pro == null) Debug.Log("pro is null");
float increase = delta * pro.v * _terminatorFactor * _beta
* ReactionEngine.reactionSpeed * Time.deltaTime * _reactionSpeed;
// if(_debug) Debug.Log(this.GetType() + " react increase="+increase
// +", delta:"+delta
// +", qFactor:"+pro.v
// +", tFactor:"+_terminatorFactor
// +", beta:"+_beta
// +", reactionsSpeed:"+ReactionEngine.reactionsSpeed
// +", reactionSpeed:"+_reactionSpeed
// );
if (enableSequential) {
float oldCC = mol.getConcentration ();
mol.addConcentration (increase);
float newCC = mol.getConcentration ();
// if(_debug) Debug.Log(this.GetType() + " react ["+mol.getName()+"]old="+oldCC+" ["+mol.getName()+"]new="+newCC);
} else {
mol.addNewConcentration (increase);
// if(_debug) Debug.Log(this.GetType() + " react ["+mol.getName()+"]="+mol.getConcentration()+" addNewConcentration("+increase+")");
}
}
}
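// Illustrative magnitude check for the formula above: with delta = 0.84, RBSf (pro.v) = 0.12,
// terminator factor = 1 and beta = 100, the base production term is 0.84 * 0.12 * 1 * 100 ~= 10.1,
// which is then scaled by ReactionEngine.reactionSpeed, _reactionSpeed and Time.deltaTime.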
// Xml loading
/*!
\brief This class loads promoters reactions from xml files
\details
A PromoterReaction should respect this syntax:
<promoter>
<name>ptet</name> -> The name of the reaction
<productionMax>100</productionMax> -> The maximal production speed of the promoter
<terminatorFactor>1</terminatorFactor> -> between 0 and 1, represents the terminator reliability
<formula>![0.8,3]tetR</formula> -> The formula that manages the behaviour of the promoter (see PromoterReaction class for more info)
<EnergyCost>0.1</EnergyCost> -> The cost in energy
<operon>
<gene>
<name>RFP</name> -> The molecule name of a product
<RBSFactor>0.12</RBSFactor> -> The RBS factor that represents the affinity between Ribosome and RBS
</gene>
<gene>
<name>LacI</name>
<RBSFactor>0.12</RBSFactor>
</gene>
</operon>
</promoter>
\sa PromoterReaction
*/
public override bool tryInstantiateFromXml (XmlNode node)
{
bool b = true;
foreach (XmlNode attr in node) {
switch (attr.Name) {
case "name":
b = b && loadPromoterName (attr.InnerText);
break;
case "productionMax":
b = b && loadPromoterProductionMax (attr.InnerText);
break;
case "terminatorFactor":
b = b && loadPromoterTerminatorFactor (attr.InnerText);
break;
case "EnergyCost":
b = b && loadEnergyCost (attr.InnerText);
break;
case "formula":
b = b && loadPromoterFormula (attr.InnerText);
break;
case "operon":
b = b && loadPromoterOperon (attr);
break;
}
}
return b && hasValidData ();
}
public override bool hasValidData ()
{
bool valid = base.hasValidData ()
&& 0 <= _terminatorFactor //! Determines the reliability of the terminator (0-1, corresponding to 0% to 100%)
&& 1 >= _terminatorFactor
&& null != _formula; //! The formula described in the detailed description
if (valid) {
if (0 == _beta) { //! The maximal production of the promoter
Debug.LogWarning (this.GetType () + " hasValidData please check that you really intended a max production rate (beta) of 0 for promoter reaction " + this.getName ());
}
} else {
Debug.LogError (this.GetType () + " hasValidData base.hasValidData()=" + (base.hasValidData ())
+ " & 0 <= _terminatorFactor=" + (0 <= _terminatorFactor)
+ " & 1 >= _terminatorFactor=" + (1 >= _terminatorFactor)
+ " & null != _formula=" + (null != _formula)
+ " => valid=" + valid
);
}
return valid;
}
public override string ToString ()
{
return "Promoter[name:" + _name
+ ", beta:" + _beta
+ ", formula:" + Logger.ToString<PromoterNodeData> (_formula)
+ ", products:" + Logger.ToString<Product> (_products)
+ ", active:" + _isActive
+ ", medium:" + _medium
+ ", reactionSpeed:" + _reactionSpeed
+ ", energyCost:" + _energyCost
+ ", enableSequential:" + enableSequential
+ ", enableEnergy:" + enableEnergy
+ "]";
}
///////////////////////////////////////////////////////////////////////////
/// loading ///////////////////////////////////////////////////////////////
///////////////////////////////////////////////////////////////////////////
/*!
\brief Load promoter name by checking the validity of the given string
\param value The given name
\return Return true if succeeded and false if value parameter is invalid.
*/
private bool loadPromoterName (string value)
{
if (String.IsNullOrEmpty (value)) {
Debug.LogError (this.GetType () + " Empty name field");
return false;
}
setName (value);
return true;
}
/*!
\brief Load promoter maximal production speed by checking the validity of the given string
\param value The given maximal production
\return Return true if succeeded and false if value parameter is invalid.
*/
private bool loadPromoterProductionMax (string value)
{
if (String.IsNullOrEmpty (value)) {
Debug.LogError (this.GetType () + " Empty productionMax field");
return false;
}
setBeta (parseFloat (value));
return true;
}
/*!
\brief Load promoter terminator factor by checking the validity of the given string
\param value The given terminator factor
\return Return true if succeeded and false if value parameter is invalid.
*/
private bool loadPromoterTerminatorFactor (string value)
{
if (String.IsNullOrEmpty (value)) {
Debug.LogError (this.GetType () + " Empty TerminatorFactor field");
return false;
}
setTerminatorFactor (parseFloat (value));
return true;
}
/*!
\brief Load promoter energy cost by checking the validity of the given string
\param value The given energy cost
\return Return true if succeeded and false if value parameter is invalid.
*/
private bool loadEnergyCost (string value)
{
if (String.IsNullOrEmpty (value)) {
Debug.LogError (this.GetType () + " Empty EnergyCost field. default value = 0");
setEnergyCost (0f);
} else
setEnergyCost (parseFloat (value));
return true;
}
/*!
\brief Load promoter gene by checking the validity of the given strings
\param name The name of the molecule that the gene will produce
\param RBSf The Ribosome Binding Site factor string
\return Return true if succeeded and false if value parameter is invalid.
*/
private bool loadGene (string name, string RBSf)
{
if (String.IsNullOrEmpty (name) || String.IsNullOrEmpty (RBSf)) {
Debug.LogError (this.GetType () + " Empty Gene name field");
return false;
}
Product gene = new Product (name, parseFloat (RBSf));
addProduct (gene);
return true;
}
/*!
\brief Load promoter operon
\param node the xml node
\return Return true if succeeded and false if value parameter is invalid.
*/
private bool loadPromoterOperon (XmlNode node)
{
string name = null;
string RBSf = null;
bool n = false;
bool rbsf = false;
bool b = true;
foreach (XmlNode gene in node) {
n = false;
rbsf = false;
foreach (XmlNode attr in gene) {
switch (attr.Name) {
case "name":
name = attr.InnerText;
n = true;
break;
case "RBSFactor":
RBSf = attr.InnerText;
rbsf = true;
break;
}
}
if (n && rbsf)
b = b && loadGene (name, RBSf);
if (!n)
Debug.LogError (this.GetType () + " Missing Gene name in operon");
if (!rbsf)
Debug.LogError (this.GetType () + " Missing RBSfactor in operon");
}
return b;
}
/*!
\brief Load promoter formula by checking the validity of the given string
\param formula The given formula
\return Return true if succeeded and false if value parameter is invalid.
*/
private bool loadPromoterFormula (string formula)
{
TreeNode<PromoterNodeData> tree = _parser.Parse (formula);
if (tree == null) {
Debug.LogError (this.GetType () + " Syntax Error in promoter Formula");
return false;
}
setFormula (tree);
return true;
}
}
| |
//
// Copyright (C) DataStax Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using Cassandra.Connections;
using Cassandra.MetadataHelpers;
using Cassandra.ProtocolEvents;
using Cassandra.Tests.Connections;
using Cassandra.Tests.MetadataHelpers.TestHelpers;
using Moq;
using NUnit.Framework;
namespace Cassandra.Tests
{
[TestFixture]
public class TokenTests
{
[Test]
public void Murmur_Hash_Test()
{
//inputs and result values from Cassandra
var values = new Dictionary<byte[], M3PToken>()
{
{new byte[] {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}, new M3PToken(-5563837382979743776L)},
{new byte[] {2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17}, new M3PToken(-1513403162740402161L)},
{new byte[] {3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18}, new M3PToken(-495360443712684655L)},
{new byte[] {4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19}, new M3PToken(1734091135765407943L)},
{new byte[] {5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20}, new M3PToken(-3199412112042527988L)},
{new byte[] {6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21}, new M3PToken(-6316563938475080831L)},
{new byte[] {7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22}, new M3PToken(8228893370679682632L)},
{new byte[] {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, new M3PToken(5457549051747178710L)},
{new byte[] {255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}, new M3PToken(-2824192546314762522L)},
{new byte[] {254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254}, new M3PToken(-833317529301936754)},
{new byte[] {000, 001, 002, 003, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}, new M3PToken(6463632673159404390L)},
{new byte[] {254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254, 254}, new M3PToken(-1672437813826982685L)},
{new byte[] {254, 254, 254, 254}, new M3PToken(4566408979886474012L)},
{new byte[] {0, 0, 0, 0}, new M3PToken(-3485513579396041028L)},
{new byte[] {0, 1, 127, 127}, new M3PToken(6573459401642635627)},
{new byte[] {0, 255, 255, 255}, new M3PToken(123573637386978882)},
{new byte[] {255, 1, 2, 3}, new M3PToken(-2839127690952877842)},
{new byte[] {000, 001, 002, 003, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255}, new M3PToken(6463632673159404390L)},
{new byte[] {226, 231}, new M3PToken(-8582699461035929883L)},
{new byte[] {226, 231, 226, 231, 226, 231, 1}, new M3PToken(2222373981930033306)},
};
var factory = new M3PToken.M3PTokenFactory();
foreach (var kv in values)
{
Assert.AreEqual(kv.Value, factory.Hash(kv.Key));
}
}
[Test]
public void RandomPartitioner_Hash_Test()
{
//inputs and result values from Cassandra
Func<string, IToken> getToken = RPToken.Factory.Parse;
var values = new Dictionary<byte[], IToken>()
{
{new byte[] {0}, getToken("143927757573010354572009627285182898319")},
{new byte[] {1}, getToken("113842407384990359002707962975597223745")},
{new byte[] {2}, getToken("129721498153058668219395762571499089729")},
{new byte[] {3}, getToken("161634087634434392855851743730996420760")},
{new byte[] {1, 1, 1, 1, 1}, getToken("62826831507722661030027787191787718361")},
{new byte[] {1, 1, 1, 1, 3}, getToken("3280052967642184217852195524766331890")},
{new byte[] {1, 1, 1, 1, 3}, getToken("3280052967642184217852195524766331890")},
{TestHelper.HexToByteArray("00112233445566778899aabbccddeeff"), getToken("146895617013011042239963905141456044092")},
{TestHelper.HexToByteArray("00112233445566778899aabbccddeef0"), getToken("152768415488763703226794584233555130431")}
};
foreach (var kv in values)
{
Assert.AreEqual(kv.Value, RPToken.Factory.Hash(kv.Key));
Assert.AreEqual(kv.Value.ToString(), RPToken.Factory.Hash(kv.Key).ToString());
}
}
[Test]
public void TokenMap_SimpleStrategy_With_Keyspace_Test()
{
var hosts = new List<Host>
{
{ TestHelper.CreateHost("192.168.0.0", "dc1", "rack", new HashSet<string>{"0"})},
{ TestHelper.CreateHost("192.168.0.1", "dc1", "rack", new HashSet<string>{"10"})},
{ TestHelper.CreateHost("192.168.0.2", "dc1", "rack", new HashSet<string>{"20"})}
};
var keyspaces = new List<KeyspaceMetadata>
{
TokenTests.CreateSimpleKeyspace("ks1", 2),
TokenTests.CreateSimpleKeyspace("ks2", 10)
};
var tokenMap = TokenMap.Build("Murmur3Partitioner", hosts, keyspaces);
//the primary replica and the next
var replicas = tokenMap.GetReplicas("ks1", new M3PToken(0));
Assert.AreEqual("0,1", String.Join(",", replicas.Select(TestHelper.GetLastAddressByte)));
replicas = tokenMap.GetReplicas("ks1", new M3PToken(-100));
Assert.AreEqual("0,1", String.Join(",", replicas.Select(TestHelper.GetLastAddressByte)));
//Greater than the greatest token
replicas = tokenMap.GetReplicas("ks1", new M3PToken(500000));
Assert.AreEqual("0,1", String.Join(",", replicas.Select(TestHelper.GetLastAddressByte)));
//The next replica should be the first
replicas = tokenMap.GetReplicas("ks1", new M3PToken(20));
Assert.AreEqual("2,0", String.Join(",", replicas.Select(TestHelper.GetLastAddressByte)));
//The closest replica and the next
replicas = tokenMap.GetReplicas("ks1", new M3PToken(19));
Assert.AreEqual("2,0", String.Join(",", replicas.Select(TestHelper.GetLastAddressByte)));
//Even if the replication factor is greater than the ring, it should return only the ring size
replicas = tokenMap.GetReplicas("ks2", new M3PToken(5));
Assert.AreEqual("1,2,0", String.Join(",", replicas.Select(TestHelper.GetLastAddressByte)));
//The primary replica only as the keyspace was not found
replicas = tokenMap.GetReplicas(null, new M3PToken(0));
Assert.AreEqual("0", String.Join(",", replicas.Select(TestHelper.GetLastAddressByte)));
replicas = tokenMap.GetReplicas(null, new M3PToken(10));
Assert.AreEqual("1", String.Join(",", replicas.Select(TestHelper.GetLastAddressByte)));
replicas = tokenMap.GetReplicas("ks_does_not_exist", new M3PToken(20));
Assert.AreEqual("2", String.Join(",", replicas.Select(TestHelper.GetLastAddressByte)));
replicas = tokenMap.GetReplicas(null, new M3PToken(19));
Assert.AreEqual("2", String.Join(",", replicas.Select(TestHelper.GetLastAddressByte)));
}
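// Note on the assertions above: with tokens {0, 10, 20} each host owns the range ending at its token,
// so a key token of 19 belongs to the host that owns 20, and anything beyond the largest token
// (e.g. 500000) wraps around to the host that owns the smallest token.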
[Test]
public void TokenMap_SimpleStrategy_With_Hosts_Without_Tokens()
{
var hosts = new List<Host>
{
{ TestHelper.CreateHost("192.168.0.0", "dc1", "rack", new HashSet<string>{"0"})},
{ TestHelper.CreateHost("192.168.0.1", "dc1", "rack", new string[0])},
{ TestHelper.CreateHost("192.168.0.2", "dc1", "rack", new HashSet<string>{"20"})}
};
var keyspaces = new List<KeyspaceMetadata>
{
TokenTests.CreateSimpleKeyspace("ks1", 10),
TokenTests.CreateSimpleKeyspace("ks2", 2)
};
var tokenMap = TokenMap.Build("Murmur3Partitioner", hosts, keyspaces);
//the primary replica and the next
var replicas = tokenMap.GetReplicas("ks1", new M3PToken(0));
//The node without tokens should not be considered
CollectionAssert.AreEqual(new byte[] { 0, 2 }, replicas.Select(TestHelper.GetLastAddressByte));
replicas = tokenMap.GetReplicas("ks1", new M3PToken(-100));
CollectionAssert.AreEqual(new byte[] { 0, 2 }, replicas.Select(TestHelper.GetLastAddressByte));
//Greater than the greatest token
replicas = tokenMap.GetReplicas("ks1", new M3PToken(500000));
CollectionAssert.AreEqual(new byte[] { 0, 2 }, replicas.Select(TestHelper.GetLastAddressByte));
//The next replica should be the first
replicas = tokenMap.GetReplicas("ks1", new M3PToken(20));
CollectionAssert.AreEqual(new byte[] { 2, 0 }, replicas.Select(TestHelper.GetLastAddressByte));
}
[Test]
public void TokenMap_NetworkTopologyStrategy_With_Keyspace_Test()
{
var hosts = new List<Host>
{
{ TestHelper.CreateHost("192.168.0.0", "dc1", "rack1", new HashSet<string>{"0"})},
{ TestHelper.CreateHost("192.168.0.1", "dc1", "rack1", new HashSet<string>{"100"})},
{ TestHelper.CreateHost("192.168.0.2", "dc1", "rack1", new HashSet<string>{"200"})},
{ TestHelper.CreateHost("192.168.0.100", "dc2", "rack1", new HashSet<string>{"1"})},
{ TestHelper.CreateHost("192.168.0.101", "dc2", "rack1", new HashSet<string>{"101"})},
{ TestHelper.CreateHost("192.168.0.102", "dc2", "rack1", new HashSet<string>{"201"})}
};
const string strategy = ReplicationStrategies.NetworkTopologyStrategy;
var keyspaces = new List<KeyspaceMetadata>
{
//network strategy with rf 2 per dc
new KeyspaceMetadata(null, "ks1", true, strategy, new Dictionary<string, int> {{"dc1", 2}, {"dc2", 2}}),
//Testing simple strategy (even though it is not supposed to be used here)
new KeyspaceMetadata(null, "ks2", true, ReplicationStrategies.SimpleStrategy, new Dictionary<string, int> {{"replication_factor", 3}}),
//network strategy with rf 3 dc1 and 1 dc2
new KeyspaceMetadata(null, "ks3", true, strategy, new Dictionary<string, int> {{"dc1", 3}, {"dc2", 1}, {"dc3", 5}}),
//network strategy with rf 5 dc1
new KeyspaceMetadata(null, "ks4", true, strategy, new Dictionary<string, int> {{"dc1", 5}})
};
var tokenMap = TokenMap.Build("Murmur3Partitioner", hosts, keyspaces);
//KS1
//the primary replica and the next
var replicas = tokenMap.GetReplicas("ks1", new M3PToken(0));
Assert.AreEqual("0,100,1,101", String.Join(",", replicas.Select(TestHelper.GetLastAddressByte)));
//The next replica should be the first
replicas = tokenMap.GetReplicas("ks1", new M3PToken(200));
Assert.AreEqual("2,102,0,100", String.Join(",", replicas.Select(TestHelper.GetLastAddressByte)));
//The closest replica and the next
replicas = tokenMap.GetReplicas("ks1", new M3PToken(190));
Assert.AreEqual("2,102,0,100", String.Join(",", replicas.Select(TestHelper.GetLastAddressByte)));
//KS2
//Simple strategy: 3 tokens no matter which dc
replicas = tokenMap.GetReplicas("ks2", new M3PToken(5000));
Assert.AreEqual("0,100,1", String.Join(",", replicas.Select(TestHelper.GetLastAddressByte)));
//KS3
replicas = tokenMap.GetReplicas("ks3", new M3PToken(0));
Assert.AreEqual("0,100,1,2", String.Join(",", replicas.Select(TestHelper.GetLastAddressByte)));
replicas = tokenMap.GetReplicas("ks3", new M3PToken(201));
Assert.AreEqual("102,0,1,2", String.Join(",", replicas.Select(TestHelper.GetLastAddressByte)));
//KS4
replicas = tokenMap.GetReplicas("ks4", new M3PToken(0));
Assert.AreEqual("0,1,2", String.Join(",", replicas.Select(TestHelper.GetLastAddressByte)));
}
[Test]
public void TokenMap_Build_NetworkTopology_Adjacent_Ranges_Test()
{
const string strategy = ReplicationStrategies.NetworkTopologyStrategy;
var hosts = new[]
{
//0 and 100 are adjacent
TestHelper.CreateHost("192.168.0.1", "dc1", "rack1", new HashSet<string> {"0", "100", "1000"}),
TestHelper.CreateHost("192.168.0.2", "dc1", "rack1", new HashSet<string> {"200", "2000", "20000"}),
TestHelper.CreateHost("192.168.0.3", "dc1", "rack1", new HashSet<string> {"300", "3000", "30000"})
};
var ks = new KeyspaceMetadata(null, "ks1", true, strategy, new Dictionary<string, int> { { "dc1", 2 } });
var map = TokenMap.Build("Murmur3Partitioner", hosts, new[] { ks });
var replicas = map.GetReplicas("ks1", new M3PToken(0));
Assert.AreEqual(2, replicas.Count);
            //It should contain the first host and the second, even though the first host owns adjacent token ranges
CollectionAssert.AreEqual(new byte[] { 1, 2 }, replicas.Select(TestHelper.GetLastAddressByte));
}
[Test]
public void TokenMap_Build_Should_Memorize_Tokens_Per_Replication_Test()
{
const string strategy = ReplicationStrategies.NetworkTopologyStrategy;
var hosts = new[]
{
//0 and 100 are adjacent
TestHelper.CreateHost("192.168.0.1", "dc1", "dc1_rack1", new HashSet<string> {"0", "100", "1000"}),
TestHelper.CreateHost("192.168.0.2", "dc1", "dc1_rack2", new HashSet<string> {"200", "2000", "20000"}),
TestHelper.CreateHost("192.168.0.3", "dc1", "dc1_rack1", new HashSet<string> {"300", "3000", "30000"}),
TestHelper.CreateHost("192.168.0.4", "dc2", "dc2_rack1", new HashSet<string> {"400", "4000", "40000"}),
TestHelper.CreateHost("192.168.0.5", "dc2", "dc2_rack2", new HashSet<string> {"500", "5000", "50000"})
};
var ks1 = new KeyspaceMetadata(null, "ks1", true, strategy, new Dictionary<string, int> { { "dc1", 2 }, { "dc2", 1 } });
var ks2 = new KeyspaceMetadata(null, "ks2", true, strategy, new Dictionary<string, int> { { "dc1", 2 }, { "dc2", 1 } });
var ks3 = new KeyspaceMetadata(null, "ks3", true, strategy, new Dictionary<string, int> { { "dc1", 2 } });
var map = TokenMap.Build("Murmur3Partitioner", hosts, new[] { ks1, ks2, ks3 });
var tokens1 = map.GetByKeyspace("ks1");
var tokens2 = map.GetByKeyspace("ks2");
var tokens3 = map.GetByKeyspace("ks3");
Assert.AreSame(tokens1, tokens2);
Assert.AreNotSame(tokens1, tokens3);
}
[Test]
public void TokenMap_Build_NetworkTopology_Multiple_Racks_Test()
{
const string strategy = ReplicationStrategies.NetworkTopologyStrategy;
var hosts = new[]
{
                // DC1 racks have contiguous tokens
// DC2 racks are properly organized
TestHelper.CreateHost("192.168.0.0", "dc1", "dc1_rack1", new HashSet<string> {"0"}),
TestHelper.CreateHost("192.168.0.1", "dc2", "dc2_rack1", new HashSet<string> {"1"}),
TestHelper.CreateHost("192.168.0.2", "dc1", "dc1_rack2", new HashSet<string> {"2"}),
TestHelper.CreateHost("192.168.0.3", "dc2", "dc2_rack2", new HashSet<string> {"3"}),
TestHelper.CreateHost("192.168.0.4", "dc1", "dc1_rack1", new HashSet<string> {"4"}),
TestHelper.CreateHost("192.168.0.5", "dc2", "dc2_rack1", new HashSet<string> {"5"}),
TestHelper.CreateHost("192.168.0.6", "dc1", "dc1_rack2", new HashSet<string> {"6"}),
TestHelper.CreateHost("192.168.0.7", "dc2", "dc2_rack2", new HashSet<string> {"7"})
};
var ks = new KeyspaceMetadata(null, "ks1", true, strategy, new Dictionary<string, int>
{
{ "dc1", 3 },
{ "dc2", 2 }
});
var map = TokenMap.Build("Murmur3Partitioner", hosts, new[] { ks });
var replicas = map.GetReplicas("ks1", new M3PToken(0));
CollectionAssert.AreEqual(new byte[] { 0, 1, 2, 3, 4 }, replicas.Select(TestHelper.GetLastAddressByte));
}
[Test]
public void TokenMap_Build_NetworkTopology_Multiple_Racks_Skipping_Hosts_Test()
{
const string strategy = ReplicationStrategies.NetworkTopologyStrategy;
var hosts = new[]
{
                // DC1 racks have contiguous tokens
// DC2 racks are properly organized
TestHelper.CreateHost("192.168.0.0", "dc1", "dc1_rack1", new HashSet<string> {"0"}),
TestHelper.CreateHost("192.168.0.1", "dc2", "dc2_rack1", new HashSet<string> {"1"}),
TestHelper.CreateHost("192.168.0.2", "dc1", "dc1_rack1", new HashSet<string> {"2"}),
TestHelper.CreateHost("192.168.0.3", "dc2", "dc2_rack2", new HashSet<string> {"3"}),
TestHelper.CreateHost("192.168.0.4", "dc1", "dc1_rack2", new HashSet<string> {"4"}),
TestHelper.CreateHost("192.168.0.5", "dc2", "dc2_rack1", new HashSet<string> {"5"}),
TestHelper.CreateHost("192.168.0.6", "dc1", "dc1_rack2", new HashSet<string> {"6"}),
TestHelper.CreateHost("192.168.0.7", "dc2", "dc2_rack2", new HashSet<string> {"7"})
};
var ks = new KeyspaceMetadata(null, "ks1", true, strategy, new Dictionary<string, int>
{
{ "dc1", 3 },
{ "dc2", 2 }
});
var map = TokenMap.Build("Murmur3Partitioner", hosts, new[] { ks });
var values = new[]
{
Tuple.Create(0, new byte[] { 0, 1, 3, 4, 2 }),
Tuple.Create(1, new byte[] { 1, 2, 3, 4, 6 }),
Tuple.Create(4, new byte[] { 4, 5, 7, 0, 6 })
};
foreach (var v in values)
{
var replicas = map.GetReplicas("ks1", new M3PToken(v.Item1));
CollectionAssert.AreEqual(v.Item2, replicas.Select(TestHelper.GetLastAddressByte));
}
}
[Test, TestTimeout(2000)]
public void TokenMap_Build_NetworkTopology_Quickly_Leave_When_Dc_Not_Found()
{
const string strategy = ReplicationStrategies.NetworkTopologyStrategy;
var hosts = new Host[100];
for (var i = 0; i < hosts.Length; i++)
{
hosts[i] = TestHelper.CreateHost("192.168.0." + i, "dc" + (i % 2), "rack1", new HashSet<string>());
}
for (var i = 0; i < 256 * hosts.Length; i++)
{
var tokens = (HashSet<string>)hosts[i % hosts.Length].Tokens;
tokens.Add(i.ToString());
}
var ks = new KeyspaceMetadata(null, "ks1", true, strategy, new Dictionary<string, int>
{
{ "dc1", 3 },
{ "dc2", 2 },
{ "dc3", 1 }
});
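            // No explicit assertion here: the test relies on the TestTimeout(2000) attribute above,
            // so Build must return quickly even though the "dc3" entry has no matching hosts.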
TokenMap.Build("Murmur3Partitioner", hosts, new[] { ks });
}
[Test]
public void TokenMap_Build_SimpleStrategy_Adjacent_Ranges_Test()
{
var hosts = new[]
{
//0 and 100 are adjacent
TestHelper.CreateHost("192.168.0.1", "dc1", "rack1", new HashSet<string> {"0", "100", "1000"}),
TestHelper.CreateHost("192.168.0.2", "dc1", "rack1", new HashSet<string> {"200", "2000", "20000"}),
TestHelper.CreateHost("192.168.0.3", "dc1", "rack1", new HashSet<string> {"300", "3000", "30000"})
};
var ks = TokenTests.CreateSimpleKeyspace("ks1", 2);
var map = TokenMap.Build("Murmur3Partitioner", hosts, new[] { ks });
var replicas = map.GetReplicas("ks1", new M3PToken(0));
Assert.AreEqual(2, replicas.Count);
            //It should contain the first host and the second, even though the first host owns adjacent token ranges
CollectionAssert.AreEqual(new byte[] { 1, 2 }, replicas.Select(TestHelper.GetLastAddressByte));
}
[Test]
public void Build_Should_OnlyCallOncePerReplicationConfiguration_When_MultipleKeyspacesWithSameReplicationOptions()
{
var hosts = new List<Host>
{
{ TestHelper.CreateHost("192.168.0.0", "dc1", "rack", new HashSet<string>{"0"})},
{ TestHelper.CreateHost("192.168.0.1", "dc1", "rack", new HashSet<string>{"10"})},
{ TestHelper.CreateHost("192.168.0.2", "dc1", "rack", new HashSet<string>{"20"})},
{ TestHelper.CreateHost("192.168.0.3", "dc2", "rack", new HashSet<string>{"30"})},
{ TestHelper.CreateHost("192.168.0.4", "dc2", "rack", new HashSet<string>{"40"})}
};
var factory = new ProxyReplicationStrategyFactory();
var keyspaces = new List<KeyspaceMetadata>
{
// unique configurations
TokenTests.CreateSimpleKeyspace("ks1", 2, factory),
TokenTests.CreateSimpleKeyspace("ks2", 10, factory),
TokenTests.CreateSimpleKeyspace("ks3", 5, factory),
TokenTests.CreateNetworkTopologyKeyspace("ks4", new Dictionary<string, int> {{"dc1", 2}, {"dc2", 2}}, factory),
TokenTests.CreateNetworkTopologyKeyspace("ks5", new Dictionary<string, int> {{"dc1", 1}, {"dc2", 2}}, factory),
TokenTests.CreateNetworkTopologyKeyspace("ks6", new Dictionary<string, int> {{"dc1", 1}}, factory),
// duplicate configurations
TokenTests.CreateNetworkTopologyKeyspace("ks7", new Dictionary<string, int> {{"dc1", 2}, {"dc2", 2}}, factory),
TokenTests.CreateNetworkTopologyKeyspace("ks8", new Dictionary<string, int> {{"dc1", 1}}, factory),
TokenTests.CreateNetworkTopologyKeyspace("ks9", new Dictionary<string, int> {{"dc1", 1}, {"dc2", 2}}, factory),
TokenTests.CreateSimpleKeyspace("ks10", 10, factory),
TokenTests.CreateSimpleKeyspace("ks11", 2, factory)
};
var tokenMap = TokenMap.Build("Murmur3Partitioner", hosts, keyspaces);
var proxyStrategies = keyspaces.Select(k => (ProxyReplicationStrategy)k.Strategy).ToList();
Assert.AreEqual(6, proxyStrategies.Count(strategy => strategy.Calls > 0));
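            // Each group below shares the same replication settings, so only one strategy instance
            // per group should have computed the replicas:
            // (ks1, ks11), (ks2, ks10), (ks3), (ks4, ks7), (ks5, ks9), (ks6, ks8).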
AssertOnlyOneStrategyIsCalled(proxyStrategies, 0, 10);
AssertOnlyOneStrategyIsCalled(proxyStrategies, 1, 9);
AssertOnlyOneStrategyIsCalled(proxyStrategies, 2);
AssertOnlyOneStrategyIsCalled(proxyStrategies, 3, 6);
AssertOnlyOneStrategyIsCalled(proxyStrategies, 4, 8);
AssertOnlyOneStrategyIsCalled(proxyStrategies, 5, 7);
}
[Test]
[Repeat(1)]
public void Should_UpdateKeyspacesAndTokenMapCorrectly_When_MultipleThreadsCallingRefreshKeyspace()
{
var keyspaces = new ConcurrentDictionary<string, KeyspaceMetadata>();
// unique configurations
keyspaces.AddOrUpdate("ks1", TokenTests.CreateSimpleKeyspace("ks1", 2), (s, keyspaceMetadata) => keyspaceMetadata);
keyspaces.AddOrUpdate("ks2", TokenTests.CreateSimpleKeyspace("ks2", 10), (s, keyspaceMetadata) => keyspaceMetadata);
keyspaces.AddOrUpdate("ks3", TokenTests.CreateSimpleKeyspace("ks3", 5), (s, keyspaceMetadata) => keyspaceMetadata);
keyspaces.AddOrUpdate("ks4", TokenTests.CreateNetworkTopologyKeyspace("ks4", new Dictionary<string, int> { { "dc1", 2 }, { "dc2", 2 } }), (s, keyspaceMetadata) => keyspaceMetadata);
keyspaces.AddOrUpdate("ks5", TokenTests.CreateNetworkTopologyKeyspace("ks5", new Dictionary<string, int> { { "dc1", 1 }, { "dc2", 2 } }), (s, keyspaceMetadata) => keyspaceMetadata);
keyspaces.AddOrUpdate("ks6", TokenTests.CreateNetworkTopologyKeyspace("ks6", new Dictionary<string, int> { { "dc1", 1 } }), (s, keyspaceMetadata) => keyspaceMetadata);
// duplicate configurations
keyspaces.AddOrUpdate("ks7", TokenTests.CreateNetworkTopologyKeyspace("ks7", new Dictionary<string, int> { { "dc1", 2 }, { "dc2", 2 } }), (s, keyspaceMetadata) => keyspaceMetadata);
keyspaces.AddOrUpdate("ks8", TokenTests.CreateNetworkTopologyKeyspace("ks8", new Dictionary<string, int> { { "dc1", 1 } }), (s, keyspaceMetadata) => keyspaceMetadata);
keyspaces.AddOrUpdate("ks9", TokenTests.CreateNetworkTopologyKeyspace("ks9", new Dictionary<string, int> { { "dc1", 1 }, { "dc2", 2 } }), (s, keyspaceMetadata) => keyspaceMetadata);
keyspaces.AddOrUpdate("ks10", TokenTests.CreateSimpleKeyspace("ks10", 10), (s, keyspaceMetadata) => keyspaceMetadata);
keyspaces.AddOrUpdate("ks11", TokenTests.CreateSimpleKeyspace("ks11", 2), (s, keyspaceMetadata) => keyspaceMetadata);
var schemaParser = new FakeSchemaParser(keyspaces);
var config = new TestConfigurationBuilder
{
ConnectionFactory = new FakeConnectionFactory()
}.Build();
var metadata = new Metadata(config, schemaParser) {Partitioner = "Murmur3Partitioner"};
metadata.ControlConnection = new ControlConnection(
new ProtocolEventDebouncer(new TaskBasedTimerFactory(), TimeSpan.FromMilliseconds(20), TimeSpan.FromSeconds(100)),
ProtocolVersion.V3,
config,
metadata,
new List<object> { "127.0.0.1" });
metadata.Hosts.Add(new IPEndPoint(IPAddress.Parse("192.168.0.1"), 9042));
metadata.Hosts.Add(new IPEndPoint(IPAddress.Parse("192.168.0.2"), 9042));
metadata.Hosts.Add(new IPEndPoint(IPAddress.Parse("192.168.0.3"), 9042));
metadata.Hosts.Add(new IPEndPoint(IPAddress.Parse("192.168.0.4"), 9042));
metadata.Hosts.Add(new IPEndPoint(IPAddress.Parse("192.168.0.5"), 9042));
metadata.Hosts.Add(new IPEndPoint(IPAddress.Parse("192.168.0.6"), 9042));
metadata.Hosts.Add(new IPEndPoint(IPAddress.Parse("192.168.0.7"), 9042));
metadata.Hosts.Add(new IPEndPoint(IPAddress.Parse("192.168.0.8"), 9042));
metadata.Hosts.Add(new IPEndPoint(IPAddress.Parse("192.168.0.9"), 9042));
metadata.Hosts.Add(new IPEndPoint(IPAddress.Parse("192.168.0.10"), 9042));
var initialToken = 1;
foreach (var h in metadata.Hosts)
{
h.SetInfo(new TestHelper.DictionaryBasedRow(new Dictionary<string, object>
{
{ "data_center", initialToken % 2 == 0 ? "dc1" : "dc2"},
{ "rack", "rack1" },
{ "tokens", GenerateTokens(initialToken, 256) },
{ "release_version", "3.11.1" }
}));
initialToken++;
}
metadata.RebuildTokenMapAsync(false, true).GetAwaiter().GetResult();
var expectedTokenMap = metadata.TokenToReplicasMap;
Assert.NotNull(expectedTokenMap);
var bag = new ConcurrentBag<string>();
var tasks = new List<Task>();
for (var i = 0; i < 100; i++)
{
var index = i;
tasks.Add(Task.Factory.StartNew(
() =>
{
for (var j = 0; j < 35; j++)
{
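                            // Each iteration either refreshes the full schema, removes a previously
                            // added keyspace, or adds a new keyspace, so token map updates race with
                            // each other across the 100 tasks.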
if (j % 10 == 0 && index % 2 == 0)
{
metadata.RefreshSchemaAsync().GetAwaiter().GetResult();
}
else if (j % 16 == 0)
{
if (bag.TryTake(out var ksName))
{
if (keyspaces.TryRemove(ksName, out var ks))
{
metadata.RefreshSchemaAsync(ksName).GetAwaiter().GetResult();
ks = metadata.GetKeyspace(ksName);
if (ks != null)
{
throw new Exception($"refresh for {ks.Name} returned non null after refresh single.");
}
}
}
}
                            else if (j % 2 == 0)
{
if (bag.TryTake(out var ksName))
{
if (keyspaces.TryRemove(ksName, out var ks))
{
metadata.ControlConnection.HandleKeyspaceRefreshLaterAsync(ks.Name).GetAwaiter().GetResult();
ks = metadata.GetKeyspace(ksName);
if (ks != null)
{
throw new Exception($"refresh for {ks.Name} returned non null after remove.");
}
}
}
}
else
{
var keyspaceName = $"ks_____{index}_____{j}";
var ks = TokenTests.CreateSimpleKeyspace(keyspaceName, (index * j) % 10);
keyspaces.AddOrUpdate(
keyspaceName,
ks,
(s, keyspaceMetadata) => ks);
metadata.ControlConnection.HandleKeyspaceRefreshLaterAsync(ks.Name).GetAwaiter().GetResult();
ks = metadata.GetKeyspace(ks.Name);
if (ks == null)
{
throw new Exception($"refresh for {keyspaceName} returned null after add.");
}
bag.Add(keyspaceName);
}
}
},
TaskCreationOptions.LongRunning | TaskCreationOptions.DenyChildAttach));
}
Task.WaitAll(tasks.ToArray());
AssertSameReplicas(keyspaces.Values, expectedTokenMap, metadata.TokenToReplicasMap);
}
[Test]
public void RefreshSingleKeyspace_Should_BuildTokenMap_When_TokenMapIsNull()
{
var keyspaces = new ConcurrentDictionary<string, KeyspaceMetadata>();
keyspaces.GetOrAdd("ks1", TokenTests.CreateSimpleKeyspace("ks1", 1));
var schemaParser = new FakeSchemaParser(keyspaces);
var metadata = new Metadata(new Configuration(), schemaParser) {Partitioner = "Murmur3Partitioner"};
metadata.Hosts.Add(new IPEndPoint(IPAddress.Parse("192.168.0.1"), 9042));;
metadata.Hosts.First().SetInfo(new TestHelper.DictionaryBasedRow(new Dictionary<string, object>
{
{ "data_center", "dc1"},
{ "rack", "rack1" },
{ "tokens", GenerateTokens(1, 256) },
{ "release_version", "3.11.1" }
}));
Assert.IsNull(metadata.TokenToReplicasMap);
metadata.RefreshSingleKeyspace("ks1").GetAwaiter().GetResult();
Assert.NotNull(metadata.TokenToReplicasMap);
}
private void AssertSameReplicas(IEnumerable<KeyspaceMetadata> keyspaces, IReadOnlyTokenMap expectedTokenMap, IReadOnlyTokenMap actualTokenMap)
{
foreach (var k in keyspaces)
{
var actual = actualTokenMap.GetByKeyspace(k.Name);
var expected = expectedTokenMap.GetByKeyspace(k.Name);
if (expected != null)
{
CollectionAssert.AreEqual(expected.Keys, actual.Keys);
foreach (var kvp in expected)
{
Assert.IsTrue(
expected[kvp.Key].SetEquals(actual[kvp.Key]),
$"mismatch in keyspace '{k}' and token '{kvp.Key}': " +
$"'{string.Join(",", expected[kvp.Key].Select(h => h.Address.ToString()))}' vs " +
$"'{string.Join(",", actual[kvp.Key].Select(h => h.Address.ToString()))}'");
}
}
else
{
// keyspace is one of the keyspaces that were inserted by the tasks and wasn't removed
var rf = k.Replication["replication_factor"];
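                    // 10 hosts with 256 tokens each => 2560 token ranges in the ring.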
Assert.AreEqual(10 * 256, actual.Count);
foreach (var kvp in actual)
{
Assert.AreEqual(rf, kvp.Value.Count);
}
}
}
}
private void AssertOnlyOneStrategyIsCalled(IList<ProxyReplicationStrategy> strategies, params int[] equalStrategiesIndexes)
{
var sameStrategies = equalStrategiesIndexes.Select(t => strategies[t]).ToList();
Assert.AreEqual(1, sameStrategies.Count(strategy => strategy.Calls == 1));
Assert.AreEqual(sameStrategies.Count - 1, sameStrategies.Count(strategy => strategy.Calls == 0));
}
private IEnumerable<string> GenerateTokens(int initialToken, int numTokens)
{
var output = new List<string>();
for (var i = 0; i < numTokens; i++)
{
output.Add(initialToken.ToString());
initialToken += 1000;
}
return output;
}
private static KeyspaceMetadata CreateSimpleKeyspace(string name, int replicationFactor, IReplicationStrategyFactory factory = null)
{
return new KeyspaceMetadata(
null,
name,
true,
ReplicationStrategies.SimpleStrategy,
new Dictionary<string, int> { { "replication_factor", replicationFactor } },
factory ?? new ReplicationStrategyFactory());
}
private static KeyspaceMetadata CreateNetworkTopologyKeyspace(string name, IDictionary<string, int> replicationFactors, IReplicationStrategyFactory factory = null)
{
return new KeyspaceMetadata(
null,
name,
true,
ReplicationStrategies.NetworkTopologyStrategy,
replicationFactors,
factory ?? new ReplicationStrategyFactory());
}
}
}
| |
// Copyright (c) Microsoft Corporation.
// Licensed under the MIT License.
using System.Reflection;
using System.Runtime.Serialization;
using System.Security.Permissions;
namespace System.Management.Automation.Runspaces
{
/// <summary>
    /// Defines the exception thrown when a PSSnapIn cannot be loaded into the current runspace.
/// </summary>
/// <!--
/// Implementation of PSSnapInException requires it to
/// 1. Implement IContainsErrorRecord,
/// 2. ISerializable
///
/// Basic information for this exception includes,
/// 1. PSSnapin name
/// 2. Inner exception.
/// -->
[Serializable]
public class PSSnapInException : RuntimeException
{
/// <summary>
        /// Initializes a new instance of PSSnapInException.
/// </summary>
/// <param name="PSSnapin">PSSnapin for the exception.</param>
/// <param name="message">Message with load failure detail.</param>
internal PSSnapInException(string PSSnapin, string message)
: base()
{
_PSSnapin = PSSnapin;
_reason = message;
CreateErrorRecord();
}
/// <summary>
        /// Initializes a new instance of PSSnapInException.
/// </summary>
/// <param name="PSSnapin">PSSnapin for the exception.</param>
/// <param name="message">Message with load failure detail.</param>
/// <param name="warning">Whether this is just a warning for PSSnapin load.</param>
internal PSSnapInException(string PSSnapin, string message, bool warning)
: base()
{
_PSSnapin = PSSnapin;
_reason = message;
_warning = warning;
CreateErrorRecord();
}
/// <summary>
        /// Initializes a new instance of PSSnapInException.
/// </summary>
/// <param name="PSSnapin">PSSnapin for the exception.</param>
/// <param name="message">Message with load failure detail.</param>
/// <param name="exception">Exception for PSSnapin load failure.</param>
internal PSSnapInException(string PSSnapin, string message, Exception exception)
: base(message, exception)
{
_PSSnapin = PSSnapin;
_reason = message;
CreateErrorRecord();
}
/// <summary>
        /// Initializes a new instance of PSSnapInException.
/// </summary>
public PSSnapInException() : base()
{
}
/// <summary>
        /// Initializes a new instance of PSSnapInException.
/// </summary>
/// <param name="message">Error message.</param>
public PSSnapInException(string message)
: base(message)
{
}
/// <summary>
        /// Initializes a new instance of PSSnapInException.
/// </summary>
/// <param name="message">Error message.</param>
/// <param name="innerException">Inner exception.</param>
public PSSnapInException(string message, Exception innerException)
: base(message, innerException)
{
}
/// <summary>
/// Create the internal error record.
/// The ErrorRecord created will be stored in the _errorRecord member.
/// </summary>
private void CreateErrorRecord()
{
// if _PSSnapin or _reason is empty, this exception is created using default
// constructor. Don't create the error record since there is
// no useful information anyway.
if (!string.IsNullOrEmpty(_PSSnapin) && !string.IsNullOrEmpty(_reason))
{
Assembly currentAssembly = typeof(PSSnapInException).Assembly;
if (_warning)
{
_errorRecord = new ErrorRecord(new ParentContainsErrorRecordException(this), "PSSnapInLoadWarning", ErrorCategory.ResourceUnavailable, null);
_errorRecord.ErrorDetails = new ErrorDetails(string.Format(ConsoleInfoErrorStrings.PSSnapInLoadWarning, _PSSnapin, _reason));
}
else
{
_errorRecord = new ErrorRecord(new ParentContainsErrorRecordException(this), "PSSnapInLoadFailure", ErrorCategory.ResourceUnavailable, null);
_errorRecord.ErrorDetails = new ErrorDetails(string.Format(ConsoleInfoErrorStrings.PSSnapInLoadFailure, _PSSnapin, _reason));
}
}
}
private readonly bool _warning = false;
private ErrorRecord _errorRecord;
private bool _isErrorRecordOriginallyNull;
/// <summary>
/// Gets error record embedded in this exception.
/// </summary>
/// <!--
        /// This property is required as part of the IContainsErrorRecord interface.
/// -->
public override ErrorRecord ErrorRecord
{
get
{
if (_errorRecord == null)
{
_isErrorRecordOriginallyNull = true;
_errorRecord = new ErrorRecord(
new ParentContainsErrorRecordException(this),
"PSSnapInException",
ErrorCategory.NotSpecified,
null);
}
return _errorRecord;
}
}
private readonly string _PSSnapin = string.Empty;
private readonly string _reason = string.Empty;
/// <summary>
/// Gets message for this exception.
/// </summary>
public override string Message
{
get
{
if (_errorRecord != null && !_isErrorRecordOriginallyNull)
{
return _errorRecord.ToString();
}
return base.Message;
}
}
#region Serialization
/// <summary>
        /// Initializes a new instance of PSSnapInException from serialization data.
/// </summary>
/// <param name="info">Serialization information.</param>
/// <param name="context">Streaming context.</param>
protected PSSnapInException(SerializationInfo info,
StreamingContext context)
: base(info, context)
{
_PSSnapin = info.GetString("PSSnapIn");
_reason = info.GetString("Reason");
CreateErrorRecord();
}
/// <summary>
/// Get object data from serialization information.
/// </summary>
/// <param name="info">Serialization information.</param>
/// <param name="context">Streaming context.</param>
public override void GetObjectData(SerializationInfo info, StreamingContext context)
{
if (info == null)
{
throw PSTraceSource.NewArgumentNullException(nameof(info));
}
base.GetObjectData(info, context);
info.AddValue("PSSnapIn", _PSSnapin);
info.AddValue("Reason", _reason);
}
#endregion Serialization
}
}
| |
using System;
using Interop=System.Runtime.InteropServices;
namespace Ionic.Zlib
{
/// <summary>
/// Describes how to flush the current deflate operation.
/// </summary>
/// <remarks>
/// The different FlushType values are useful when using a Deflate in a streaming application.
/// </remarks>
public enum FlushType
{
/// <summary>No flush at all.</summary>
None = 0,
/// <summary>Closes the current block, but doesn't flush it to
/// the output. Used internally only in hypothetical
/// scenarios. This was supposed to be removed by Zlib, but it is
/// still in use in some edge cases.
/// </summary>
Partial,
/// <summary>
/// Use this during compression to specify that all pending output should be
/// flushed to the output buffer and the output should be aligned on a byte
/// boundary. You might use this in a streaming communication scenario, so that
/// the decompressor can get all input data available so far. When using this
/// with a ZlibCodec, <c>AvailableBytesIn</c> will be zero after the call if
/// enough output space has been provided before the call. Flushing will
/// degrade compression and so it should be used only when necessary.
/// </summary>
Sync,
/// <summary>
/// Use this during compression to specify that all output should be flushed, as
/// with <c>FlushType.Sync</c>, but also, the compression state should be reset
/// so that decompression can restart from this point if previous compressed
/// data has been damaged or if random access is desired. Using
/// <c>FlushType.Full</c> too often can significantly degrade the compression.
/// </summary>
Full,
/// <summary>Signals the end of the compression/decompression stream.</summary>
Finish,
}
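    // Illustrative sketch (not part of the original source): how FlushType.Sync is typically used
    // with a ZlibCodec in a streaming scenario, assuming the usual ZlibCodec members
    // (InputBuffer/NextIn/AvailableBytesIn, OutputBuffer/NextOut/AvailableBytesOut, Deflate):
    //
    //   var codec = new ZlibCodec(CompressionMode.Compress);
    //   codec.InputBuffer = chunk;
    //   codec.NextIn = 0;
    //   codec.AvailableBytesIn = chunk.Length;
    //   codec.OutputBuffer = outBuf;
    //   codec.NextOut = 0;
    //   codec.AvailableBytesOut = outBuf.Length;
    //   codec.Deflate(FlushType.Sync);   // emit all pending output, aligned on a byte boundary
    //
    // If enough output space was provided, AvailableBytesIn is zero after the call.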
/// <summary>
/// The compression level to be used when using a DeflateStream or ZlibStream with CompressionMode.Compress.
/// </summary>
public enum CompressionLevel
{
/// <summary>
/// None means that the data will be simply stored, with no change at all.
/// If you are producing ZIPs for use on Mac OSX, be aware that archives produced with CompressionLevel.None
/// cannot be opened with the default zip reader. Use a different CompressionLevel.
/// </summary>
None= 0,
/// <summary>
/// Same as None.
/// </summary>
Level0 = 0,
/// <summary>
/// The fastest but least effective compression.
/// </summary>
BestSpeed = 1,
/// <summary>
/// A synonym for BestSpeed.
/// </summary>
Level1 = 1,
/// <summary>
/// A little slower, but better, than level 1.
/// </summary>
Level2 = 2,
/// <summary>
/// A little slower, but better, than level 2.
/// </summary>
Level3 = 3,
/// <summary>
/// A little slower, but better, than level 3.
/// </summary>
Level4 = 4,
/// <summary>
/// A little slower than level 4, but with better compression.
/// </summary>
Level5 = 5,
/// <summary>
/// The default compression level, with a good balance of speed and compression efficiency.
/// </summary>
Default = 6,
/// <summary>
/// A synonym for Default.
/// </summary>
Level6 = 6,
/// <summary>
/// Pretty good compression!
/// </summary>
Level7 = 7,
/// <summary>
/// Better compression than Level7!
/// </summary>
Level8 = 8,
/// <summary>
/// The "best" compression, where best means greatest reduction in size of the input data stream.
/// This is also the slowest compression.
/// </summary>
BestCompression = 9,
/// <summary>
/// A synonym for BestCompression.
/// </summary>
Level9 = 9,
}
/// <summary>
/// Describes options for how the compression algorithm is executed. Different strategies
/// work better on different sorts of data. The strategy parameter can affect the compression
    /// ratio and the speed of compression but not the correctness of the compression.
/// </summary>
public enum CompressionStrategy
{
/// <summary>
/// The default strategy is probably the best for normal data.
/// </summary>
Default = 0,
/// <summary>
/// The <c>Filtered</c> strategy is intended to be used most effectively with data produced by a
/// filter or predictor. By this definition, filtered data consists mostly of small
/// values with a somewhat random distribution. In this case, the compression algorithm
/// is tuned to compress them better. The effect of <c>Filtered</c> is to force more Huffman
/// coding and less string matching; it is a half-step between <c>Default</c> and <c>HuffmanOnly</c>.
/// </summary>
Filtered = 1,
/// <summary>
/// Using <c>HuffmanOnly</c> will force the compressor to do Huffman encoding only, with no
/// string matching.
/// </summary>
HuffmanOnly = 2,
}
/// <summary>
/// An enum to specify the direction of transcoding - whether to compress or decompress.
/// </summary>
public enum CompressionMode
{
/// <summary>
/// Used to specify that the stream should compress the data.
/// </summary>
Compress= 0,
/// <summary>
/// Used to specify that the stream should decompress the data.
/// </summary>
Decompress = 1,
}
/// <summary>
/// A general purpose exception class for exceptions in the Zlib library.
/// </summary>
[Interop.GuidAttribute("ebc25cf6-9120-4283-b972-0e5520d0000E")]
public class ZlibException : System.Exception
{
/// <summary>
/// The ZlibException class captures exception information generated
/// by the Zlib library.
/// </summary>
public ZlibException()
: base()
{
}
/// <summary>
/// This ctor collects a message attached to the exception.
/// </summary>
/// <param name="s">the message for the exception.</param>
public ZlibException(System.String s)
: base(s)
{
}
}
internal class SharedUtils
{
/// <summary>
/// Performs an unsigned bitwise right shift with the specified number
/// </summary>
/// <param name="number">Number to operate on</param>
/// <param name="bits">Ammount of bits to shift</param>
/// <returns>The resulting number from the shift operation</returns>
public static int URShift(int number, int bits)
{
return (int)((uint)number >> bits);
}
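        // Illustrative example (not part of the original source): unlike the arithmetic shift,
        // URShift does not propagate the sign bit:
        //   -1 >> 28        == -1
        //   URShift(-1, 28) == 15   // 0xF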
#if NOT
/// <summary>
/// Performs an unsigned bitwise right shift with the specified number
/// </summary>
/// <param name="number">Number to operate on</param>
/// <param name="bits">Ammount of bits to shift</param>
/// <returns>The resulting number from the shift operation</returns>
public static long URShift(long number, int bits)
{
return (long) ((UInt64)number >> bits);
}
#endif
/// <summary>
/// Reads a number of characters from the current source TextReader and writes
/// the data to the target array at the specified index.
/// </summary>
///
/// <param name="sourceTextReader">The source TextReader to read from</param>
/// <param name="target">Contains the array of characteres read from the source TextReader.</param>
/// <param name="start">The starting index of the target array.</param>
/// <param name="count">The maximum number of characters to read from the source TextReader.</param>
///
/// <returns>
/// The number of characters read. The number will be less than or equal to
/// count depending on the data available in the source TextReader. Returns -1
/// if the end of the stream is reached.
/// </returns>
public static System.Int32 ReadInput(System.IO.TextReader sourceTextReader, byte[] target, int start, int count)
{
// Returns 0 bytes if not enough space in target
if (target.Length == 0) return 0;
char[] charArray = new char[target.Length];
int bytesRead = sourceTextReader.Read(charArray, start, count);
// Returns -1 if EOF
if (bytesRead == 0) return -1;
for (int index = start; index < start + bytesRead; index++)
target[index] = (byte)charArray[index];
return bytesRead;
}
internal static byte[] ToByteArray(System.String sourceString)
{
return System.Text.UTF8Encoding.UTF8.GetBytes(sourceString);
}
internal static char[] ToCharArray(byte[] byteArray)
{
return System.Text.UTF8Encoding.UTF8.GetChars(byteArray);
}
}
internal static class InternalConstants
{
internal static readonly int MAX_BITS = 15;
internal static readonly int BL_CODES = 19;
internal static readonly int D_CODES = 30;
internal static readonly int LITERALS = 256;
internal static readonly int LENGTH_CODES = 29;
internal static readonly int L_CODES = (LITERALS + 1 + LENGTH_CODES);
// Bit length codes must not exceed MAX_BL_BITS bits
internal static readonly int MAX_BL_BITS = 7;
// repeat previous bit length 3-6 times (2 bits of repeat count)
internal static readonly int REP_3_6 = 16;
// repeat a zero length 3-10 times (3 bits of repeat count)
internal static readonly int REPZ_3_10 = 17;
// repeat a zero length 11-138 times (7 bits of repeat count)
internal static readonly int REPZ_11_138 = 18;
}
internal sealed class StaticTree
{
internal static readonly short[] lengthAndLiteralsTreeCodes = new short[] {
12, 8, 140, 8, 76, 8, 204, 8, 44, 8, 172, 8, 108, 8, 236, 8,
28, 8, 156, 8, 92, 8, 220, 8, 60, 8, 188, 8, 124, 8, 252, 8,
2, 8, 130, 8, 66, 8, 194, 8, 34, 8, 162, 8, 98, 8, 226, 8,
18, 8, 146, 8, 82, 8, 210, 8, 50, 8, 178, 8, 114, 8, 242, 8,
10, 8, 138, 8, 74, 8, 202, 8, 42, 8, 170, 8, 106, 8, 234, 8,
26, 8, 154, 8, 90, 8, 218, 8, 58, 8, 186, 8, 122, 8, 250, 8,
6, 8, 134, 8, 70, 8, 198, 8, 38, 8, 166, 8, 102, 8, 230, 8,
22, 8, 150, 8, 86, 8, 214, 8, 54, 8, 182, 8, 118, 8, 246, 8,
14, 8, 142, 8, 78, 8, 206, 8, 46, 8, 174, 8, 110, 8, 238, 8,
30, 8, 158, 8, 94, 8, 222, 8, 62, 8, 190, 8, 126, 8, 254, 8,
1, 8, 129, 8, 65, 8, 193, 8, 33, 8, 161, 8, 97, 8, 225, 8,
17, 8, 145, 8, 81, 8, 209, 8, 49, 8, 177, 8, 113, 8, 241, 8,
9, 8, 137, 8, 73, 8, 201, 8, 41, 8, 169, 8, 105, 8, 233, 8,
25, 8, 153, 8, 89, 8, 217, 8, 57, 8, 185, 8, 121, 8, 249, 8,
5, 8, 133, 8, 69, 8, 197, 8, 37, 8, 165, 8, 101, 8, 229, 8,
21, 8, 149, 8, 85, 8, 213, 8, 53, 8, 181, 8, 117, 8, 245, 8,
13, 8, 141, 8, 77, 8, 205, 8, 45, 8, 173, 8, 109, 8, 237, 8,
29, 8, 157, 8, 93, 8, 221, 8, 61, 8, 189, 8, 125, 8, 253, 8,
19, 9, 275, 9, 147, 9, 403, 9, 83, 9, 339, 9, 211, 9, 467, 9,
51, 9, 307, 9, 179, 9, 435, 9, 115, 9, 371, 9, 243, 9, 499, 9,
11, 9, 267, 9, 139, 9, 395, 9, 75, 9, 331, 9, 203, 9, 459, 9,
43, 9, 299, 9, 171, 9, 427, 9, 107, 9, 363, 9, 235, 9, 491, 9,
27, 9, 283, 9, 155, 9, 411, 9, 91, 9, 347, 9, 219, 9, 475, 9,
59, 9, 315, 9, 187, 9, 443, 9, 123, 9, 379, 9, 251, 9, 507, 9,
7, 9, 263, 9, 135, 9, 391, 9, 71, 9, 327, 9, 199, 9, 455, 9,
39, 9, 295, 9, 167, 9, 423, 9, 103, 9, 359, 9, 231, 9, 487, 9,
23, 9, 279, 9, 151, 9, 407, 9, 87, 9, 343, 9, 215, 9, 471, 9,
55, 9, 311, 9, 183, 9, 439, 9, 119, 9, 375, 9, 247, 9, 503, 9,
15, 9, 271, 9, 143, 9, 399, 9, 79, 9, 335, 9, 207, 9, 463, 9,
47, 9, 303, 9, 175, 9, 431, 9, 111, 9, 367, 9, 239, 9, 495, 9,
31, 9, 287, 9, 159, 9, 415, 9, 95, 9, 351, 9, 223, 9, 479, 9,
63, 9, 319, 9, 191, 9, 447, 9, 127, 9, 383, 9, 255, 9, 511, 9,
0, 7, 64, 7, 32, 7, 96, 7, 16, 7, 80, 7, 48, 7, 112, 7,
8, 7, 72, 7, 40, 7, 104, 7, 24, 7, 88, 7, 56, 7, 120, 7,
4, 7, 68, 7, 36, 7, 100, 7, 20, 7, 84, 7, 52, 7, 116, 7,
3, 8, 131, 8, 67, 8, 195, 8, 35, 8, 163, 8, 99, 8, 227, 8
};
internal static readonly short[] distTreeCodes = new short[] {
0, 5, 16, 5, 8, 5, 24, 5, 4, 5, 20, 5, 12, 5, 28, 5,
2, 5, 18, 5, 10, 5, 26, 5, 6, 5, 22, 5, 14, 5, 30, 5,
1, 5, 17, 5, 9, 5, 25, 5, 5, 5, 21, 5, 13, 5, 29, 5,
3, 5, 19, 5, 11, 5, 27, 5, 7, 5, 23, 5 };
internal static readonly StaticTree Literals;
internal static readonly StaticTree Distances;
internal static readonly StaticTree BitLengths;
internal short[] treeCodes; // static tree or null
internal int[] extraBits; // extra bits for each code or null
internal int extraBase; // base index for extra_bits
internal int elems; // max number of elements in the tree
internal int maxLength; // max bit length for the codes
private StaticTree(short[] treeCodes, int[] extraBits, int extraBase, int elems, int maxLength)
{
this.treeCodes = treeCodes;
this.extraBits = extraBits;
this.extraBase = extraBase;
this.elems = elems;
this.maxLength = maxLength;
}
static StaticTree()
{
Literals = new StaticTree(lengthAndLiteralsTreeCodes, Tree.ExtraLengthBits, InternalConstants.LITERALS + 1, InternalConstants.L_CODES, InternalConstants.MAX_BITS);
Distances = new StaticTree(distTreeCodes, Tree.ExtraDistanceBits, 0, InternalConstants.D_CODES, InternalConstants.MAX_BITS);
BitLengths = new StaticTree(null, Tree.extra_blbits, 0, InternalConstants.BL_CODES, InternalConstants.MAX_BL_BITS);
}
}
/// <summary>
/// Computes an Adler-32 checksum.
/// </summary>
/// <remarks>
/// The Adler checksum is similar to a CRC checksum, but faster to compute, though less
/// reliable. It is used in producing RFC1950 compressed streams. The Adler checksum
/// is a required part of the "ZLIB" standard. Applications will almost never need to
/// use this class directly.
/// </remarks>
///
/// <exclude/>
public sealed class Adler
{
// largest prime smaller than 65536
private static readonly uint BASE = 65521;
// NMAX is the largest n such that 255n(n+1)/2 + (n+1)(BASE-1) <= 2^32-1
private static readonly int NMAX = 5552;
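        // Deferring the modulo until up to NMAX bytes have been accumulated keeps s1 and s2 within
        // 32 bits while avoiding a per-byte '%' operation.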
#pragma warning disable 3001
#pragma warning disable 3002
/// <summary>
/// Calculates the Adler32 checksum.
/// </summary>
/// <remarks>
/// <para>
/// This is used within ZLIB. You probably don't need to use this directly.
/// </para>
/// </remarks>
/// <example>
/// To compute an Adler32 checksum on a byte array:
/// <code>
/// var adler = Adler.Adler32(0, null, 0, 0);
/// adler = Adler.Adler32(adler, buffer, index, length);
/// </code>
/// </example>
public static uint Adler32(uint adler, byte[] buf, int index, int len)
{
if (buf == null)
return 1;
uint s1 = (uint) (adler & 0xffff);
uint s2 = (uint) ((adler >> 16) & 0xffff);
while (len > 0)
{
int k = len < NMAX ? len : NMAX;
len -= k;
while (k >= 16)
{
//s1 += (buf[index++] & 0xff); s2 += s1;
s1 += buf[index++]; s2 += s1;
s1 += buf[index++]; s2 += s1;
s1 += buf[index++]; s2 += s1;
s1 += buf[index++]; s2 += s1;
s1 += buf[index++]; s2 += s1;
s1 += buf[index++]; s2 += s1;
s1 += buf[index++]; s2 += s1;
s1 += buf[index++]; s2 += s1;
s1 += buf[index++]; s2 += s1;
s1 += buf[index++]; s2 += s1;
s1 += buf[index++]; s2 += s1;
s1 += buf[index++]; s2 += s1;
s1 += buf[index++]; s2 += s1;
s1 += buf[index++]; s2 += s1;
s1 += buf[index++]; s2 += s1;
s1 += buf[index++]; s2 += s1;
k -= 16;
}
if (k != 0)
{
do
{
s1 += buf[index++];
s2 += s1;
}
while (--k != 0);
}
s1 %= BASE;
s2 %= BASE;
}
return (uint)((s2 << 16) | s1);
}
#pragma warning restore 3001
#pragma warning restore 3002
}
}
| |
// ---------------------------------------------------------------------------
// <copyright file="WindowsLiveCredentials.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
// ---------------------------------------------------------------------------
//-----------------------------------------------------------------------
// <summary>Defines the WindowsLiveCredentials class.</summary>
//-----------------------------------------------------------------------
namespace Microsoft.Exchange.WebServices.Data
{
using System;
using System.IO;
using System.Net;
using System.Text;
using System.Xml;
/// <summary>
/// WindowsLiveCredentials provides credentials for Windows Live ID authentication.
/// </summary>
internal sealed class WindowsLiveCredentials : WSSecurityBasedCredentials
{
private string windowsLiveId;
private string password;
private Uri windowsLiveUrl;
private bool isAuthenticated;
private bool traceEnabled;
private ITraceListener traceListener = new EwsTraceListener();
// XML-Encryption Namespace.
internal const string XmlEncNamespace = "http://www.w3.org/2001/04/xmlenc#";
// Windows Live SOAP namespace prefix (which is S: instead of soap:)
internal const string WindowsLiveSoapNamespacePrefix = "S";
// XML element names used in RSTR responses from Windows Live
internal const string RequestSecurityTokenResponseCollectionElementName = "RequestSecurityTokenResponseCollection";
internal const string RequestSecurityTokenResponseElementName = "RequestSecurityTokenResponse";
internal const string EncryptedDataElementName = "EncryptedData";
internal const string PpElementName = "pp";
internal const string ReqstatusElementName = "reqstatus";
// The reqstatus we should receive from Windows Live.
internal const string SuccessfulReqstatus = "0x0";
// The default Windows Live URL.
internal static readonly Uri DefaultWindowsLiveUrl = new Uri("https://login.live.com/rst2.srf");
// The reference we use for creating the XML signature.
internal const string XmlSignatureReference = "_EWSTKREF";
/// <summary>
/// Initializes a new instance of the <see cref="WindowsLiveCredentials"/> class.
/// </summary>
/// <param name="windowsLiveId">The user's WindowsLiveId.</param>
/// <param name="password">The password.</param>
public WindowsLiveCredentials(string windowsLiveId, string password)
{
if (windowsLiveId == null)
{
throw new ArgumentNullException("windowsLiveId");
}
if (password == null)
{
throw new ArgumentNullException("password");
}
this.windowsLiveId = windowsLiveId;
this.password = password;
this.windowsLiveUrl = WindowsLiveCredentials.DefaultWindowsLiveUrl;
}
/// <summary>
/// Gets or sets a flag indicating whether tracing is enabled.
/// </summary>
public bool TraceEnabled
{
get
{
return this.traceEnabled;
}
set
{
this.traceEnabled = value;
if (this.traceEnabled && (this.traceListener == null))
{
this.traceListener = new EwsTraceListener();
}
}
}
/// <summary>
/// Gets or sets the trace listener.
/// </summary>
/// <value>The trace listener.</value>
public ITraceListener TraceListener
{
get
{
return this.traceListener;
}
set
{
this.traceListener = value;
this.traceEnabled = value != null;
}
}
/// <summary>
/// Gets or sets the Windows Live Url to use.
/// </summary>
public Uri WindowsLiveUrl
{
get
{
return this.windowsLiveUrl;
}
set
{
// Reset the EWS URL to make sure we go back and re-authenticate next time.
this.EwsUrl = null;
this.IsAuthenticated = false;
this.windowsLiveUrl = value;
}
}
/// <summary>
/// This method is called to apply credentials to a service request before the request is made.
/// </summary>
/// <param name="request">The request.</param>
internal override void PrepareWebRequest(IEwsHttpWebRequest request)
{
if ((this.EwsUrl == null) || (this.EwsUrl != request.RequestUri))
{
this.IsAuthenticated = false;
this.MakeTokenRequestToWindowsLive(request.RequestUri);
this.IsAuthenticated = true;
this.EwsUrl = request.RequestUri;
}
}
/// <summary>
/// Gets or sets a value indicating whether this <see cref="WindowsLiveCredentials"/> has been authenticated.
/// </summary>
/// <value><c>true</c> if authenticated; otherwise, <c>false</c>.</value>
public bool IsAuthenticated
{
get { return this.isAuthenticated; }
internal set { this.isAuthenticated = value; }
}
/// <summary>
/// Function that sends the token request to Windows Live.
/// </summary>
/// <param name="uriForTokenEndpointReference">The Uri to use for the endpoint reference for our token</param>
/// <returns>Response to token request.</returns>
private HttpWebResponse EmitTokenRequest(Uri uriForTokenEndpointReference)
{
const string TokenRequest =
"<?xml version='1.0' encoding='UTF-8'?>" +
"<s:Envelope xmlns:s='http://www.w3.org/2003/05/soap-envelope' " +
" xmlns:wsse='http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-secext-1.0.xsd' " +
" xmlns:saml='urn:oasis:names:tc:SAML:1.0:assertion' " +
" xmlns:wsp='http://schemas.xmlsoap.org/ws/2004/09/policy' " +
" xmlns:wsu='http://docs.oasis-open.org/wss/2004/01/oasis-200401-wss-wssecurity-utility-1.0.xsd' " +
" xmlns:wsa='http://www.w3.org/2005/08/addressing' " +
" xmlns:wssc='http://schemas.xmlsoap.org/ws/2005/02/sc' " +
" xmlns:wst='http://schemas.xmlsoap.org/ws/2005/02/trust' " +
" xmlns:ps='http://schemas.microsoft.com/Passport/SoapServices/PPCRL'>" +
" <s:Header>" +
" <wsa:Action s:mustUnderstand='1'>http://schemas.xmlsoap.org/ws/2005/02/trust/RST/Issue</wsa:Action>" +
" <wsa:To s:mustUnderstand='1'>{0}</wsa:To>" +
" <ps:AuthInfo Id='PPAuthInfo'>" +
" <ps:HostingApp>{{63f179af-8bcd-49a0-a3e5-1154c02df090}}</ps:HostingApp>" + //// NOTE: I generated a new GUID for the EWS API
" <ps:BinaryVersion>5</ps:BinaryVersion>" +
" <ps:UIVersion>1</ps:UIVersion>" +
" <ps:Cookies></ps:Cookies>" +
" <ps:RequestParams>AQAAAAIAAABsYwQAAAAxMDMz</ps:RequestParams>" +
" </ps:AuthInfo>" +
" <wsse:Security>" +
" <wsse:UsernameToken wsu:Id='user'>" +
" <wsse:Username>{1}</wsse:Username>" +
" <wsse:Password>{2}</wsse:Password>" +
" </wsse:UsernameToken>" +
" <wsu:Timestamp Id='Timestamp'>" +
" <wsu:Created>{3}</wsu:Created>" +
" <wsu:Expires>{4}</wsu:Expires>" +
" </wsu:Timestamp>" +
" </wsse:Security>" +
" </s:Header>" +
" <s:Body>" +
" <ps:RequestMultipleSecurityTokens Id='RSTS'>" +
" <wst:RequestSecurityToken Id='RST0'>" +
" <wst:RequestType>http://schemas.xmlsoap.org/ws/2005/02/trust/Issue</wst:RequestType>" +
" <wsp:AppliesTo>" +
" <wsa:EndpointReference>" +
" <wsa:Address>http://Passport.NET/tb</wsa:Address>" +
" </wsa:EndpointReference>" +
" </wsp:AppliesTo>" +
" </wst:RequestSecurityToken>" +
" <wst:RequestSecurityToken Id='RST1'>" +
" <wst:RequestType>http://schemas.xmlsoap.org/ws/2005/02/trust/Issue</wst:RequestType>" +
" <wsp:AppliesTo>" +
" <wsa:EndpointReference>" +
" <wsa:Address>{5}</wsa:Address>" +
" </wsa:EndpointReference>" +
" </wsp:AppliesTo>" +
" <wsp:PolicyReference URI='LBI_FED_SSL'></wsp:PolicyReference>" +
" </wst:RequestSecurityToken>" +
" </ps:RequestMultipleSecurityTokens>" +
" </s:Body>" +
"</s:Envelope>";
// Create a security timestamp valid for 5 minutes to send with the request.
DateTime now = DateTime.UtcNow;
SecurityTimestamp securityTimestamp = new SecurityTimestamp(now, now.AddMinutes(5), "Timestamp");
// Format the request string to send to the server, filling in all the bits.
string requestToSend = String.Format(
TokenRequest,
this.windowsLiveUrl,
this.windowsLiveId,
this.password,
securityTimestamp.GetCreationTimeChars(),
securityTimestamp.GetExpiryTimeChars(),
uriForTokenEndpointReference.ToString());
// Create and send the request.
HttpWebRequest webRequest = (HttpWebRequest) HttpWebRequest.Create(this.windowsLiveUrl);
webRequest.Method = "POST";
webRequest.ContentType = "text/xml; charset=utf-8";
byte[] requestBytes = Encoding.UTF8.GetBytes(requestToSend);
webRequest.ContentLength = requestBytes.Length;
// NOTE: We're not tracing the request to Windows Live here because it has the user name and
// password in it.
using (Stream requestStream = webRequest.GetRequestStream())
{
requestStream.Write(requestBytes, 0, requestBytes.Length);
}
return (HttpWebResponse)webRequest.GetResponse();
}
/// <summary>
/// Traces the response.
/// </summary>
/// <param name="response">The response.</param>
/// <param name="memoryStream">The response content in a MemoryStream.</param>
private void TraceResponse(HttpWebResponse response, MemoryStream memoryStream)
{
EwsUtilities.Assert(
memoryStream != null,
"WindowsLiveCredentials.TraceResponse",
"memoryStream cannot be null");
if (!this.TraceEnabled)
{
return;
}
if (!string.IsNullOrEmpty(response.ContentType) &&
(response.ContentType.StartsWith("text/", StringComparison.OrdinalIgnoreCase) ||
response.ContentType.StartsWith("application/soap", StringComparison.OrdinalIgnoreCase)))
{
this.traceListener.Trace(
"WindowsLiveResponse",
EwsUtilities.FormatLogMessageWithXmlContent("WindowsLiveResponse", memoryStream));
}
else
{
this.traceListener.Trace(
"WindowsLiveResponse",
"Non-textual response");
}
}
private void TraceWebException(WebException e)
{
// If there wasn't a response, there's nothing to trace.
if (e.Response == null)
{
if (this.TraceEnabled)
{
string logMessage = string.Format(
"Exception Received when sending Windows Live token request: {0}",
e);
this.traceListener.Trace("WindowsLiveResponse", logMessage);
}
return;
}
// If tracing is enabled, we read the entire response into a MemoryStream so that we
// can pass it along to the ITraceListener. Then we parse the response from the
// MemoryStream.
if (this.TraceEnabled)
{
using (MemoryStream memoryStream = new MemoryStream())
{
using (Stream responseStream = e.Response.GetResponseStream())
{
// Copy response to in-memory stream and reset position to start.
EwsUtilities.CopyStream(responseStream, memoryStream);
memoryStream.Position = 0;
}
this.TraceResponse((HttpWebResponse) e.Response, memoryStream);
}
}
}
/// <summary>
/// Makes a request to Windows Live to get a token.
/// </summary>
/// <param name="uriForTokenEndpointReference">URL where token is to be used</param>
private void MakeTokenRequestToWindowsLive(Uri uriForTokenEndpointReference)
{
// Post the request to Windows Live and load the response into an EwsXmlReader for
// processing.
HttpWebResponse response;
try
{
response = this.EmitTokenRequest(uriForTokenEndpointReference);
}
catch (WebException e)
{
if (e.Status == WebExceptionStatus.ProtocolError && e.Response != null)
{
this.TraceWebException(e);
}
else
{
if (this.TraceEnabled)
{
string traceString = string.Format(
"Error occurred sending request - status was {0}, exception {1}",
e.Status,
e);
this.traceListener.Trace(
"WindowsLiveCredentials",
traceString);
}
}
throw new ServiceRequestException(string.Format(Strings.ServiceRequestFailed, e.Message), e);
}
try
{
this.ProcessTokenResponse(response);
}
catch (WebException e)
{
if (this.TraceEnabled)
{
string traceString = string.Format(
"Error occurred sending request - status was {0}, exception {1}",
e.Status,
e);
this.traceListener.Trace(
"WindowsLiveCredentials",
traceString);
}
throw new ServiceRequestException(string.Format(Strings.ServiceRequestFailed, e.Message), e);
}
}
/// <summary>
/// Function that parses the SOAP headers from the response to the RST to Windows Live.
/// </summary>
/// <param name="rstResponse">The Windows Live response, positioned at the beginning of the SOAP headers.</param>
private void ReadWindowsLiveRSTResponseHeaders(EwsXmlReader rstResponse)
{
// Read the beginning of the SOAP header, then go looking for the Passport SOAP fault section...
rstResponse.ReadStartElement(
WindowsLiveSoapNamespacePrefix,
XmlElementNames.SOAPHeaderElementName);
// Attempt to read to the psf:pp element - if at the end of the ReadToDescendant call we're at the
// end element for the SOAP headers, we didn't find it.
rstResponse.ReadToDescendant(XmlNamespace.PassportSoapFault, PpElementName);
if (rstResponse.IsEndElement(WindowsLiveSoapNamespacePrefix, XmlElementNames.SOAPHeaderElementName))
{
// We didn't find the psf:pp element - without that, we don't know what happened -
// something went wrong. Trace and throw.
if (this.TraceEnabled)
{
this.traceListener.Trace(
"WindowsLiveResponse",
"Could not find Passport SOAP fault information in Windows Live response");
}
throw new ServiceRequestException(string.Format(Strings.ServiceRequestFailed, PpElementName));
}
// Now that we've found the psf:pp element, look for the 'reqstatus' element under it. If after
// the ReadToDescendant call we're at the end element for the psf:pp element, we didn't find it.
rstResponse.ReadToDescendant(XmlNamespace.PassportSoapFault, ReqstatusElementName);
if (rstResponse.IsEndElement(XmlNamespace.PassportSoapFault, PpElementName))
{
// We didn't find the "reqstatus" element - without that, we don't know what happened -
// something went wrong. Trace and throw.
if (this.TraceEnabled)
{
this.traceListener.Trace(
"WindowsLiveResponse",
"Could not find reqstatus element in Passport SOAP fault information in Windows Live response");
}
throw new ServiceRequestException(string.Format(Strings.ServiceRequestFailed, ReqstatusElementName));
}
// Now that we've found the reqstatus element, get its value.
string reqstatus = rstResponse.ReadElementValue();
// Read to body tag in both success and failure cases,
// since we need to trace the fault response in failure cases
while (!rstResponse.IsEndElement(WindowsLiveSoapNamespacePrefix, XmlElementNames.SOAPHeaderElementName))
{
rstResponse.Read();
}
if (!string.Equals(reqstatus, SuccessfulReqstatus))
{
// Our request status was non-zero - something went wrong. Trace and throw.
if (this.TraceEnabled)
{
string logMessage = string.Format(
"Received status {0} from Windows Live instead of {1}.",
reqstatus,
SuccessfulReqstatus);
this.traceListener.Trace("WindowsLiveResponse", logMessage);
rstResponse.ReadStartElement(
WindowsLiveSoapNamespacePrefix,
XmlElementNames.SOAPBodyElementName);
// Trace Fault Information
this.traceListener.Trace(
"WindowsLiveResponse",
string.Format(
"Windows Live reported Fault : {0}",
rstResponse.ReadInnerXml()));
}
throw new ServiceRequestException(string.Format(Strings.ServiceRequestFailed, ReqstatusElementName + ": " + reqstatus));
}
}
/// <summary>
/// Function that parses the RSTR from Windows Live and pulls out all the important pieces
/// of data from it.
/// </summary>
/// <param name="rstResponse">The RSTR, positioned at the beginning of the SOAP body.</param>
private void ParseWindowsLiveRSTResponseBody(EwsXmlReader rstResponse)
{
// Read the WS-Trust RequestSecurityTokenResponseCollection node.
rstResponse.ReadStartElement(
XmlNamespace.WSTrustFebruary2005,
RequestSecurityTokenResponseCollectionElementName);
// Skip the first token - our interest is in the second token (the service token).
rstResponse.SkipElement(
XmlNamespace.WSTrustFebruary2005,
RequestSecurityTokenResponseElementName);
// Now process the second token.
rstResponse.ReadStartElement(
XmlNamespace.WSTrustFebruary2005,
RequestSecurityTokenResponseElementName);
while (!rstResponse.IsEndElement(
XmlNamespace.WSTrustFebruary2005,
RequestSecurityTokenResponseElementName))
{
// Watch for the EncryptedData element - when we find it, parse out the appropriate bits of data.
//
// Also watch for the "pp" element in the Passport SOAP fault namespace, which indicates that
// something went wrong with the token request. If we find it, trace and throw accordingly.
if (rstResponse.IsStartElement() &&
(rstResponse.LocalName == EncryptedDataElementName) &&
(rstResponse.NamespaceUri == XmlEncNamespace))
{
this.SecurityToken = rstResponse.ReadOuterXml();
}
else if (rstResponse.IsStartElement(XmlNamespace.PassportSoapFault, PpElementName))
{
if (this.TraceEnabled)
{
string logMessage = string.Format(
"Windows Live reported an error retrieving the token - {0}",
rstResponse.ReadOuterXml());
this.traceListener.Trace("WindowsLiveResponse", logMessage);
}
throw new ServiceRequestException(string.Format(Strings.ServiceRequestFailed, EncryptedDataElementName));
}
// Move to the next bit of data...
rstResponse.Read();
}
// If we didn't find the token, throw.
if (this.SecurityToken == null)
{
if (this.TraceEnabled)
{
string logMessage = string.Format(
"Did not find all required parts of the Windows Live response - " +
"Security Token - {0}",
(this.SecurityToken == null) ? "NOT FOUND" : "found");
this.traceListener.Trace("WindowsLiveResponse", logMessage);
}
throw new ServiceRequestException(string.Format(Strings.ServiceRequestFailed, "No security token found."));
}
// Read past the RequestSecurityTokenResponseCollection end element.
rstResponse.Read();
}
/// <summary>
/// Grabs the issued token information out of a response from Windows Live.
/// </summary>
/// <param name="response">The token response</param>
private void ProcessTokenResponse(HttpWebResponse response)
{
// NOTE: We're not tracing responses here because they contain the actual token information
// from Windows Live.
using (Stream responseStream = response.GetResponseStream())
{
// Always start fresh (nulls in all the data we're going to fill in).
this.SecurityToken = null;
EwsXmlReader rstResponse = new EwsXmlReader(responseStream);
rstResponse.Read(XmlNodeType.XmlDeclaration);
rstResponse.ReadStartElement(
WindowsLiveSoapNamespacePrefix,
XmlElementNames.SOAPEnvelopeElementName);
// Process the SOAP headers from the response.
this.ReadWindowsLiveRSTResponseHeaders(rstResponse);
rstResponse.ReadStartElement(
WindowsLiveSoapNamespacePrefix,
XmlElementNames.SOAPBodyElementName);
// Process the SOAP body from the response.
this.ParseWindowsLiveRSTResponseBody(rstResponse);
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Net.Http;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.AspNetCore.WebUtilities;
using NBitcoin;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using WalletWasabi.Blockchain.Keys;
using WalletWasabi.Helpers;
using WalletWasabi.Tor.Http;
namespace WalletWasabi.WebClients.PayJoin
{
public class PayjoinClient : IPayjoinClient
{
public PayjoinClient(Uri paymentUrl, IHttpClient httpClient)
{
PaymentUrl = paymentUrl;
TorHttpClient = httpClient;
}
public Uri PaymentUrl { get; }
private IHttpClient TorHttpClient { get; }
public async Task<PSBT?> RequestPayjoin(PSBT originalTx, IHDKey accountKey, RootedKeyPath rootedKeyPath, HdPubKey changeHdPubKey, CancellationToken cancellationToken)
{
Guard.NotNull(nameof(originalTx), originalTx);
if (originalTx.IsAllFinalized())
{
throw new InvalidOperationException("The original PSBT should not be finalized.");
}
var optionalParameters = new PayjoinClientParameters();
if (changeHdPubKey is { })
{
var changeOutput = originalTx.Outputs.FirstOrDefault(x => x.ScriptPubKey == changeHdPubKey.P2wpkhScript);
if (changeOutput is PSBTOutput o)
{
optionalParameters.AdditionalFeeOutputIndex = (int)o.Index;
}
}
if (!originalTx.TryGetEstimatedFeeRate(out var originalFeeRate) || !originalTx.TryGetVirtualSize(out var oldVirtualSize))
{
throw new ArgumentException("originalTx should have utxo information", nameof(originalTx));
}
var originalFee = originalTx.GetFee();
            // By default, we want to keep the same fee rate and allow for a single additional input
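            // MaxAdditionalFeeContribution caps what we are willing to pay for the receiver's extra
            // input: the original fee rate applied to the virtual size of a single P2WPKH input.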
optionalParameters.MaxAdditionalFeeContribution = originalFeeRate.GetFee(Helpers.Constants.P2wpkhInputVirtualSize);
optionalParameters.DisableOutputSubstitution = false;
var sentBefore = -originalTx.GetBalance(ScriptPubKeyType.Segwit, accountKey, rootedKeyPath);
var oldGlobalTx = originalTx.GetGlobalTransaction();
var cloned = originalTx.Clone();
if (!cloned.TryFinalize(out var _))
{
return null;
}
// We make sure we don't send unnecessary information to the receiver
foreach (var finalized in cloned.Inputs.Where(i => i.IsFinalized()))
{
finalized.ClearForFinalize();
}
foreach (var output in cloned.Outputs)
{
output.HDKeyPaths.Clear();
}
cloned.GlobalXPubs.Clear();
var endpoint = ApplyOptionalParameters(PaymentUrl, optionalParameters);
var request = new HttpRequestMessage(HttpMethod.Post, endpoint)
{
Content = new StringContent(cloned.ToBase64(), Encoding.UTF8, "text/plain")
};
HttpResponseMessage bpuResponse = await TorHttpClient.SendAsync(request, cancellationToken).ConfigureAwait(false);
if (!bpuResponse.IsSuccessStatusCode)
{
var errorStr = await bpuResponse.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
try
{
var error = JObject.Parse(errorStr);
throw new PayjoinReceiverException((int)bpuResponse.StatusCode,
error["errorCode"].Value<string>(),
error["message"].Value<string>());
}
catch (JsonReaderException)
{
					// Not valid JSON; let EnsureSuccessStatusCode throw the HTTP error instead.
bpuResponse.EnsureSuccessStatusCode();
throw;
}
}
var hexOrBase64 = await bpuResponse.Content.ReadAsStringAsync(cancellationToken).ConfigureAwait(false);
var newPSBT = PSBT.Parse(hexOrBase64, originalTx.Network);
// Checking that the PSBT of the receiver is clean
if (newPSBT.GlobalXPubs.Any())
{
throw new PayjoinSenderException("GlobalXPubs should not be included in the receiver's PSBT");
}
if (newPSBT.Outputs.Any(o => o.HDKeyPaths.Count != 0) || newPSBT.Inputs.Any(o => o.HDKeyPaths.Count != 0))
{
throw new PayjoinSenderException("Keypath information should not be included in the receiver's PSBT");
}
if (newPSBT.CheckSanity() is IList<PSBTError> errors2 && errors2.Count != 0)
{
throw new PayjoinSenderException($"The PSBT of the receiver is insane ({errors2[0]})");
}
			// Do not rely on the input order because the payjoin server should shuffle the inputs.
foreach (var input in originalTx.Inputs)
{
var newInput = newPSBT.Inputs.FindIndexedInput(input.PrevOut);
if (newInput is { })
{
newInput.UpdateFrom(input);
newInput.PartialSigs.Clear();
}
}
// We make sure we don't sign things that should not be signed.
foreach (var finalized in newPSBT.Inputs.Where(i => i.IsFinalized()))
{
finalized.ClearForFinalize();
}
			// Clear the receiver's key path information on the outputs, then restore what we know about our own outputs.
foreach (var output in newPSBT.Outputs)
{
output.HDKeyPaths.Clear();
foreach (var originalOutput in originalTx.Outputs)
{
if (output.ScriptPubKey == originalOutput.ScriptPubKey)
{
output.UpdateFrom(originalOutput);
}
}
}
var newGlobalTx = newPSBT.GetGlobalTransaction();
if (newGlobalTx.Version != oldGlobalTx.Version)
{
throw new PayjoinSenderException("The version field of the transaction has been modified");
}
if (newGlobalTx.LockTime != oldGlobalTx.LockTime)
{
throw new PayjoinSenderException("The LockTime field of the transaction has been modified");
}
			// Making sure that none of our inputs are finalized, and that the receiver did not add inputs of ours that were not in the original transaction.
int ourInputCount = 0;
var accountHDScriptPubkey = new HDKeyScriptPubKey(accountKey, ScriptPubKeyType.Segwit);
foreach (var input in newPSBT.Inputs.CoinsFor(accountHDScriptPubkey, accountKey, rootedKeyPath))
{
if (oldGlobalTx.Inputs.FindIndexedInput(input.PrevOut) is IndexedTxIn ourInput)
{
ourInputCount++;
if (input.IsFinalized())
{
throw new PayjoinSenderException("A PSBT input from us should not be finalized");
}
if (newGlobalTx.Inputs[input.Index].Sequence != ourInput.TxIn.Sequence)
{
throw new PayjoinSenderException("The sequence of one of our input has been modified");
}
}
else
{
throw new PayjoinSenderException("The payjoin receiver added some of our own inputs in the proposal");
}
}
foreach (var input in newPSBT.Inputs)
{
if (originalTx.Inputs.FindIndexedInput(input.PrevOut) is null)
{
if (!input.IsFinalized())
{
throw new PayjoinSenderException("The payjoin receiver included a non finalized input");
}
					// Making sure that the receiver's inputs are finalized and use the expected segwit script type
var payjoinInputType = input.GetInputScriptPubKeyType();
if (payjoinInputType is null || payjoinInputType.Value != ScriptPubKeyType.Segwit)
{
throw new PayjoinSenderException("The payjoin receiver included an input that is not the same segwit input type");
}
}
}
// Making sure that the receiver's inputs are finalized
foreach (var input in newPSBT.Inputs)
{
if (originalTx.Inputs.FindIndexedInput(input.PrevOut) is null && !input.IsFinalized())
{
throw new PayjoinSenderException("The payjoin receiver included a non finalized input");
}
}
if (ourInputCount < originalTx.Inputs.Count)
{
throw new PayjoinSenderException("The payjoin receiver removed some of our inputs");
}
// We limit the number of inputs the receiver can add
var addedInputs = newPSBT.Inputs.Count - originalTx.Inputs.Count;
if (originalTx.Inputs.Count < addedInputs)
{
throw new PayjoinSenderException("The payjoin receiver added too much inputs");
}
var sentAfter = -newPSBT.GetBalance(ScriptPubKeyType.Segwit, accountKey, rootedKeyPath);
if (sentAfter > sentBefore)
{
var overPaying = sentAfter - sentBefore;
if (!newPSBT.TryGetEstimatedFeeRate(out var newFeeRate) || !newPSBT.TryGetVirtualSize(out var newVirtualSize))
{
throw new PayjoinSenderException("The payjoin receiver did not include UTXO information to calculate fee correctly");
}
var additionalFee = newPSBT.GetFee() - originalFee;
if (overPaying > additionalFee)
{
throw new PayjoinSenderException("The payjoin receiver is sending more money to himself");
}
if (overPaying > originalFee)
{
throw new PayjoinSenderException("The payjoin receiver is making us pay more than twice the original fee");
}
				// Let's check that the difference is only for the fee and that the fee rate
				// did not change too much
var expectedFee = originalFeeRate.GetFee(newVirtualSize);
// Signing precisely is hard science, give some breathing room for error.
expectedFee += originalFeeRate.GetFee(newPSBT.Inputs.Count * 2);
if (overPaying > (expectedFee - originalFee))
{
throw new PayjoinSenderException("The payjoin receiver increased the fee rate we are paying too much");
}
}
return newPSBT;
}
internal static Uri ApplyOptionalParameters(Uri endpoint, PayjoinClientParameters clientParameters)
{
var parameters = new Dictionary<string, string>
{
{ "v", clientParameters.Version.ToString() }
};
if (clientParameters.AdditionalFeeOutputIndex is int additionalFeeOutputIndex)
{
parameters.Add("additionalfeeoutputindex", additionalFeeOutputIndex.ToString(CultureInfo.InvariantCulture));
}
if (clientParameters.DisableOutputSubstitution is bool disableoutputsubstitution)
{
parameters.Add("disableoutputsubstitution", disableoutputsubstitution ? "true" : "false");
}
if (clientParameters.MaxAdditionalFeeContribution is Money maxAdditionalFeeContribution)
{
parameters.Add("maxadditionalfeecontribution", maxAdditionalFeeContribution.Satoshi.ToString(CultureInfo.InvariantCulture));
}
if (clientParameters.MinFeeRate is FeeRate minFeeRate)
{
parameters.Add("minfeerate", minFeeRate.SatoshiPerByte.ToString(CultureInfo.InvariantCulture));
}
// Remove query from endpoint.
var builder = new UriBuilder(endpoint)
{
Query = ""
};
// Construct final URI.
return new Uri(QueryHelpers.AddQueryString(builder.Uri.AbsoluteUri, parameters));
}
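		// Illustrative sketch (not part of the original client): shows the kind of BIP78 endpoint that
		// ApplyOptionalParameters produces. The base URI and parameter values below are hypothetical.
		internal static Uri BuildExampleEndpointSketch()
		{
			var parameters = new PayjoinClientParameters
			{
				DisableOutputSubstitution = false,
				MaxAdditionalFeeContribution = Money.Satoshis(1500)
			};
			// Produces something like:
			// https://payjoin.example/pj?v=1&disableoutputsubstitution=false&maxadditionalfeecontribution=1500
			return ApplyOptionalParameters(new Uri("https://payjoin.example/pj"), parameters);
		}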
}
}
| |
using Aardvark.Base;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading;
namespace Aardvark.Data.Vrml97
{
/// <summary>
/// Symbol table.
/// </summary>
public static class Vrml97Sym
{
#pragma warning disable 1591
public static readonly Symbol Vrml97 = "Vrml97";
public static readonly Symbol url = "url";
public static readonly Symbol texture = "texture";
public static readonly Symbol name = "name";
public static readonly Symbol filename = "filename";
public static readonly Symbol node = "node";
public static readonly Symbol root = "root";
public static readonly Symbol appearance = "appearance";
public static readonly Symbol material = "material";
public static readonly Symbol textureTransform = "textureTransform";
public static readonly Symbol center = "center";
public static readonly Symbol rotation = "rotation";
public static readonly Symbol scale = "scale";
public static readonly Symbol translation = "translation";
public static readonly Symbol scaleOrientation = "scaleOrientation";
public static readonly Symbol DEF = "DEF";
public static readonly Symbol USE = "USE";
public static readonly Symbol ROUTE = "ROUTE";
public static readonly Symbol NULL = "NULL";
#pragma warning restore 1591
}
/// <summary>
/// Vrml97 parser.
    /// Creates a parse tree from a file or a stream.
///
/// Example:
/// Parser parser = new Parser("myVrmlFile.wrl");
    /// Vrml97Scene scene = parser.Perform();
///
/// </summary>
internal class Parser
{
#region Public interface.
public static Vrml97Scene FromFile(string fileName)
=> new Parser(fileName).Perform();
public static Vrml97Scene FromStream(Stream stream, string fileName)
=> new Parser(stream, fileName).Perform();
/// <summary>
/// Constructs a Parser for the given input stream.
/// In order to actually parse the data, call the
        /// Perform method, which returns a Vrml97Scene containing
/// the parse tree.
/// </summary>
/// <param name="input">Input stream.</param>
/// <param name="fileName"></param>
public Parser(Stream input, string fileName)
{
m_result.TypeName = Vrml97Sym.Vrml97;
m_result[Vrml97Sym.filename] = fileName;
m_tokenizer = new Tokenizer(input);
}
/// <summary>
/// Constructs a Parser for the given file.
/// In order to actually parse the data, call the
        /// Perform method, which returns a Vrml97Scene
/// containing the parse tree.
/// </summary>
/// <param name="fileName">Input filename.</param>
public Parser(string fileName)
{
m_result.TypeName = Vrml97Sym.Vrml97;
m_result[Vrml97Sym.filename] = fileName;
var fs = new FileStream(
fileName,
FileMode.Open, FileAccess.Read, FileShare.Read,
4096, false
);
m_tokenizer = new Tokenizer(fs);
}
/// <summary>
        /// Parses the input data and returns a Vrml97Scene
/// containing the parse tree.
/// </summary>
/// <returns>Parse tree.</returns>
public Vrml97Scene Perform()
{
var root = new List<SymMapBase>();
while (true)
{
try
{
var node = ParseNode(m_tokenizer);
if (node == null) break;
root.Add(node);
Thread.Sleep(0);
}
catch (ParseException e)
{
Console.WriteLine("WARNING: Caught exception while parsing: {0}!", e.Message);
Console.WriteLine("WARNING: Result may contain partial, incorrect or invalid data!");
break;
}
}
m_result[Vrml97Sym.root] = root;
return new Vrml97Scene(m_result);
}
#endregion
#region Node specs.
/** Static constructor. */
static Parser()
{
var SFBool = new FieldParser(ParseSFBool);
//var MFBool = new FieldParser(ParseMFBool);
var SFColor = new FieldParser(ParseSFColor);
var MFColor = new FieldParser(ParseMFColor);
var SFFloat = new FieldParser(ParseSFFloat);
var MFFloat = new FieldParser(ParseMFFloat);
var SFImage = new FieldParser(ParseSFImage);
var SFInt32 = new FieldParser(ParseSFInt32);
var MFInt32 = new FieldParser(ParseMFInt32);
var SFNode = new FieldParser(ParseSFNode);
var MFNode = new FieldParser(ParseMFNode);
var SFRotation = new FieldParser(ParseSFRotation);
var MFRotation = new FieldParser(ParseMFRotation);
var SFString = new FieldParser(ParseSFString);
var MFString = new FieldParser(ParseMFString);
var SFTime = new FieldParser(ParseSFFloat);
//var MFTime = new FieldParser(ParseMFFloat);
var SFVec2f = new FieldParser(ParseSFVec2f);
var MFVec2f = new FieldParser(ParseMFVec2f);
var SFVec3f = new FieldParser(ParseSFVec3f);
var MFVec3f = new FieldParser(ParseMFVec3f);
// Dictionary<string, (FieldParser, object)> fields;
// Lookup table for Vrml97 node types.
// For each node type a NodeParseInfo entry specifies how
// to handle this kind of node.
m_parseInfoMap = new SymbolDict<NodeParseInfo>
{
// DEF
[Vrml97Sym.DEF] = new NodeParseInfo(new NodeParser(ParseDEF)),
// USE
[Vrml97Sym.USE] = new NodeParseInfo(new NodeParser(ParseUSE)),
// ROUTE
[Vrml97Sym.ROUTE] = new NodeParseInfo(new NodeParser(ParseROUTE)),
// NULL
[Vrml97Sym.NULL] = new NodeParseInfo(new NodeParser(ParseNULL))
};
var defaultBBoxCenter = (SFVec3f, (object)V3f.Zero);
var defaultBBoxSize = (SFVec3f, (object)new V3f(-1, -1, -1));
(FieldParser, object) fdd(FieldParser fp, object obj) => (fp, obj);
(FieldParser, object) fd(FieldParser fp) => (fp, null);
// Anchor
m_parseInfoMap["Anchor"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "children", fd(MFNode) },
{ "description", fd(SFString) },
{ "parameter", fd(MFString) },
{ "url", fd(MFString) },
{ "bboxCenter", defaultBBoxCenter},
{ "bboxSize", defaultBBoxSize}
});
// Appearance
m_parseInfoMap["Appearance"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "material", fd(SFNode) },
{ "texture", fd(SFNode) },
{ "textureTransform", fd(SFNode) }
});
// AudioClip
m_parseInfoMap["AudioClip"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "description", fd(SFString) },
{ "loop", fdd(SFBool, false) },
{ "pitch", fdd(SFFloat, 1.0f) },
{ "startTime", fdd(SFTime, 0.0f)},
{ "stopTime", fdd(SFTime, 0.0f)},
{ "url", fd(MFString)}
});
// Background
m_parseInfoMap["Background"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "groundAngle", fd(MFFloat) },
{ "groundColor", fd(MFColor) },
{ "backUrl", fd(MFString) },
{ "bottomUrl", fd(MFString) },
{ "frontUrl", fd(MFString) },
{ "leftUrl", fd(MFString) },
{ "rightUrl", fd(MFString) },
{ "topUrl", fd(MFString) },
{ "skyAngle", fd(MFFloat) },
{ "skyColor", fdd(MFColor, C3f.Black) }
});
// Billboard
m_parseInfoMap["Billboard"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "axisOfRotation", fdd(SFVec3f, new V3f(0.0f, 1.0f, 0.0f)) },
{ "children", fd(MFNode) },
{ "bboxCenter", defaultBBoxCenter},
{ "bboxSize", defaultBBoxSize}
});
// Box
m_parseInfoMap["Box"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "size", fdd(SFVec3f, new V3f(2.0f, 2.0f, 2.0f)) }
});
// Collision
m_parseInfoMap["Collision"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "children", fd(MFNode) },
{ "collide", fdd(SFBool, true) },
{ "bboxCenter", defaultBBoxCenter},
{ "bboxSize", defaultBBoxSize},
{ "proxy", fd(SFNode) }
});
// Color
m_parseInfoMap["Color"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "color", fd(MFColor) }
});
// ColorInterpolator
m_parseInfoMap["ColorInterpolator"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "key", fd(MFFloat) },
{ "keyValue", fd(MFColor) }
});
// Cone
m_parseInfoMap["Cone"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "bottomRadius", fdd(SFFloat, 1.0f) },
{ "height", fdd(SFFloat, 2.0f) },
{ "side", fdd(SFBool, true) },
{ "bottom", fdd(SFBool, true) }
});
// Coordinate
m_parseInfoMap["Coordinate"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "point", fd(MFVec3f) }
});
// CoordinateInterpolator
m_parseInfoMap["CoordinateInterpolator"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "key", fd(MFFloat) },
{ "keyValue", fd(MFVec3f) }
});
// Cylinder
m_parseInfoMap["Cylinder"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "bottom", fdd(SFBool, true) },
{ "height", fdd(SFFloat, 2.0f) },
{ "radius", fdd(SFFloat, 1.0f) },
{ "side", fdd(SFBool, true) },
{ "top", fdd(SFBool, true) }
});
// CylinderSensor
m_parseInfoMap["CylinderSensor"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "autoOffset", fdd(SFBool, true) },
{ "diskAngle", fdd(SFFloat, 0.262f) },
{ "enabled", fdd(SFBool, true) },
{ "maxAngle", fdd(SFFloat, -1.0f) },
{ "minAngle", fdd(SFFloat, 0.0f) },
{ "offset", fdd(SFFloat, 0.0f) }
});
// DirectionalLight
m_parseInfoMap["DirectionalLight"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "ambientIntensity", fdd(SFFloat, 0.0f) },
{ "color", fdd(SFColor, C3f.White) },
{ "direction", fdd(SFVec3f, new V3f(0.0f, 0.0f, -1.0f)) },
{ "intensity", fdd(SFFloat, 1.0f) },
{ "on", fdd(SFBool, true) }
});
// ElevationGrid
m_parseInfoMap["ElevationGrid"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "color", fd(SFNode) },
{ "normal", fd(SFNode) },
{ "texCoord", fd(SFNode) },
{ "height", fd(MFFloat) },
{ "ccw", fdd(SFBool, true) },
{ "colorPerVertex", fdd(SFBool, true) },
{ "creaseAngle", fdd(SFFloat, 0.0f) },
{ "normalPerVertex", fdd(SFBool, true) },
{ "solid", fdd(SFBool, true) },
{ "xDimension", fdd(SFInt32, 0) },
{ "xSpacing", fdd(SFFloat, 1.0f) },
{ "zDimension", fdd(SFInt32, 0) },
{ "zSpacing", fdd(SFFloat, 1.0f) }
});
// Extrusion
m_parseInfoMap["Extrusion"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "beginCap", fdd(SFBool, true) },
{ "ccw", fdd(SFBool, true) },
{ "convex", fdd(SFBool, true) },
{ "creaseAngle", fdd(SFFloat, 0.0f) },
{ "crossSection", fdd(MFVec2f, new List<V2f>() {new V2f(1.0f, 1.0f), new V2f(1.0f, -1.0f), new V2f(-1.0f, -1.0f), new V2f(-1.0f, 1.0f), new V2f(1.0f, 1.0f) }) },
{ "endCap", fdd(SFBool, true) },
{ "orientation", fdd(MFRotation, new V4f(0.0f, 0.0f, 1.0f, 0.0f)) },
{ "scale", fdd(MFVec2f, new V2f(1.0f, 1.0f)) },
{ "solid", fdd(SFBool, true) },
{ "spine", fdd(MFVec3f, new List<V3f>() { V3f.Zero, new V3f(0.0f, 1.0f, 0.0f) }) }
});
// Fog
m_parseInfoMap["Fog"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "color", fdd(SFColor, C3f.White) },
{ "fogType", fdd(SFString, "LINEAR") },
{ "visibilityRange", fdd(SFFloat, 0.0f) }
});
// FontStyle
m_parseInfoMap["FontStyle"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "family", fdd(MFString, "SERIF") },
{ "horizontal", fdd(SFBool, true) },
{ "justify", fdd(MFString, "BEGIN") },
{ "language", fd(SFString) },
{ "leftToRight", fdd(SFBool, true) },
{ "size", fdd(SFFloat, 1.0f) },
{ "spacing", fdd(SFFloat, 1.0f) },
{ "style", fdd(SFString, "PLAIN") },
{ "topToBottom", fdd(SFBool, true) }
});
// Group
m_parseInfoMap["Group"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "children", fd(MFNode) },
{ "bboxCenter", defaultBBoxCenter },
{ "bboxSize", defaultBBoxSize }
});
// ImageTexture
m_parseInfoMap["ImageTexture"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "url", fd(MFString) },
{ "repeatS", fdd(SFBool, true) },
{ "repeatT", fdd(SFBool, true) }
});
// IndexedFaceSet
m_parseInfoMap["IndexedFaceSet"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "color", fd(SFNode) },
{ "coord", fd(SFNode) },
{ "normal", fd(SFNode) },
{ "texCoord", fd(SFNode) },
{ "ccw", fdd(SFBool, true) },
{ "colorIndex", fd(MFInt32) },
{ "colorPerVertex", fdd(SFBool, true) },
{ "convex", fdd(SFBool, true) },
{ "coordIndex", fd(MFInt32) },
{ "creaseAngle", fdd(SFFloat, 0.0f) },
{ "normalIndex", fd(MFInt32) },
{ "normalPerVertex", fdd(SFBool, true) },
{ "solid", fdd(SFBool, true) },
{ "texCoordIndex", fd(MFInt32) },
{ "edgeSharpness", fd(MFFloat) },
{ "edgeSharpnessIndex", fd(MFInt32) },
{ "neighborMesh", fd(MFString) },
{ "neighborIndex", fd(MFInt32) },
{ "neighborSide", fd(MFInt32) },
{ "neighborFace", fd(MFInt32) },
{ "meshName", fd(SFString) },
{ "topologyHoles", fd(SFInt32) }
});
// IndexedLineSet
m_parseInfoMap["IndexedLineSet"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "color", fd(SFNode) },
{ "coord", fd(SFNode) },
{ "colorIndex", fd(MFInt32) },
{ "colorPerVertex", fdd(SFBool, true) },
{ "coordIndex", fd(MFInt32) }
});
// Inline
m_parseInfoMap["Inline"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "url", fd(MFString) },
{ "bboxCenter", defaultBBoxCenter },
{ "bboxSize", defaultBBoxSize }
});
// LOD
m_parseInfoMap["LOD"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "level", fd(MFNode) },
{ "center", defaultBBoxCenter },
{ "range", fd(MFFloat) }
});
// Material
m_parseInfoMap["Material"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "ambientIntensity", fdd(SFFloat, 0.2f) },
{ "diffuseColor", fdd(SFColor, new C3f(0.8f, 0.8f, 0.8f)) },
{ "emissiveColor", fdd(SFColor, C3f.Black) },
{ "shininess", fdd(SFFloat, 0.2f) },
{ "specularColor", fdd(SFColor, C3f.Black) },
{ "transparency", fdd(SFFloat, 0.0f) }
});
// MovieTexture
m_parseInfoMap["MovieTexture"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "loop", fdd(SFBool, false) },
{ "speed", fdd(SFFloat, 1.0f) },
{ "startTime", fdd(SFTime, 1.0f) },
{ "stopTime", fdd(SFTime, 1.0f) },
{ "url", fd(MFString) },
{ "repeatS", fdd(SFBool, true) },
{ "repeatT", fdd(SFBool, true) }
});
// NavigationInfo
m_parseInfoMap["NavigationInfo"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "avatarSize", fdd(MFFloat, new List<float>() {0.25f, 1.6f, 0.75f}) },
{ "headlight", fdd(SFBool, true) },
{ "speed", fdd(SFFloat, 1.0f) },
{ "type", fdd(MFString, new List<string>() {"WALK", "ANY"}) },
{ "visibilityLimit", fdd(SFFloat, 0.0f) }
});
// Normal
m_parseInfoMap["Normal"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "vector", fd(MFVec3f) }
});
// NormalInterpolator
m_parseInfoMap["NormalInterpolator"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "key", fd(MFFloat) },
{ "keyValue", fd(MFVec3f) }
});
// OrientationInterpolator
m_parseInfoMap["OrientationInterpolator"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "key", fd(MFFloat) },
{ "keyValue", fd(MFRotation) }
});
// PixelTexture
m_parseInfoMap["PixelTexture"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "image", fdd(SFImage, new List<uint>() {0, 0, 0}) },
{ "repeatS", fdd(SFBool, true) },
{ "repeatT", fdd(SFBool, true) }
});
// PlaneSensor
m_parseInfoMap["PlaneSensor"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "autoOffset", fdd(SFBool, true) },
{ "enabled", fdd(SFBool, true) },
{ "maxPosition", fdd(SFVec2f, new V2f(-1.0f, -1.0f)) },
{ "minPosition", fdd(SFVec2f, V2f.Zero) },
{ "offset", defaultBBoxCenter }
});
// PointLight
m_parseInfoMap["PointLight"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "ambientIntensity", fdd(SFFloat, 0.0f) },
{ "attenuation", fdd(SFVec3f, new V3f(1.0f, 0.0f, 0.0f)) },
{ "color", fdd(SFColor, C3f.White) },
{ "intensity", fdd(SFFloat, 1.0f) },
{ "location", defaultBBoxCenter },
{ "on", fdd(SFBool, true) },
{ "radius", fdd(SFFloat, 100.0f) }
});
// PointSet
m_parseInfoMap["PointSet"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "color", fd(SFNode) },
{ "coord", fd(SFNode) }
});
// PositionInterpolator
m_parseInfoMap["PositionInterpolator"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "key", fd(MFFloat) },
{ "keyValue", fd(MFVec3f) }
});
// ProximitySensor
m_parseInfoMap["ProximitySensor"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "center", defaultBBoxCenter },
{ "size", defaultBBoxCenter },
{ "enabled", fdd(SFBool, true) }
});
// ScalarInterpolator
m_parseInfoMap["ScalarInterpolator"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "key", fd(MFFloat) },
{ "keyValue", fd(MFFloat) }
});
// Script
// skipped
// Shape
m_parseInfoMap["Shape"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "appearance", fd(SFNode) },
{ "geometry", fd(SFNode) },
});
// Sound
m_parseInfoMap["Sound"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "direction", fdd(SFVec3f, new V3f(0.0f, 0.0f, 1.0f)) },
{ "intensity", fdd(SFFloat, 1.0f) },
{ "location", defaultBBoxCenter },
{ "maxBack", fdd(SFFloat, 10.0f) },
{ "maxFront", fdd(SFFloat, 10.0f) },
{ "minBack", fdd(SFFloat, 1.0f) },
{ "minFront", fdd(SFFloat, 1.0f) },
{ "priority", fdd(SFFloat, 0.0f) },
{ "source", fd(SFNode) },
{ "spatialize", fdd(SFBool, true) }
});
// Sphere
m_parseInfoMap["Sphere"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "radius", fdd(SFFloat, 1.0f) }
});
// SphereSensor
m_parseInfoMap["SphereSensor"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "autoOffset", fdd(SFBool, true) },
{ "enabled", fdd(SFBool, true) },
{ "offset", fdd(SFRotation, new V4f(0.0f, 1.0f, 0.0f, 0.0f)) }
});
// SpotLight
m_parseInfoMap["SpotLight"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "ambientIntensity", fdd(SFFloat, 0.0f) },
{ "attenuation", fdd(SFVec3f, new V3f(1.0f, 0.0f, 0.0f)) },
{ "beamWidth", fdd(SFFloat, 1.570796f) },
{ "color", fdd(SFColor, C3f.White) },
{ "cutOffAngle", fdd(SFFloat, 0.785398f) },
{ "direction", fdd(SFVec3f, new V3f(0.0f, 0.0f, -1.0f)) },
{ "intensity", fdd(SFFloat, 1.0f) },
{ "location", defaultBBoxCenter },
{ "on", fdd(SFBool, true) },
{ "radius", fdd(SFFloat, 100.0f) }
});
// Switch
m_parseInfoMap["Switch"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "choice", fd(MFNode) },
{ "whichChoice", fdd(SFInt32, -1) }
});
// Text
m_parseInfoMap["Text"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "string", fd(MFString) },
{ "fontStyle", fd(SFNode) },
{ "length", fd(MFFloat) },
{ "maxExtent", fdd(SFFloat, 0.0f) }
});
// TextureCoordinate
m_parseInfoMap["TextureCoordinate"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "point", fd(MFVec2f) }
});
// TextureTransform
m_parseInfoMap["TextureTransform"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "center", fdd(SFVec2f, V2f.Zero) },
{ "rotation", fdd(SFFloat, 0.0f) },
{ "scale", fdd(SFVec2f, new V2f(1.0f, 1.0f)) },
{ "translation", fdd(SFVec2f, V2f.Zero) }
});
// TimeSensor
m_parseInfoMap["TimeSensor"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "cycleInterval", fdd(SFTime, 1.0f) },
{ "enabled", fdd(SFBool, true) },
{ "loop", fdd(SFBool, false) },
{ "startTime", fdd(SFTime, 0.0f) },
{ "stopTime", fdd(SFTime, 0.0f) }
});
// TouchSensor
m_parseInfoMap["TouchSensor"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "enabled", fdd(SFBool, true) }
});
// Transform
m_parseInfoMap["Transform"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "center", defaultBBoxCenter },
{ "children", fd(MFNode) },
{ "rotation", fdd(SFRotation, new V4f(0.0f, 0.0f, 1.0f, 0.0f)) },
{ "scale", fdd(SFVec3f, new V3f(1.0f, 1.0f, 1.0f)) },
{ "scaleOrientation", fdd(SFRotation, new V4f(0.0f, 0.0f, 1.0f, 0.0f)) },
{ "translation", defaultBBoxCenter },
{ "bboxCenter", defaultBBoxCenter },
{ "bboxSize", defaultBBoxSize }
});
// Viewpoint
m_parseInfoMap["Viewpoint"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "fieldOfView", fdd(SFFloat, 0.785398f) },
{ "jump", fdd(SFBool, true) },
{ "orientation", fdd(SFRotation, new V4f(0.0f, 0.0f, 1.0f, 0.0f)) },
{ "position", fdd(SFVec3f, new V3f(0.0f, 0.0f, 10.0f)) },
{ "description", fd(SFString) }
});
// VisibilitySensor
m_parseInfoMap["VisibilitySensor"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "center", defaultBBoxCenter },
{ "enabled", fdd(SFBool, true) },
{ "size", defaultBBoxCenter }
});
// WorldInfo
m_parseInfoMap["WorldInfo"] = new NodeParseInfo(
new SymbolDict<(FieldParser, object)>()
{
{ "title", fd(SFString) },
{ "info", fd(MFString) }
});
}
private static SymMapBase ParseDEF(Tokenizer t)
{
var result = new SymMapBase();
result["name"] = t.NextNameToken().ToString();
result["node"] = ParseNode(t);
return result;
}
private static SymMapBase ParseUSE(Tokenizer t)
{
var result = new SymMapBase();
result["name"] = t.NextNameToken().ToString();
return result;
}
private static SymMapBase ParseROUTE(Tokenizer t)
{
var result = new SymMapBase();
// nodeNameId.eventOutId
result["out"] = t.NextNameToken().ToString();
// "TO"
t.NextToken();
// nodeNameId.eventInId
result["in"] = t.NextNameToken().ToString();
return result;
}
private static SymMapBase ParseNULL(Tokenizer t) => null;
#endregion
#region Helper functions.
private static object ParseSFBool(Tokenizer t) => t.NextToken().ToBool();
private static List<bool> ParseMFBool(Tokenizer t)
{
var result = new List<bool>();
var token = t.NextToken();
if (token.IsBracketOpen)
{
token = t.NextToken();
while (!token.IsBracketClose)
{
result.Add(token.ToBool());
token = t.NextToken();
}
}
else
{
result.Add(token.ToBool());
}
return result;
}
private static object ParseSFFloat(Tokenizer t) => t.NextToken().ToFloat();
private static List<float> ParseMFFloat(Tokenizer t)
{
var result = new List<float>();
var token = t.NextToken();
if (token.IsBracketOpen)
{
token = t.NextToken();
while (!token.IsBracketClose)
{
result.Add(token.ToFloat());
token = t.NextToken();
}
}
else
{
result.Add(token.ToFloat());
}
return result;
}
private static List<uint> ParseSFImage(Tokenizer t)
{
var result = new List<uint>
{
t.NextToken().ToUInt32(), // width
t.NextToken().ToUInt32(), // height
t.NextToken().ToUInt32() // num components
};
uint imax = result[0] * result[1];
for (uint i = 0; i < imax; i++)
{
result.Add(t.NextToken().ToUInt32());
}
return result;
}
private static object ParseSFInt32(Tokenizer t) => t.NextToken().ToInt32();
private static List<int> ParseMFInt32(Tokenizer t)
{
var result = new List<int>();
var token = t.NextToken();
if (token.IsBracketOpen)
{
token = t.NextToken();
while (!token.IsBracketClose)
{
result.Add(token.ToInt32());
token = t.NextToken();
}
}
else
{
result.Add(token.ToInt32());
}
return result;
}
private static SymMapBase ParseSFNode(Tokenizer t) => ParseNode(t);
private static List<SymMapBase> ParseMFNode(Tokenizer t)
{
var result = new List<SymMapBase>();
var token = t.NextToken();
if (token.IsBracketOpen)
{
token = t.NextToken();
while (!token.IsBracketClose)
{
t.PushBack(token);
result.Add(ParseNode(t));
token = t.NextToken();
}
}
else
{
t.PushBack(token);
result.Add(ParseNode(t));
}
return result;
}
private static object ParseSFRotation(Tokenizer t)
{
var x = t.NextToken().ToFloat();
var y = t.NextToken().ToFloat();
var z = t.NextToken().ToFloat();
var w = t.NextToken().ToFloat();
return new V4f(x, y, z, w);
}
private static List<V4f> ParseMFRotation(Tokenizer t)
{
var result = new List<V4f>();
var token = t.NextToken();
if (token.IsBracketOpen)
{
token = t.NextToken();
while (!token.IsBracketClose)
{
var x = token.ToFloat();
var y = t.NextToken().ToFloat();
var z = t.NextToken().ToFloat();
var w = t.NextToken().ToFloat();
result.Add(new V4f(x, y, z, w));
token = t.NextToken();
}
}
else
{
var x = token.ToFloat();
var y = t.NextToken().ToFloat();
var z = t.NextToken().ToFloat();
var w = t.NextToken().ToFloat();
result.Add(new V4f(x, y, z, w));
}
return result;
}
private static string ParseSFString(Tokenizer t)
=> t.NextToken().GetCheckedUnquotedString();
private static List<string> ParseMFString(Tokenizer t)
{
var result = new List<string>();
var token = t.NextToken();
if (token.IsBracketOpen)
{
token = t.NextToken();
while (!token.IsBracketClose)
{
result.Add(token.GetCheckedUnquotedString());
token = t.NextToken();
}
}
else
{
result.Add(token.GetCheckedUnquotedString());
}
return result;
}
private static object ParseSFVec2f(Tokenizer t)
{
var x = t.NextToken().ToFloat();
var y = t.NextToken().ToFloat();
return new V2f(x, y);
}
private static List<V2f> ParseMFVec2f(Tokenizer t)
{
var result = new List<V2f>();
var token = t.NextToken();
if (token.IsBracketOpen)
{
token = t.NextToken();
while (!token.IsBracketClose)
{
float x = token.ToFloat();
float y = t.NextToken().ToFloat();
result.Add(new V2f(x, y));
token = t.NextToken();
}
}
else
{
float x = token.ToFloat();
float y = t.NextToken().ToFloat();
result.Add(new V2f(x, y));
}
return result;
}
private static object ParseSFVec3f(Tokenizer t)
{
var x = t.NextToken().ToFloat();
var y = t.NextToken().ToFloat();
var z = t.NextToken().ToFloat();
return new V3f(x, y, z);
}
private static List<V3f> ParseMFVec3f(Tokenizer t)
{
var result = new List<V3f>();
var token = t.NextToken();
if (token.IsBracketOpen)
{
token = t.NextToken();
while (!token.IsBracketClose)
{
var x = token.ToFloat();
var y = t.NextToken().ToFloat();
var z = t.NextToken().ToFloat();
result.Add(new V3f(x, y, z));
token = t.NextToken();
}
}
else
{
var x = token.ToFloat();
var y = t.NextToken().ToFloat();
var z = t.NextToken().ToFloat();
result.Add(new V3f(x, y, z));
}
return result;
}
private static object ParseSFColor(Tokenizer t)
{
var r = t.NextToken().ToFloat();
var g = t.NextToken().ToFloat();
var b = t.NextToken().ToFloat();
return new C3f(r, g, b);
}
private static List<C3f> ParseMFColor(Tokenizer t)
{
var result = new List<C3f>();
var token = t.NextToken();
if (token.IsBracketOpen)
{
token = t.NextToken();
while (!token.IsBracketClose)
{
var r = token.ToFloat();
var g = t.NextToken().ToFloat();
var b = t.NextToken().ToFloat();
result.Add(new C3f(r, g, b));
token = t.NextToken();
}
}
else
{
var r = token.ToFloat();
var g = t.NextToken().ToFloat();
var b = t.NextToken().ToFloat();
result.Add(new C3f(r, g, b));
}
return result;
}
private static void ExpectBraceOpen(Tokenizer t)
{
var token = t.NextToken();
if (token.IsBraceOpen) return;
throw new ParseException(
"Token '{' expected. Found " + token.ToString() + " instead!"
);
}
private static void ExpectBraceClose(Tokenizer t)
{
var token = t.NextToken();
if (token.IsBraceClose) return;
throw new ParseException(
"Token '}' expected. Found " + token.ToString() + " instead!"
);
}
#endregion
#region Internal stuff.
private static SymMapBase ParseNode(Tokenizer t)
{
// Next token is expected to be a Vrml97 node type.
var nodeType = t.NextToken().ToString();
if (nodeType == null) return null;
SymMapBase node;
// If a field description is available for this type,
// then use the generic node parser, else use the custom
// parse function.
if (m_parseInfoMap.ContainsKey(nodeType))
{
var info = m_parseInfoMap[nodeType];
node = (info.FieldDefs == null) ?
info.NodeParser(t) :
ParseGenericNode(t, info);
}
else
{
// unknown node type
node = ParseUnknownNode(t);
}
if (node != null)
node.TypeName = nodeType;
return node;
}
/**
* Specifies how to parse a node.
**/
private struct NodeParseInfo
{
private NodeParser m_parseFunction;
public readonly SymbolDict<(FieldParser, object)> FieldDefs;
public NodeParseInfo(NodeParser parseFunction)
: this(parseFunction, null)
{ }
public NodeParseInfo(
SymbolDict<(FieldParser, object)> fields)
: this(null, fields)
{ }
public NodeParseInfo(
NodeParser parseFunction,
SymbolDict<(FieldParser, object)> fields)
{
m_parseFunction = parseFunction;
FieldDefs = fields;
}
public NodeParser NodeParser { get { return m_parseFunction; } }
public FieldParser FieldParser(string fieldName)
{
if (fieldName == "ROUTE") return new FieldParser(ParseROUTE);
return FieldDefs[fieldName].Item1;
}
public object DefaultValue(string fieldName)
{
return FieldDefs[fieldName].Item2;
}
}
private static SymMapBase ParseGenericNode(
Tokenizer t,
NodeParseInfo info
)
{
var result = new SymMapBase();
ExpectBraceOpen(t);
// populate fields with default values
foreach (var kvp in info.FieldDefs)
{
if (kvp.Value.Item2 == null) continue;
result[kvp.Key] = kvp.Value.Item2;
}
Tokenizer.Token token = t.NextToken();
while (!token.IsBraceClose)
{
string fieldName = token.ToString();
result[fieldName] = info.FieldParser(fieldName)(t);
token = t.NextToken();
Thread.Sleep(0);
}
return result;
}
private static SymMapBase ParseUnknownNode(Tokenizer t)
{
ExpectBraceOpen(t);
var level = 1;
var sb = new StringBuilder("{");
do
{
var token = t.NextToken();
sb.Append(" " + token);
if (token.IsBraceOpen) level++;
if (token.IsBraceClose) level--;
}
while (level > 0);
var result = new SymMapBase();
result["unknownNode"] = true;
result["content"] = sb.ToString();
return result;
}
private delegate SymMapBase NodeParser(Tokenizer t);
private delegate object FieldParser(Tokenizer t);
private static SymbolDict<NodeParseInfo> m_parseInfoMap;
private SymMapBase m_result = new SymMapBase();
private Tokenizer m_tokenizer;
#endregion
}
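    // Illustrative sketch (not part of the original parser): shows the public entry points in use with a
    // minimal, hypothetical VRML97 snippet. FromFile(fileName) works the same way for on-disk .wrl files.
    internal static class ParserUsageSketch
    {
        internal static Vrml97Scene ParseInlineSnippet()
        {
            var wrl =
                "#VRML V2.0 utf8\n" +
                "Shape { appearance Appearance { material Material { } } geometry Box { size 2 2 2 } }";
            using (var stream = new MemoryStream(Encoding.UTF8.GetBytes(wrl)))
            {
                return Parser.FromStream(stream, "inline.wrl");
            }
        }
    }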
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*============================================================
**
** Class: DictionaryBase
**
** Purpose: Provides the abstract base class for a
** strongly typed collection of key/value pairs.
**
===========================================================*/
namespace System.Collections
{
// Useful base class for typed read/write collections where items derive from object
[Serializable]
public abstract class DictionaryBase : IDictionary
{
private Hashtable _hashtable;
protected Hashtable InnerHashtable
{
get
{
if (_hashtable == null)
_hashtable = new Hashtable();
return _hashtable;
}
}
protected IDictionary Dictionary
{
get { return (IDictionary)this; }
}
public int Count
{
            // to avoid newing the inner hashtable if no items are ever added
get { return _hashtable == null ? 0 : _hashtable.Count; }
}
bool IDictionary.IsReadOnly
{
get { return InnerHashtable.IsReadOnly; }
}
bool IDictionary.IsFixedSize
{
get { return InnerHashtable.IsFixedSize; }
}
bool ICollection.IsSynchronized
{
get { return InnerHashtable.IsSynchronized; }
}
ICollection IDictionary.Keys
{
get { return InnerHashtable.Keys; }
}
Object ICollection.SyncRoot
{
get { return InnerHashtable.SyncRoot; }
}
ICollection IDictionary.Values
{
get { return InnerHashtable.Values; }
}
public void CopyTo(Array array, int index)
{
InnerHashtable.CopyTo(array, index);
}
object IDictionary.this[object key]
{
get
{
object currentValue = InnerHashtable[key];
OnGet(key, currentValue);
return currentValue;
}
set
{
OnValidate(key, value);
bool keyExists = true;
Object temp = InnerHashtable[key];
if (temp == null)
{
keyExists = InnerHashtable.Contains(key);
}
OnSet(key, temp, value);
InnerHashtable[key] = value;
try
{
OnSetComplete(key, temp, value);
}
catch
{
if (keyExists)
{
InnerHashtable[key] = temp;
}
else
{
InnerHashtable.Remove(key);
}
throw;
}
}
}
bool IDictionary.Contains(object key)
{
return InnerHashtable.Contains(key);
}
void IDictionary.Add(object key, object value)
{
OnValidate(key, value);
OnInsert(key, value);
InnerHashtable.Add(key, value);
try
{
OnInsertComplete(key, value);
}
catch
{
InnerHashtable.Remove(key);
throw;
}
}
public void Clear()
{
OnClear();
InnerHashtable.Clear();
OnClearComplete();
}
void IDictionary.Remove(object key)
{
if (InnerHashtable.Contains(key))
{
Object temp = InnerHashtable[key];
OnValidate(key, temp);
OnRemove(key, temp);
InnerHashtable.Remove(key);
try
{
OnRemoveComplete(key, temp);
}
catch
{
InnerHashtable.Add(key, temp);
throw;
}
}
}
public IDictionaryEnumerator GetEnumerator()
{
return InnerHashtable.GetEnumerator();
}
IEnumerator IEnumerable.GetEnumerator()
{
return InnerHashtable.GetEnumerator();
}
protected virtual object OnGet(object key, object currentValue)
{
return currentValue;
}
protected virtual void OnSet(object key, object oldValue, object newValue)
{
}
protected virtual void OnInsert(object key, object value)
{
}
protected virtual void OnClear()
{
}
protected virtual void OnRemove(object key, object value)
{
}
protected virtual void OnValidate(object key, object value)
{
}
protected virtual void OnSetComplete(object key, object oldValue, object newValue)
{
}
protected virtual void OnInsertComplete(object key, object value)
{
}
protected virtual void OnClearComplete()
{
}
protected virtual void OnRemoveComplete(object key, object value)
{
}
}
}
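// Illustrative sketch (not part of the BCL source above): a minimal strongly typed dictionary built on
// DictionaryBase, using OnValidate to reject unexpected element types before they reach the inner Hashtable.
// The namespace and type names below are hypothetical.
namespace DictionaryBaseSample
{
    using System;
    using System.Collections;

    public class StringToIntDictionary : DictionaryBase
    {
        public int this[string key]
        {
            get { return (int)Dictionary[key]; }
            set { Dictionary[key] = value; }
        }

        public void Add(string key, int value) { Dictionary.Add(key, value); }

        public bool Contains(string key) { return Dictionary.Contains(key); }

        protected override void OnValidate(object key, object value)
        {
            // Reject anything that is not a string->int pair before the base class stores it.
            if (!(key is string)) throw new ArgumentException("Key must be a string.", "key");
            if (!(value is int)) throw new ArgumentException("Value must be an int.", "value");
        }
    }
}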
| |
/// This code was generated by
/// \ / _ _ _| _ _
/// | (_)\/(_)(_|\/| |(/_ v1.0.0
/// / /
/// <summary>
/// WorkflowCumulativeStatisticsResource
/// </summary>
using Newtonsoft.Json;
using System;
using System.Collections.Generic;
using Twilio.Base;
using Twilio.Clients;
using Twilio.Converters;
using Twilio.Exceptions;
using Twilio.Http;
namespace Twilio.Rest.Taskrouter.V1.Workspace.Workflow
{
public class WorkflowCumulativeStatisticsResource : Resource
{
private static Request BuildFetchRequest(FetchWorkflowCumulativeStatisticsOptions options, ITwilioRestClient client)
{
return new Request(
HttpMethod.Get,
Rest.Domain.Taskrouter,
"/v1/Workspaces/" + options.PathWorkspaceSid + "/Workflows/" + options.PathWorkflowSid + "/CumulativeStatistics",
queryParams: options.GetParams(),
headerParams: null
);
}
/// <summary>
/// fetch
/// </summary>
/// <param name="options"> Fetch WorkflowCumulativeStatistics parameters </param>
/// <param name="client"> Client to make requests to Twilio </param>
/// <returns> A single instance of WorkflowCumulativeStatistics </returns>
public static WorkflowCumulativeStatisticsResource Fetch(FetchWorkflowCumulativeStatisticsOptions options,
ITwilioRestClient client = null)
{
client = client ?? TwilioClient.GetRestClient();
var response = client.Request(BuildFetchRequest(options, client));
return FromJson(response.Content);
}
#if !NET35
/// <summary>
/// fetch
/// </summary>
/// <param name="options"> Fetch WorkflowCumulativeStatistics parameters </param>
/// <param name="client"> Client to make requests to Twilio </param>
/// <returns> Task that resolves to A single instance of WorkflowCumulativeStatistics </returns>
public static async System.Threading.Tasks.Task<WorkflowCumulativeStatisticsResource> FetchAsync(FetchWorkflowCumulativeStatisticsOptions options,
ITwilioRestClient client = null)
{
client = client ?? TwilioClient.GetRestClient();
var response = await client.RequestAsync(BuildFetchRequest(options, client));
return FromJson(response.Content);
}
#endif
/// <summary>
/// fetch
/// </summary>
/// <param name="pathWorkspaceSid"> The SID of the Workspace with the resource to fetch </param>
/// <param name="pathWorkflowSid"> Returns the list of Tasks that are being controlled by the Workflow with the
/// specified Sid value </param>
/// <param name="endDate"> Only include usage that occurred on or before this date </param>
/// <param name="minutes"> Only calculate statistics since this many minutes in the past </param>
/// <param name="startDate"> Only calculate statistics from on or after this date </param>
/// <param name="taskChannel"> Only calculate cumulative statistics on this TaskChannel </param>
/// <param name="splitByWaitTime"> A comma separated list of values that describes the thresholds to calculate
/// statistics on </param>
/// <param name="client"> Client to make requests to Twilio </param>
/// <returns> A single instance of WorkflowCumulativeStatistics </returns>
public static WorkflowCumulativeStatisticsResource Fetch(string pathWorkspaceSid,
string pathWorkflowSid,
DateTime? endDate = null,
int? minutes = null,
DateTime? startDate = null,
string taskChannel = null,
string splitByWaitTime = null,
ITwilioRestClient client = null)
{
var options = new FetchWorkflowCumulativeStatisticsOptions(pathWorkspaceSid, pathWorkflowSid){EndDate = endDate, Minutes = minutes, StartDate = startDate, TaskChannel = taskChannel, SplitByWaitTime = splitByWaitTime};
return Fetch(options, client);
}
#if !NET35
/// <summary>
/// fetch
/// </summary>
/// <param name="pathWorkspaceSid"> The SID of the Workspace with the resource to fetch </param>
/// <param name="pathWorkflowSid"> Returns the list of Tasks that are being controlled by the Workflow with the
/// specified Sid value </param>
/// <param name="endDate"> Only include usage that occurred on or before this date </param>
/// <param name="minutes"> Only calculate statistics since this many minutes in the past </param>
/// <param name="startDate"> Only calculate statistics from on or after this date </param>
/// <param name="taskChannel"> Only calculate cumulative statistics on this TaskChannel </param>
/// <param name="splitByWaitTime"> A comma separated list of values that describes the thresholds to calculate
/// statistics on </param>
/// <param name="client"> Client to make requests to Twilio </param>
/// <returns> Task that resolves to A single instance of WorkflowCumulativeStatistics </returns>
public static async System.Threading.Tasks.Task<WorkflowCumulativeStatisticsResource> FetchAsync(string pathWorkspaceSid,
string pathWorkflowSid,
DateTime? endDate = null,
int? minutes = null,
DateTime? startDate = null,
string taskChannel = null,
string splitByWaitTime = null,
ITwilioRestClient client = null)
{
var options = new FetchWorkflowCumulativeStatisticsOptions(pathWorkspaceSid, pathWorkflowSid){EndDate = endDate, Minutes = minutes, StartDate = startDate, TaskChannel = taskChannel, SplitByWaitTime = splitByWaitTime};
return await FetchAsync(options, client);
}
#endif
/// <summary>
/// Converts a JSON string into a WorkflowCumulativeStatisticsResource object
/// </summary>
/// <param name="json"> Raw JSON string </param>
/// <returns> WorkflowCumulativeStatisticsResource object represented by the provided JSON </returns>
public static WorkflowCumulativeStatisticsResource FromJson(string json)
{
// Convert all checked exceptions to Runtime
try
{
return JsonConvert.DeserializeObject<WorkflowCumulativeStatisticsResource>(json);
}
catch (JsonException e)
{
throw new ApiException(e.Message, e);
}
}
/// <summary>
/// The SID of the Account that created the resource
/// </summary>
[JsonProperty("account_sid")]
public string AccountSid { get; private set; }
/// <summary>
/// The average time in seconds between Task creation and acceptance
/// </summary>
[JsonProperty("avg_task_acceptance_time")]
public int? AvgTaskAcceptanceTime { get; private set; }
/// <summary>
/// The beginning of the interval during which these statistics were calculated
/// </summary>
[JsonProperty("start_time")]
public DateTime? StartTime { get; private set; }
/// <summary>
/// The end of the interval during which these statistics were calculated
/// </summary>
[JsonProperty("end_time")]
public DateTime? EndTime { get; private set; }
/// <summary>
/// The total number of Reservations that were created for Workers
/// </summary>
[JsonProperty("reservations_created")]
public int? ReservationsCreated { get; private set; }
/// <summary>
/// The total number of Reservations accepted by Workers
/// </summary>
[JsonProperty("reservations_accepted")]
public int? ReservationsAccepted { get; private set; }
/// <summary>
/// The total number of Reservations that were rejected
/// </summary>
[JsonProperty("reservations_rejected")]
public int? ReservationsRejected { get; private set; }
/// <summary>
/// The total number of Reservations that were timed out
/// </summary>
[JsonProperty("reservations_timed_out")]
public int? ReservationsTimedOut { get; private set; }
/// <summary>
/// The total number of Reservations that were canceled
/// </summary>
[JsonProperty("reservations_canceled")]
public int? ReservationsCanceled { get; private set; }
/// <summary>
/// The total number of Reservations that were rescinded
/// </summary>
[JsonProperty("reservations_rescinded")]
public int? ReservationsRescinded { get; private set; }
/// <summary>
/// A list of objects that describe the Tasks canceled and reservations accepted above and below the specified thresholds
/// </summary>
[JsonProperty("split_by_wait_time")]
public object SplitByWaitTime { get; private set; }
/// <summary>
/// The wait duration statistics for Tasks that were accepted
/// </summary>
[JsonProperty("wait_duration_until_accepted")]
public object WaitDurationUntilAccepted { get; private set; }
/// <summary>
/// The wait duration statistics for Tasks that were canceled
/// </summary>
[JsonProperty("wait_duration_until_canceled")]
public object WaitDurationUntilCanceled { get; private set; }
/// <summary>
/// The total number of Tasks that were canceled
/// </summary>
[JsonProperty("tasks_canceled")]
public int? TasksCanceled { get; private set; }
/// <summary>
/// The total number of Tasks that were completed
/// </summary>
[JsonProperty("tasks_completed")]
public int? TasksCompleted { get; private set; }
/// <summary>
/// The total number of Tasks that entered the Workflow
/// </summary>
[JsonProperty("tasks_entered")]
public int? TasksEntered { get; private set; }
/// <summary>
/// The total number of Tasks that were deleted
/// </summary>
[JsonProperty("tasks_deleted")]
public int? TasksDeleted { get; private set; }
/// <summary>
/// The total number of Tasks that were moved from one queue to another
/// </summary>
[JsonProperty("tasks_moved")]
public int? TasksMoved { get; private set; }
/// <summary>
/// The total number of Tasks that were timed out of their Workflows
/// </summary>
[JsonProperty("tasks_timed_out_in_workflow")]
public int? TasksTimedOutInWorkflow { get; private set; }
/// <summary>
        /// The SID of the Workflow that the statistics were calculated for
/// </summary>
[JsonProperty("workflow_sid")]
public string WorkflowSid { get; private set; }
/// <summary>
/// The SID of the Workspace that contains the Workflow.
/// </summary>
[JsonProperty("workspace_sid")]
public string WorkspaceSid { get; private set; }
/// <summary>
/// The absolute URL of the Workflow statistics resource
/// </summary>
[JsonProperty("url")]
public Uri Url { get; private set; }
private WorkflowCumulativeStatisticsResource()
{
}
}
}
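// Illustrative sketch (not part of the generated resource above): fetching cumulative statistics for a
// Workflow over the last hour. The credentials and SIDs below are placeholders.
namespace Twilio.Rest.Taskrouter.V1.Workspace.Workflow.Samples
{
    using System;

    internal static class WorkflowCumulativeStatisticsSample
    {
        internal static void Run()
        {
            TwilioClient.Init("ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX", "your_auth_token");
            var stats = WorkflowCumulativeStatisticsResource.Fetch(
                pathWorkspaceSid: "WSXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
                pathWorkflowSid: "WWXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX",
                minutes: 60);
            Console.WriteLine("Tasks completed: " + stats.TasksCompleted);
        }
    }
}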
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Diagnostics.Tracing;
using System.Reflection;
using System.Runtime.CompilerServices;
using System.Text;
namespace System.Diagnostics
{
/// <summary>
/// DiagnosticSourceEventSource serves two purposes
///
/// 1) It allows debuggers to inject code via Function evaluation. This is the purpose of the
/// BreakPointWithDebuggerFuncEval function in the 'OnEventCommand' method. Basically even in
/// release code, debuggers can place a breakpoint in this method and then trigger the
/// DiagnosticSourceEventSource via ETW. Thus from outside the process you can get a hook that
    /// is guaranteed to happen BEFORE any DiagnosticSource events (if the process is just starting)
/// or as soon as possible afterward if it is on attach.
///
    /// 2) It provides a 'bridge' that allows DiagnosticSource messages to be forwarded to EventListeners
/// or ETW. You can do this by enabling the Microsoft-Diagnostics-DiagnosticSource with the
    /// 'Events' keyword (for diagnostics purposes, you should also turn on the 'Messages' keyword).
///
    /// This EventSource defines an EventSource argument called 'FilterAndPayloadSpecs' that defines
    /// which DiagnosticSources to enable and what parts of the payload to serialize into the key-value
/// list that will be forwarded to the EventSource. If it is empty, all serializable parts of
/// every DiagnosticSource event will be forwarded (this is NOT recommended for monitoring but
/// can be useful for discovery).
///
    /// The FilterAndPayloadSpecs is one long string with the following structure
///
/// * It is a newline separated list of FILTER_AND_PAYLOAD_SPEC
/// * a FILTER_AND_PAYLOAD_SPEC can be
/// * EVENT_NAME : TRANSFORM_SPECS
/// * EMPTY - turns on all sources with implicit payload elements.
    /// * an EVENT_NAME can be
    /// * DIAGNOSTIC_SOURCE_NAME / DIAGNOSTIC_EVENT_NAME @ EVENT_SOURCE_EVENTNAME - give the name as well as the EventSource event to log it under.
    /// * DIAGNOSTIC_SOURCE_NAME / DIAGNOSTIC_EVENT_NAME
    /// * DIAGNOSTIC_SOURCE_NAME - which wildcards every event in the DiagnosticSource, or
/// * EMPTY - which turns on all sources
    /// * TRANSFORM_SPECS is a semicolon separated list of TRANSFORM_SPEC, each of which can be
/// * - TRANSFORM_SPEC - the '-' indicates that implicit payload elements should be suppressed
/// * VARIABLE_NAME = PROPERTY_SPEC - indicates that a payload element 'VARIABLE_NAME' is created from PROPERTY_SPEC
/// * PROPERTY_SPEC - This is a shortcut where VARIABLE_NAME is the LAST property name
/// * a PROPERTY_SPEC is basically a list of names separated by '.'
/// * PROPERTY_NAME - fetches a property from the DiagnosticSource payload object
    /// * PROPERTY_NAME . PROPERTY_NAME - fetches a sub-property of the object.
///
/// Example1:
///
/// "BridgeTestSource1/TestEvent1:cls_Point_X=cls.Point.X;cls_Point_Y=cls.Point.Y\r\n" +
/// "BridgeTestSource2/TestEvent2:-cls.Url"
///
    /// This indicates that two events should be turned on: the 'TestEvent1' event in BridgeTestSource1 and the
/// 'TestEvent2' in BridgeTestSource2. In the first case, because the transform did not begin with a -
/// any primitive type/string of 'TestEvent1's payload will be serialized into the output. In addition if
    /// there is a property of the payload object called 'cls' which in turn has a property 'Point' which in turn
/// has a property 'X' then that data is also put in the output with the name cls_Point_X. Similarly
/// if cls.Point.Y exists, then that value will also be put in the output with the name cls_Point_Y.
///
/// For the 'BridgeTestSource2/TestEvent2' event, because the - was specified NO implicit fields will be
    /// generated, but if there is a property called 'cls' which has a property 'Url' then that will be placed in
/// the output with the name 'Url' (since that was the last property name used and no Variable= clause was
    /// specified).
///
/// Example:
///
/// "BridgeTestSource1\r\n" +
/// "BridgeTestSource2"
///
/// This will enable all events for the BridgeTestSource1 and BridgeTestSource2 sources. Any string/primitive
/// properties of any of the events will be serialized into the output.
///
/// Example:
///
/// ""
///
    /// This turns on all DiagnosticSources. Any string/primitive properties of any of the events will be serialized
/// into the output. This is not likely to be a good idea as it will be very verbose, but is useful to quickly
/// discover what is available.
///
///
/// * How data is logged in the EventSource
///
    /// By default all data from DiagnosticSources is logged to the DiagnosticSourceEventSource event called 'Event'
/// which has three fields
///
/// string SourceName,
/// string EventName,
    /// IEnumerable[KeyValuePair[string, string]] Arguments
///
/// However to support start-stop activity tracking, there are six other events that can be used
///
/// Activity1Start
/// Activity1Stop
/// Activity2Start
/// Activity2Stop
/// RecursiveActivity1Start
/// RecursiveActivity1Stop
///
/// By using the SourceName/EventName@EventSourceName syntax, you can force particular DiagnosticSource events to
/// be logged with one of these EventSource events. This is useful because the events above have start-stop semantics
/// which means that they create activity IDs that are attached to all logging messages between the start and
/// the stop (see https://blogs.msdn.microsoft.com/vancem/2015/09/14/exploring-eventsource-activity-correlation-and-causation-features/)
///
/// For example the specification
///
/// "MyDiagnosticSource/RequestStart@Activity1Start\r\n" +
/// "MyDiagnosticSource/RequestStop@Activity1Stop\r\n" +
/// "MyDiagnosticSource/SecurityStart@Activity2Start\r\n" +
/// "MyDiagnosticSource/SecurityStop@Activity2Stop\r\n"
///
    /// Defines that RequestStart will be logged with the EventSource Event Activity1Start (and the corresponding stop) which
    /// means that all events caused between these two markers will have an activity ID associated with this start event.
    /// Similarly SecurityStart is mapped to Activity2Start.
///
    /// Note you can map many DiagnosticSource events to the same EventSource Event (e.g. Activity1Start). As long as the
/// activities don't nest, you can reuse the same event name (since the payloads have the DiagnosticSource name which can
/// disambiguate). However if they nest you need to use another EventSource event because the rules of EventSource
/// activities state that a start of the same event terminates any existing activity of the same name.
///
    /// As its name suggests, RecursiveActivity1Start is marked as recursive and thus can be used when the activity can nest with
/// itself. This should not be a 'top most' activity because it is not 'self healing' (if you miss a stop, then the
/// activity NEVER ends).
///
/// See the DiagnosticSourceEventSourceBridgeTest.cs for more explicit examples of using this bridge.
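    ///
    /// * Enabling the bridge from code (illustrative sketch, not part of this class)
    ///
    /// An in-process EventListener can enable the bridge and hand it a FilterAndPayloadSpecs string through
    /// the EnableEvents arguments dictionary. The spec string below is hypothetical, and the dictionary type
    /// is written with [] instead of angle brackets to keep this XML comment well formed:
    ///
    /// protected override void OnEventSourceCreated(EventSource eventSource)
    /// {
    ///     if (eventSource.Name == "Microsoft-Diagnostics-DiagnosticSource")
    ///     {
    ///         var args = new Dictionary[string, string]();
    ///         args["FilterAndPayloadSpecs"] = "MyDiagnosticSource/MyEvent";
    ///         EnableEvents(eventSource, EventLevel.Informational, (EventKeywords)0x2 /* Keywords.Events */, args);
    ///     }
    /// }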
/// </summary>
[EventSource(Name = "Microsoft-Diagnostics-DiagnosticSource")]
internal class DiagnosticSourceEventSource : EventSource
{
public static DiagnosticSourceEventSource Logger = new DiagnosticSourceEventSource();
public class Keywords
{
/// <summary>
/// Indicates diagnostics messages from DiagnosticSourceEventSource should be included.
/// </summary>
public const EventKeywords Messages = (EventKeywords)0x1;
/// <summary>
/// Indicates that all events from all diagnostic sources should be forwarded to the EventSource using the 'Event' event.
/// </summary>
public const EventKeywords Events = (EventKeywords)0x2;
// Some ETW logic does not support passing arguments to the EventProvider. To get around
            // this in common cases, we define some keywords that basically stand in for particular common arguments
// That way at least the common cases can be used by everyone (and it also compresses things).
// We start these keywords at 0x1000. See below for the values these keywords represent
// Because we want all keywords on to still mean 'dump everything by default' we have another keyword
            // IgnoreShortCutKeywords which must be OFF in order for the shortcuts to work thus the all 1s keyword
// still means what you expect.
public const EventKeywords IgnoreShortCutKeywords = (EventKeywords)0x0800;
public const EventKeywords AspNetCoreHosting = (EventKeywords)0x1000;
public const EventKeywords EntityFrameworkCoreCommands = (EventKeywords)0x2000;
};
// Setting AspNetCoreHosting is like having this in the FilterAndPayloadSpecs string
// It turns on basic hosting events.
private readonly string AspNetCoreHostingKeywordValue =
"Microsoft.AspNetCore/Microsoft.AspNetCore.Hosting.BeginRequest@Activity1Start:-" +
"httpContext.Request.Method;" +
"httpContext.Request.Host;" +
"httpContext.Request.Path;" +
"httpContext.Request.QueryString" +
"\n" +
"Microsoft.AspNetCore/Microsoft.AspNetCore.Hosting.EndRequest@Activity1Stop:-";
// Setting EntityFrameworkCoreCommands is like having this in the FilterAndPayloadSpecs string
// It turns on basic SQL commands.
private readonly string EntityFrameworkCoreCommandsKeywordValue =
"Microsoft.EntityFrameworkCore/Microsoft.EntityFrameworkCore.BeforeExecuteCommand@Activity2Start:-" +
"Command.Connection.DataSource;" +
"Command.Connection.Database;" +
"Command.CommandText" +
"\n" +
"Microsoft.EntityFrameworkCore/Microsoft.EntityFrameworkCore.AfterExecuteCommand@Activity2Stop:-";
/// <summary>
/// Used to send ad-hoc diagnostics to humans.
/// </summary>
[Event(1, Keywords = Keywords.Messages)]
public void Message(string Message)
{
WriteEvent(1, Message);
}
#if !NO_EVENTSOURCE_COMPLEX_TYPE_SUPPORT
/// <summary>
/// Events from DiagnosticSource can be forwarded to EventSource using this event.
/// </summary>
[Event(2, Keywords = Keywords.Events)]
private void Event(string SourceName, string EventName, IEnumerable<KeyValuePair<string, string>> Arguments)
{
WriteEvent(2, SourceName, EventName, Arguments);
}
#endif
/// <summary>
/// This is only used on V4.5 systems that don't have the ability to log KeyValuePairs directly.
/// It will eventually go away, but we should always reserve the ID for this.
/// </summary>
[Event(3, Keywords = Keywords.Events)]
private void EventJson(string SourceName, string EventName, string ArgumentsJson)
{
WriteEvent(3, SourceName, EventName, ArgumentsJson);
}
#if !NO_EVENTSOURCE_COMPLEX_TYPE_SUPPORT
/// <summary>
/// Used to mark the beginning of an activity
/// </summary>
[Event(4, Keywords = Keywords.Events)]
private void Activity1Start(string SourceName, string EventName, IEnumerable<KeyValuePair<string, string>> Arguments)
{
WriteEvent(4, SourceName, EventName, Arguments);
}
/// <summary>
/// Used to mark the end of an activity
/// </summary>
[Event(5, Keywords = Keywords.Events)]
private void Activity1Stop(string SourceName, string EventName, IEnumerable<KeyValuePair<string, string>> Arguments)
{
WriteEvent(5, SourceName, EventName, Arguments);
}
/// <summary>
/// Used to mark the beginning of an activity
/// </summary>
[Event(6, Keywords = Keywords.Events)]
private void Activity2Start(string SourceName, string EventName, IEnumerable<KeyValuePair<string, string>> Arguments)
{
WriteEvent(6, SourceName, EventName, Arguments);
}
/// <summary>
/// Used to mark the end of an activity
/// </summary>
[Event(7, Keywords = Keywords.Events)]
private void Activity2Stop(string SourceName, string EventName, IEnumerable<KeyValuePair<string, string>> Arguments)
{
WriteEvent(7, SourceName, EventName, Arguments);
}
/// <summary>
/// Used to mark the beginning of an activity that can be recursive.
/// </summary>
[Event(8, Keywords = Keywords.Events, ActivityOptions = EventActivityOptions.Recursive)]
private void RecursiveActivity1Start(string SourceName, string EventName, IEnumerable<KeyValuePair<string, string>> Arguments)
{
WriteEvent(8, SourceName, EventName, Arguments);
}
/// <summary>
/// Used to mark the end of an activity that can be recursive.
/// </summary>
[Event(9, Keywords = Keywords.Events, ActivityOptions = EventActivityOptions.Recursive)]
private void RecursiveActivity1Stop(string SourceName, string EventName, IEnumerable<KeyValuePair<string, string>> Arguments)
{
WriteEvent(9, SourceName, EventName, Arguments);
}
#endif
/// <summary>
/// Fires when a new DiagnosticSource becomes available.
/// </summary>
/// <param name="SourceName"></param>
[Event(10, Keywords = Keywords.Events)]
private void NewDiagnosticListener(string SourceName)
{
WriteEvent(10, SourceName);
}
#region private
#if NO_EVENTSOURCE_COMPLEX_TYPE_SUPPORT
/// <summary>
/// Converts a keyvalue bag to JSON. Only used on V4.5 EventSources.
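/// For example (illustrative values), the pairs ("a","1") and ("b","x") become {"a":"1","b":"x"},
/// spread over several lines.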
/// </summary>
private static string ToJson(IEnumerable<KeyValuePair<string, string>> keyValues)
{
StringBuilder sb = new StringBuilder();
sb.AppendLine("{");
bool first = true;
foreach (var keyValue in keyValues)
{
if (!first)
sb.Append(',').AppendLine();
first = false;
sb.Append('"').Append(keyValue.Key).Append("\":\"");
// Write out the value characters, escaping things as needed.
foreach(var c in keyValue.Value)
{
if (Char.IsControl(c))
{
if (c == '\n')
sb.Append("\\n");
else if (c == '\r')
sb.Append("\\r");
else
sb.Append("\\u").Append(((int)c).ToString("x").PadLeft(4, '0'));
}
else
{
if (c == '"' || c == '\\')
sb.Append('\\');
sb.Append(c);
}
}
sb.Append('"'); // Close the string.
}
sb.AppendLine().AppendLine("}");
return sb.ToString();
}
#endif
#if !NO_EVENTSOURCE_COMPLEX_TYPE_SUPPORT
/// <summary>
/// This constructor uses EventSourceSettings which is only available on V4.6 and above
/// systems. We use the EventSourceSettings to turn on support for complex types.
/// </summary>
private DiagnosticSourceEventSource() : base(EventSourceSettings.EtwSelfDescribingEventFormat) { }
#endif
/// <summary>
/// Called when the EventSource gets a command from a EventListener or ETW.
/// </summary>
[NonEvent]
protected override void OnEventCommand(EventCommandEventArgs command)
{
// On every command (which the debugger can force by turning on this EventSource with ETW)
// call a function that the debugger can hook to do an arbitrary func evaluation.
BreakPointWithDebuggerFuncEval();
lock (this)
{
if ((command.Command == EventCommand.Update || command.Command == EventCommand.Enable) &&
IsEnabled(EventLevel.Informational, Keywords.Events))
{
string filterAndPayloadSpecs;
command.Arguments.TryGetValue("FilterAndPayloadSpecs", out filterAndPayloadSpecs);
if (!IsEnabled(EventLevel.Informational, Keywords.IgnoreShortCutKeywords))
{
if (IsEnabled(EventLevel.Informational, Keywords.AspNetCoreHosting))
filterAndPayloadSpecs = NewLineSeparate(filterAndPayloadSpecs, AspNetCoreHostingKeywordValue);
if (IsEnabled(EventLevel.Informational, Keywords.EntityFrameworkCoreCommands))
filterAndPayloadSpecs = NewLineSeparate(filterAndPayloadSpecs, EntityFrameworkCoreCommandsKeywordValue);
}
FilterAndTransform.CreateFilterAndTransformList(ref _specs, filterAndPayloadSpecs, this);
}
else if (command.Command == EventCommand.Update || command.Command == EventCommand.Disable)
{
FilterAndTransform.DestroyFilterAndTransformList(ref _specs);
}
}
}
// trivial helper to allow you to join two strings, the first of which can be null.
private static string NewLineSeparate(string str1, string str2)
{
Debug.Assert(str2 != null);
if (string.IsNullOrEmpty(str1))
return str2;
return str1 + "\n" + str2;
}
#region debugger hooks
private volatile bool _false; // A value that is always false but the compiler does not know this.
/// <summary>
/// A function which is fully interruptible even in release code so we can stop here and
/// do function evaluation in the debugger. Thus this is just a place that is useful
/// for the debugger to place a breakpoint where it can inject code with function evaluation
/// </summary>
[NonEvent, MethodImpl(MethodImplOptions.NoInlining | MethodImplOptions.NoOptimization)]
private void BreakPointWithDebuggerFuncEval()
{
new object(); // This is only here because it helps old desktop runtimes emit a GC safe point at the start of the method
while (_false)
{
_false = false;
}
}
#endregion
#region EventSource hooks
/// <summary>
/// FilterAndTransform represents one transformation specification from a DiagnosticSource
/// to EventSource's 'Event' method. (e.g. MySource/MyEvent:out=prop1.prop2.prop3).
/// Its main method is 'Morph' which takes a DiagnosticSource object and morphs it into
/// a list of string,string key value pairs.
///
/// This class also contains the static CreateFilterAndTransformList and DestroyFilterAndTransformList methods, which
/// simply parse a series of transformation specifications.
/// </summary>
internal class FilterAndTransform
{
/// <summary>
/// Parses filterAndPayloadSpecs which is a list of lines each of which has the form
///
/// DiagnosticSourceName/EventName:PAYLOAD_SPEC
///
/// where PAYLOAD_SPEC is a semicolon-separated list of specifications of the form
///
/// OutputName=Prop1.Prop2.PropN
///
/// into a linked list of FilterAndTransforms that together forward events from the given
/// DiagnosticSources to 'eventSource'. Sets the 'specList' variable to this value
/// (destroying anything that was there previously).
///
/// By default any serializable properties of the payload object are also included
/// in the output payload; however, this feature can be turned off by prefixing the
/// PAYLOAD_SPEC with a '-'.
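///
/// For example (illustrative names only), the two-line spec
///
/// "MySource/MyEvent:OutStr=Prop1.Prop2\n" +
/// "OtherSource/OtherEvent:-Name=Payload.Name"
///
/// forwards MyEvent with an extra OutStr column in addition to the implicit payload properties, and
/// forwards OtherEvent with only the explicitly requested Name column because of the leading '-'.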
/// </summary>
public static void CreateFilterAndTransformList(ref FilterAndTransform specList, string filterAndPayloadSpecs, DiagnosticSourceEventSource eventSource)
{
DestroyFilterAndTransformList(ref specList); // Stop anything that was on before.
if (filterAndPayloadSpecs == null)
filterAndPayloadSpecs = "";
// Points just beyond the last point in the string that has yet to be parsed. Thus we start with the whole string.
int endIdx = filterAndPayloadSpecs.Length;
for (;;)
{
// Skip trailing whitespace.
while (0 < endIdx && Char.IsWhiteSpace(filterAndPayloadSpecs[endIdx - 1]))
--endIdx;
int newlineIdx = filterAndPayloadSpecs.LastIndexOf('\n', endIdx - 1, endIdx);
int startIdx = 0;
if (0 <= newlineIdx)
startIdx = newlineIdx + 1; // starts after the newline, or zero if we don't find one.
// Skip leading whitespace
while (startIdx < endIdx && Char.IsWhiteSpace(filterAndPayloadSpecs[startIdx]))
startIdx++;
specList = new FilterAndTransform(filterAndPayloadSpecs, startIdx, endIdx, eventSource, specList);
endIdx = newlineIdx;
if (endIdx < 0)
break;
}
}
/// <summary>
/// This destroys (turns off) the FilterAndTransform stopping the forwarding started with CreateFilterAndTransformList
/// </summary>
/// <param name="specList"></param>
public static void DestroyFilterAndTransformList(ref FilterAndTransform specList)
{
var curSpec = specList;
specList = null; // Null out the list
while (curSpec != null) // Dispose everything in the list.
{
curSpec.Dispose();
curSpec = curSpec.Next;
}
}
/// <summary>
/// Creates one FilterAndTransform specification from filterAndPayloadSpec starting at 'startIdx' and ending just before 'endIdx'.
/// This FilterAndTransform will subscribe to DiagnosticSources specified by the specification and forward them to 'eventSource'.
/// For convenience, the 'Next' field is set to the 'next' parameter, so you can easily form linked lists.
/// </summary>
public FilterAndTransform(string filterAndPayloadSpec, int startIdx, int endIdx, DiagnosticSourceEventSource eventSource, FilterAndTransform next)
{
#if DEBUG
string spec = filterAndPayloadSpec.Substring(startIdx, endIdx - startIdx);
#endif
Next = next;
_eventSource = eventSource;
string listenerNameFilter = null; // Means WildCard.
string eventNameFilter = null; // Means WildCard.
string activityName = null;
var startTransformIdx = startIdx;
var endEventNameIdx = endIdx;
var colonIdx = filterAndPayloadSpec.IndexOf(':', startIdx, endIdx - startIdx);
if (0 <= colonIdx)
{
endEventNameIdx = colonIdx;
startTransformIdx = colonIdx + 1;
}
// Parse the Source/Event name into listenerNameFilter and eventNameFilter
var slashIdx = filterAndPayloadSpec.IndexOf('/', startIdx, endEventNameIdx - startIdx);
if (0 <= slashIdx)
{
listenerNameFilter = filterAndPayloadSpec.Substring(startIdx, slashIdx - startIdx);
var atIdx = filterAndPayloadSpec.IndexOf('@', slashIdx + 1, endEventNameIdx - slashIdx - 1);
if (0 <= atIdx)
{
activityName = filterAndPayloadSpec.Substring(atIdx + 1, endEventNameIdx - atIdx - 1);
eventNameFilter = filterAndPayloadSpec.Substring(slashIdx + 1, atIdx - slashIdx - 1);
}
else
{
eventNameFilter = filterAndPayloadSpec.Substring(slashIdx + 1, endEventNameIdx - slashIdx - 1);
}
}
else if (startIdx < endEventNameIdx)
{
listenerNameFilter = filterAndPayloadSpec.Substring(startIdx, endEventNameIdx - startIdx);
}
_eventSource.Message("DiagnosticSource: Enabling '" + (listenerNameFilter ?? "*") + "/" + (eventNameFilter ?? "*") + "'");
// If the transform spec begins with a - it means you don't want implicit transforms.
if (startTransformIdx < endIdx && filterAndPayloadSpec[startTransformIdx] == '-')
{
_eventSource.Message("DiagnosticSource: suppressing implicit transforms.");
_noImplicitTransforms = true;
startTransformIdx++;
}
// Parse all the explicit transforms, if present
if (startTransformIdx < endIdx)
{
for (;;)
{
int specStartIdx = startTransformIdx;
int semiColonIdx = filterAndPayloadSpec.LastIndexOf(';', endIdx - 1, endIdx - startTransformIdx);
if (0 <= semiColonIdx)
specStartIdx = semiColonIdx + 1;
// Ignore empty specifications.
if (specStartIdx < endIdx)
{
if (_eventSource.IsEnabled(EventLevel.Informational, Keywords.Messages))
_eventSource.Message("DiagnosticSource: Parsing Explicit Transform '" + filterAndPayloadSpec.Substring(specStartIdx, endIdx - specStartIdx) + "'");
_explicitTransforms = new TransformSpec(filterAndPayloadSpec, specStartIdx, endIdx, _explicitTransforms);
}
if (startTransformIdx == specStartIdx)
break;
endIdx = semiColonIdx;
}
}
Action<string, string, IEnumerable<KeyValuePair<string, string>>> writeEvent = null;
if (activityName != null && activityName.Contains("Activity"))
{
MethodInfo writeEventMethodInfo = typeof(DiagnosticSourceEventSource).GetTypeInfo().GetDeclaredMethod(activityName);
if (writeEventMethodInfo != null)
{
// This looks up the activityName (which needs to be the name of an event on DiagnosticSourceEventSource,
// like Activity1Start) and returns that method. This allows us to have a number of them and this code
// just works.
try
{
writeEvent = (Action<string, string, IEnumerable<KeyValuePair<string, string>>>)
writeEventMethodInfo.CreateDelegate(typeof(Action<string, string, IEnumerable<KeyValuePair<string, string>>>), _eventSource);
}
catch (Exception) { }
}
if (writeEvent == null)
_eventSource.Message("DiagnosticSource: Could not find Event to log Activity " + activityName);
}
if (writeEvent == null)
{
#if !NO_EVENTSOURCE_COMPLEX_TYPE_SUPPORT
writeEvent = _eventSource.Event;
#else
writeEvent = delegate (string sourceName, string eventName, IEnumerable<KeyValuePair<string, string>> arguments)
{
_eventSource.EventJson(sourceName, eventName, ToJson(arguments));
};
#endif
}
// Set up a subscription that watches for the given Diagnostic Sources and events which will call back
// to the EventSource.
_diagnosticsListenersSubscription = DiagnosticListener.AllListeners.Subscribe(new CallbackObserver<DiagnosticListener>(delegate (DiagnosticListener newListener)
{
if (listenerNameFilter == null || listenerNameFilter == newListener.Name)
{
_eventSource.NewDiagnosticListener(newListener.Name);
Predicate<string> eventNameFilterPredicate = null;
if (eventNameFilter != null)
eventNameFilterPredicate = (string eventName) => eventNameFilter == eventName;
var subscription = newListener.Subscribe(new CallbackObserver<KeyValuePair<string, object>>(delegate (KeyValuePair<string, object> evnt)
{
// The filter given to the DiagnosticSource may not work if users don't use 'IsEnabled' as expected.
// Thus we look for any events that may have snuck through and filter them out before forwarding.
if (eventNameFilter != null && eventNameFilter != evnt.Key)
return;
var outputArgs = this.Morph(evnt.Value);
var eventName = evnt.Key;
writeEvent(newListener.Name, eventName, outputArgs);
}), eventNameFilterPredicate);
_liveSubscriptions = new Subscriptions(subscription, _liveSubscriptions);
}
}));
}
private void Dispose()
{
if (_diagnosticsListenersSubscription != null)
{
_diagnosticsListenersSubscription.Dispose();
_diagnosticsListenersSubscription = null;
}
if (_liveSubscriptions != null)
{
var subscr = _liveSubscriptions;
_liveSubscriptions = null;
while (subscr != null)
{
subscr.Subscription.Dispose();
subscr = subscr.Next;
}
}
}
public List<KeyValuePair<string, string>> Morph(object args)
{
// Transform the args into a bag of key-value strings.
var outputArgs = new List<KeyValuePair<string, string>>();
if (args != null)
{
if (!_noImplicitTransforms)
{
Type argType = args.GetType();
if (_expectedArgType != argType)
{
// Figure out the default properties to send on to EventSource. These are all string or primitive properties.
_implicitTransforms = null;
TransformSpec newSerializableArgs = null;
TypeInfo curTypeInfo = argType.GetTypeInfo();
foreach (var property in curTypeInfo.DeclaredProperties)
{
var propertyType = property.PropertyType;
if (propertyType == typeof(string) || propertyType.GetTypeInfo().IsPrimitive)
newSerializableArgs = new TransformSpec(property.Name, 0, property.Name.Length, newSerializableArgs);
}
_expectedArgType = argType;
_implicitTransforms = Reverse(newSerializableArgs);
}
// Fetch all the fields that are already serializable
if (_implicitTransforms != null)
{
for (var serializableArg = _implicitTransforms; serializableArg != null; serializableArg = serializableArg.Next)
outputArgs.Add(serializableArg.Morph(args));
}
}
if (_explicitTransforms != null)
{
for (var explicitTransform = _explicitTransforms; explicitTransform != null; explicitTransform = explicitTransform.Next)
{
var keyValue = explicitTransform.Morph(args);
if (keyValue.Value != null)
outputArgs.Add(keyValue);
}
}
}
return outputArgs;
}
public FilterAndTransform Next;
#region private
// Reverses a linked list (of TransformSpecs) in place.
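// e.g. a list A -> B -> C comes back as C -> B -> A (the 'Next' fields are rewritten in place).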
private static TransformSpec Reverse(TransformSpec list)
{
TransformSpec ret = null;
while (list != null)
{
var next = list.Next;
list.Next = ret;
ret = list;
list = next;
}
return ret;
}
private IDisposable _diagnosticsListenersSubscription; // This is our subscription that listens for new Diagnostic source to appear.
private Subscriptions _liveSubscriptions; // These are the subscriptions that we are currently forwarding to the EventSource.
private bool _noImplicitTransforms; // Listener can say they don't want implicit transforms.
private Type _expectedArgType; // This is the type that '_implicitTransforms' is built for.
private TransformSpec _implicitTransforms; // payload to include because the DiagnosticSource's object fields are already serializable
private TransformSpec _explicitTransforms; // payload to include because the user explicitly indicated how to fetch the field.
private DiagnosticSourceEventSource _eventSource; // Where the data is written to.
#endregion
}
/// <summary>
/// Transform spec represents a string that describes how to extract a piece of data from
/// the DiagnosticSource payload. An example string is OUTSTR=EVENT_VALUE.PROP1.PROP2.PROP3
/// It has a Next field so they can be chained together in a linked list.
/// </summary>
internal class TransformSpec
{
/// <summary>
/// Parses the string 'transformSpec' from startIdx to endIdx (endIdx points just beyond the last considered char).
/// The syntax is ID1=ID2.ID3.ID4 .... Where ID1= is optional.
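/// For example (illustrative names), parsing "Path=Request.Path" sets the output name to "Path" and
/// builds two chained PropertySpecs that fetch 'Request' and then 'Path' from the payload object.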
/// </summary>
public TransformSpec(string transformSpec, int startIdx, int endIdx, TransformSpec next = null)
{
Debug.Assert(transformSpec != null && startIdx < endIdx);
#if DEBUG
string spec = transformSpec.Substring(startIdx, endIdx - startIdx);
#endif
Next = next;
// Pick off the Var=
int equalsIdx = transformSpec.IndexOf('=', startIdx, endIdx - startIdx);
if (0 <= equalsIdx)
{
_outputName = transformSpec.Substring(startIdx, equalsIdx - startIdx);
startIdx = equalsIdx + 1;
}
// Working from back to front, create a PropertySpec for each .ID in the string.
while (startIdx < endIdx)
{
int dotIdx = transformSpec.LastIndexOf('.', endIdx - 1, endIdx - startIdx);
int idIdx = startIdx;
if (0 <= dotIdx)
idIdx = dotIdx + 1;
string propertyName = transformSpec.Substring(idIdx, endIdx - idIdx);
_fetches = new PropertySpec(propertyName, _fetches);
// If the user did not explicitly set a name, it is the last one (first to be processed from the end).
if (_outputName == null)
_outputName = propertyName;
endIdx = dotIdx; // This works even when LastIndexOf returns -1.
}
}
/// <summary>
/// Given the DiagnosticSourcePayload 'obj', compute a key-value pair from it. For example
/// if the spec is OUTSTR=EVENT_VALUE.PROP1.PROP2.PROP3 and the ultimate value of PROP3 is
/// 10 then the return key value pair is KeyValuePair("OUTSTR","10")
/// </summary>
public KeyValuePair<string, string> Morph(object obj)
{
for (PropertySpec cur = _fetches; cur != null; cur = cur.Next)
{
if (obj != null)
obj = cur.Fetch(obj);
}
return new KeyValuePair<string, string>(_outputName, obj?.ToString());
}
/// <summary>
/// A public field that can be used to form a linked list.
/// </summary>
public TransformSpec Next;
#region private
/// <summary>
/// A PropertySpec represents the information needed to fetch a property from
/// an object efficiently. Thus it represents a '.PROP' in a TransformSpec
/// (and a TransformSpec has a list of these).
/// </summary>
internal class PropertySpec
{
/// <summary>
/// Make a new PropertySpec for a property named 'propertyName'.
/// For convenience you can set the 'next' field to form a linked
/// list of PropertySpecs.
/// </summary>
public PropertySpec(string propertyName, PropertySpec next = null)
{
Next = next;
_propertyName = propertyName;
}
/// <summary>
/// Given an object fetch the property that this PropertySpec represents.
/// </summary>
public object Fetch(object obj)
{
Type objType = obj.GetType();
if (objType != _expectedType)
{
var typeInfo = objType.GetTypeInfo();
_fetchForExpectedType = PropertyFetch.FetcherForProperty(typeInfo.GetDeclaredProperty(_propertyName));
_expectedType = objType;
}
return _fetchForExpectedType.Fetch(obj);
}
/// <summary>
/// A public field that can be used to form a linked list.
/// </summary>
public PropertySpec Next;
#region private
/// <summary>
/// PropertyFetch is a helper class. It takes a PropertyInfo and then knows how
/// to efficiently fetch that property from a .NET object (See Fetch method).
/// It hides some slightly complex generic code.
/// </summary>
class PropertyFetch
{
/// <summary>
/// Create a property fetcher from a .NET Reflection PropertyInfo class that
/// represents a property of a particular type.
/// </summary>
public static PropertyFetch FetcherForProperty(PropertyInfo propertyInfo)
{
if (propertyInfo == null)
return new PropertyFetch(); // returns null on any fetch.
var typedPropertyFetcher = typeof(TypedFetchProperty<,>);
var instantiatedTypedPropertyFetcher = typedPropertyFetcher.GetTypeInfo().MakeGenericType(
propertyInfo.DeclaringType, propertyInfo.PropertyType);
return (PropertyFetch)Activator.CreateInstance(instantiatedTypedPropertyFetcher, propertyInfo);
}
/// <summary>
/// Given an object, fetch the property that this PropertyFetch represents.
/// </summary>
public virtual object Fetch(object obj) { return null; }
#region private
private class TypedFetchProperty<TObject, TProperty> : PropertyFetch
{
public TypedFetchProperty(PropertyInfo property)
{
_propertyFetch = (Func<TObject, TProperty>)property.GetMethod.CreateDelegate(typeof(Func<TObject, TProperty>));
}
public override object Fetch(object obj)
{
return _propertyFetch((TObject)obj);
}
private readonly Func<TObject, TProperty> _propertyFetch;
}
#endregion
}
private string _propertyName;
private Type _expectedType;
private PropertyFetch _fetchForExpectedType;
#endregion
}
private string _outputName;
private PropertySpec _fetches;
#endregion
}
/// <summary>
/// CallbackObserver is an adapter class that creates an observer (which you can pass
/// to IObservable.Subscribe), and calls the given callback every time the 'next'
/// operation on the IObserver happens.
/// </summary>
/// <typeparam name="T"></typeparam>
internal class CallbackObserver<T> : IObserver<T>
{
public CallbackObserver(Action<T> callback) { _callback = callback; }
#region private
public void OnCompleted() { }
public void OnError(Exception error) { }
public void OnNext(T value) { _callback(value); }
private Action<T> _callback;
#endregion
}
// A linked list of IObservable subscriptions (which are IDisposable).
// We use this to keep track of the DiagnosticSource subscriptions.
// We use this linked list for thread atomicity
internal class Subscriptions
{
public Subscriptions(IDisposable subscription, Subscriptions next)
{
Subscription = subscription;
Next = next;
}
public IDisposable Subscription;
public Subscriptions Next;
}
#endregion
private FilterAndTransform _specs; // Transformation specifications that indicate which sources/events are forwarded.
#endregion
}
}
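// Illustrative sketch, not part of the original file: a minimal in-process EventListener that turns on
// the DiagnosticSourceEventSource bridge above. The DiagnosticSource name ('MyDiagnosticSource'), the
// event name ('RequestStart') and the payload property ('Request.Path') are hypothetical, and the
// EnableEvents overload that takes an arguments dictionary is assumed to be available (it is not on
// the oldest V4.5 EventSource implementations mentioned above).
namespace DiagnosticSourceEventSourceBridgeExample
{
    using System;
    using System.Collections.Generic;
    using System.Diagnostics.Tracing;

    internal sealed class BridgeListener : EventListener
    {
        protected override void OnEventSourceCreated(EventSource eventSource)
        {
            if (eventSource.Name == "Microsoft-Diagnostics-DiagnosticSource")
            {
                // (EventKeywords)0x2 is Keywords.Events in DiagnosticSourceEventSource above.
                EnableEvents(eventSource, EventLevel.Informational, (EventKeywords)0x2,
                    new Dictionary<string, string>
                    {
                        { "FilterAndPayloadSpecs", "MyDiagnosticSource/RequestStart@Activity1Start:-Request.Path" }
                    });
            }
        }

        protected override void OnEventWritten(EventWrittenEventArgs eventData)
        {
            // Forwarded DiagnosticSource events arrive here as 'Activity1Start', 'Event', etc.
            Console.WriteLine("{0}: {1} payload value(s)", eventData.EventName,
                eventData.Payload == null ? 0 : eventData.Payload.Count);
        }
    }
}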
| |
//
// Test the generated API selectors against typos or non-existing cases
//
// Authors:
// Sebastien Pouliot <[email protected]>
//
// Copyright 2012-2013 Xamarin Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
using System;
using System.Reflection;
using NUnit.Framework;
#if MONOMAC
using MonoMac.Foundation;
using MonoMac.ObjCRuntime;
#else
using MonoTouch.Foundation;
using MonoTouch.ObjCRuntime;
#endif
namespace TouchUnit.Bindings {
public abstract class ApiSelectorTest : ApiBaseTest {
protected int Errors;
// not everything should even be tried
protected virtual bool Skip (Type type)
{
// skip delegate (and other protocol references)
foreach (object ca in type.GetCustomAttributes (false)) {
if (ca is ModelAttribute)
return true;
}
return false;
}
protected virtual bool Skip (Type type, string selectorName)
{
return false;
}
protected virtual bool CheckResponse (bool value, Type actualType, Type declaredType, ref string name)
{
if (value)
return true;
name = actualType.FullName + " : " + name;
return false;
}
[Test]
public void InstanceMethods ()
{
int n = 0;
IntPtr responds_handle = Selector.GetHandle ("instancesRespondToSelector:");
foreach (Type t in Assembly.GetTypes ()) {
if (t.IsNested || !NSObjectType.IsAssignableFrom (t))
continue;
if (Skip (t) || SkipDueToAttribute (t))
continue;
FieldInfo fi = t.GetField ("class_ptr", BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Static);
if (fi == null)
continue; // e.g. *Delegate
IntPtr class_ptr = (IntPtr) fi.GetValue (null);
foreach (var m in t.GetMethods (BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Instance)) {
if (m.DeclaringType != t || SkipDueToAttribute (m))
continue;
foreach (object ca in m.GetCustomAttributes (true)) {
ExportAttribute export = (ca as ExportAttribute);
if (export == null)
continue;
string name = export.Selector;
if (Skip (t, name))
continue;
bool result = Messaging.bool_objc_msgSend_IntPtr (class_ptr, responds_handle, Selector.GetHandle (name));
bool response = CheckResponse (result, t, m.DeclaringType, ref name);
if (!ContinueOnFailure)
Assert.IsTrue (response, name);
else if (!response) {
CheckResponse (result, t, m.DeclaringType, ref name);
Console.WriteLine ("[FAIL] {0}", name);
Errors++;
}
n++;
}
}
}
Assert.AreEqual (0, Errors, "{0} errors found in {1} instance selectors validated", Errors, n);
}
protected virtual void Dispose (NSObject obj, Type type)
{
obj.Dispose ();
}
// funny, this is how I envisioned the instance version... before hitting run :|
protected virtual bool CheckStaticResponse (bool value, Type actualType, Type declaredType, ref string name)
{
if (value)
return true;
name = actualType.FullName + " : " + name;
return false;
}
[Test]
public void StaticMethods ()
{
Errors = 0;
int n = 0;
IntPtr responds_handle = Selector.GetHandle ("respondsToSelector:");
foreach (Type t in Assembly.GetTypes ()) {
if (t.IsNested || !NSObjectType.IsAssignableFrom (t))
continue;
if (Skip (t) || SkipDueToAttribute (t))
continue;
FieldInfo fi = t.GetField ("class_ptr", BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Static);
if (fi == null)
continue; // e.g. *Delegate
IntPtr class_ptr = (IntPtr) fi.GetValue (null);
foreach (var m in t.GetMethods (BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Static)) {
if (SkipDueToAttribute (m))
continue;
foreach (object ca in m.GetCustomAttributes (true)) {
if (ca is ExportAttribute) {
string name = (ca as ExportAttribute).Selector;
bool result = Messaging.bool_objc_msgSend_IntPtr (class_ptr, responds_handle, Selector.GetHandle (name));
bool response = CheckStaticResponse (result, t, m.DeclaringType, ref name);
if (!ContinueOnFailure)
Assert.IsTrue (response, name);
else if (!response) {
Console.WriteLine ("[FAIL] {0}", name);
Errors++;
}
n++;
}
}
}
}
Assert.AreEqual (0, Errors, "{0} errors found in {1} static selectors validated", Errors, n);
}
protected virtual bool HasNoSetter (PropertyInfo p)
{
return false;
}
[Test]
public void MissingSetters ()
{
Errors = 0;
int n = 0;
IntPtr responds_handle = Selector.GetHandle ("instancesRespondToSelector:");
foreach (Type t in Assembly.GetTypes ()) {
if (t.IsNested || !NSObjectType.IsAssignableFrom (t))
continue;
if (Skip (t))
continue;
// static properties
FieldInfo fi = t.GetField ("class_ptr", BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Static);
if (fi == null)
continue; // e.g. *Delegate
IntPtr class_ptr = (IntPtr) fi.GetValue (null);
foreach (var p in t.GetProperties (BindingFlags.NonPublic | BindingFlags.Public | BindingFlags.Static | BindingFlags.Instance)) {
if (p.DeclaringType != t || SkipDueToAttribute (p))
continue;
var mg = p.GetGetMethod ();
var ms = p.GetSetMethod ();
if (HasNoSetter (p) || (mg == null) || (ms != null))
continue;
if (SkipDueToAttribute (mg) || SkipDueToAttribute (ms))
continue;
foreach (object ca in mg.GetCustomAttributes (true)) {
if (ca is ExportAttribute) {
// if getter has [Export] assume it's valid (there's a test for this)
string name = (ca as ExportAttribute).Selector;
if (!Messaging.bool_objc_msgSend_IntPtr (class_ptr, responds_handle, Selector.GetHandle (name)))
continue;
n++;
string setter_selector = String.Format ("set{0}{1}:", Char.ToUpperInvariant (name [0]), name.Substring (1));
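// e.g. a getter exported as "title" produces the candidate setter selector "setTitle:".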
if (LogProgress)
Console.WriteLine ("{0} {1} '{2} {3}' selector: {4}", n, t.Name, mg.IsStatic ? "static" : "instance", p, setter_selector);
bool result = !Messaging.bool_objc_msgSend_IntPtr (class_ptr, responds_handle, Selector.GetHandle (setter_selector));
if (!ContinueOnFailure)
Assert.IsTrue (result, t.Name + " - " + setter_selector);
else if (!result) {
Console.WriteLine ("[FAIL] {0} {1}", t, setter_selector);
Errors++;
}
}
}
}
}
Assert.AreEqual (0, Errors, "{0} potential errors found in {1} setters validated", Errors, n);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.IO;
using System.Diagnostics;
using System.Runtime.InteropServices;
namespace SuperPMICollection
{
public class SpmiException : Exception
{
public SpmiException() : base()
{ }
public SpmiException(string message)
: base(message)
{ }
public SpmiException(string message, Exception innerException)
: base(message, innerException)
{ }
}
internal class Global
{
// Arguments to the program. These should not be touched by Initialize(), as they are set earlier than that.
internal static bool SkipCleanup = false; // Should we skip all cleanup? That is, should we keep all temporary files? Useful for debugging.
// Computed values based on the environment and platform.
internal static bool IsWindows { get; private set; }
internal static bool IsOSX { get; private set; }
internal static bool IsLinux { get; private set; }
internal static string CoreRoot { get; private set; }
internal static string StandaloneJitName { get; private set; }
internal static string CollectorShimName { get; private set; }
internal static string SuperPmiToolName { get; private set; }
internal static string McsToolName { get; private set; }
internal static string JitPath { get; private set; } // Path to the standalone JIT
internal static string SuperPmiPath { get; private set; } // Path to superpmi.exe
internal static string McsPath { get; private set; } // Path to mcs.exe
// Initialize the global state. Don't use a class constructor, because we might throw exceptions
// that we want to catch.
public static void Initialize()
{
string core_root_raw = System.Environment.GetEnvironmentVariable("CORE_ROOT");
if (String.IsNullOrEmpty(core_root_raw))
{
throw new SpmiException("Environment variable CORE_ROOT is not set");
}
try
{
CoreRoot = System.IO.Path.GetFullPath(core_root_raw);
}
catch (Exception ex)
{
throw new SpmiException("Illegal CORE_ROOT environment variable (" + core_root_raw + "), exception: " + ex.Message);
}
IsWindows = RuntimeInformation.IsOSPlatform(OSPlatform.Windows);
IsOSX = RuntimeInformation.IsOSPlatform(OSPlatform.OSX);
IsLinux = RuntimeInformation.IsOSPlatform(OSPlatform.Linux);
if (IsWindows)
{
StandaloneJitName = "clrjit.dll";
CollectorShimName = "superpmi-shim-collector.dll";
SuperPmiToolName = "superpmi.exe";
McsToolName = "mcs.exe";
}
else if (IsLinux)
{
StandaloneJitName = "libclrjit.so";
CollectorShimName = "libsuperpmi-shim-collector.so";
SuperPmiToolName = "superpmi";
McsToolName = "mcs";
}
else if (IsOSX)
{
StandaloneJitName = "libclrjit.dylib";
CollectorShimName = "libsuperpmi-shim-collector.dylib";
SuperPmiToolName = "superpmi";
McsToolName = "mcs";
}
else
{
throw new SpmiException("Unknown platform");
}
JitPath = Path.Combine(CoreRoot, StandaloneJitName);
SuperPmiPath = Path.Combine(CoreRoot, SuperPmiToolName);
McsPath = Path.Combine(CoreRoot, McsToolName);
}
}
internal class SuperPMICollectionClass
{
private static string s_tempDir = null; // Temporary directory where we will put the MC files, MCH files, MCL files, and TOC.
private static string s_baseFailMclFile = null; // Pathname for a temporary .MCL file used for noticing superpmi replay failures against base MCH.
private static string s_finalFailMclFile = null; // Pathname for a temporary .MCL file used for noticing superpmi replay failures against final MCH.
private static string s_baseMchFile = null; // The base .MCH file path
private static string s_cleanMchFile = null; // The clean .MCH file path
private static string s_finalMchFile = null; // The clean thin unique .MCH file path
private static string s_tocFile = null; // The .TOC file path for the clean thin unique .MCH file
private static string s_errors = ""; // Collect non-fatal file delete errors to display at the end of the collection process.
private static bool s_saveFinalMchFile = false; // Should we save the final MCH file, or delete it?
private static void SafeFileDelete(string filePath)
{
try
{
File.Delete(filePath);
}
catch(Exception ex)
{
string err = string.Format("Error deleting file \"{0}\": {1}", filePath, ex.Message);
s_errors += err + System.Environment.NewLine;
Console.Error.WriteLine(err);
}
}
private static void CreateTempDirectory(string tempPath)
{
if (tempPath == null)
{
tempPath = Path.GetTempPath();
}
s_tempDir = Path.Combine(tempPath, Path.GetRandomFileName() + "SPMI");
if (Directory.Exists(s_tempDir))
{
throw new SpmiException("temporary directory already exists: " + s_tempDir);
}
DirectoryInfo di = Directory.CreateDirectory(s_tempDir);
}
private static void ChooseFilePaths(string outputMchPath)
{
s_baseFailMclFile = Path.Combine(s_tempDir, "basefail.mcl");
s_finalFailMclFile = Path.Combine(s_tempDir, "finalfail.mcl");
s_baseMchFile = Path.Combine(s_tempDir, "base.mch");
s_cleanMchFile = Path.Combine(s_tempDir, "clean.mch");
if (outputMchPath == null)
{
s_saveFinalMchFile = false;
s_finalMchFile = Path.Combine(s_tempDir, "final.mch");
s_tocFile = Path.Combine(s_tempDir, "final.mch.mct");
}
else
{
s_saveFinalMchFile = true;
s_finalMchFile = Path.GetFullPath(outputMchPath);
s_tocFile = s_finalMchFile + ".mct";
}
}
private static int RunProgram(string program, string arguments)
{
// If the program is a script, move the program name into the arguments, and run it
// under the appropriate shell.
if (Global.IsWindows)
{
if ((program.LastIndexOf(".bat") != -1) || (program.LastIndexOf(".cmd") != -1))
{
string programArgumentSep = String.IsNullOrEmpty(arguments) ? "" : " ";
arguments = "/c " + program + programArgumentSep + arguments;
program = Environment.GetEnvironmentVariable("ComSpec"); // path to CMD.exe
}
}
else
{
if (program.LastIndexOf(".sh") != -1)
{
string programArgumentSep = String.IsNullOrEmpty(arguments) ? "" : " ";
arguments = "bash " + program + programArgumentSep + arguments;
program = "/usr/bin/env";
}
}
Console.WriteLine("Running: " + program + " " + arguments);
Process p = Process.Start(program, arguments);
p.WaitForExit();
return p.ExitCode;
}
// Run a single test from the coreclr test binary drop.
// This works even if given a test path in Windows file system format (e.g.,
// "c:\foo\bar\runit.cmd") when run on Unix. It converts to Unix path format and replaces
// the ".cmd" with ".sh" before attempting to run the script.
private static void RunTest(string testName)
{
string testDir;
if (Global.IsWindows)
{
int lastIndex = testName.LastIndexOf("\\");
if (lastIndex == -1)
{
throw new SpmiException("test path doesn't have any directory separators? " + testName);
}
testDir = testName.Substring(0, lastIndex);
}
else
{
// Just in case we've been given a test name in Windows format, convert it to Unix format here.
testName = testName.Replace("\\", "/");
testName = testName.Replace(".cmd", ".sh");
testName = testName.Replace(".bat", ".sh");
// The way tests are run on Linux, we might need to do some setup. In particular,
// if the test scripts are copied from Windows, we need to convert line endings
// to Unix line endings, and make the script executable. We can always do this
// more than once. This same transformation is done in runtest.sh.
// Review: RunProgram doesn't seem to work if the program isn't a full path.
RunProgram("/usr/bin/perl", @"-pi -e 's/\r\n|\n|\r/\n/g' " + "\"" + testName + "\"");
RunProgram("/bin/chmod", "+x \"" + testName + "\"");
// Now, figure out how to run the test.
int lastIndex = testName.LastIndexOf("/");
if (lastIndex == -1)
{
throw new SpmiException("test path doesn't have any directory separators? " + testName);
}
testDir = testName.Substring(0, lastIndex);
}
// Run the script in the same directory where the test lives.
string originalDir = Directory.GetCurrentDirectory();
Directory.SetCurrentDirectory(testDir);
try
{
RunProgram(testName, "");
}
finally
{
// Restore the original current directory from before the test run.
Directory.SetCurrentDirectory(originalDir);
}
}
// Run all the programs from the CoreCLR test binary drop we wish to run while collecting MC files.
private static void RunTestProgramsWhileCollecting()
{
// The list of all programs from the CoreCLR repo test binary drop that
// we should run when doing the SuperPMI collection. This is currently a
// hard-coded list of the relative paths within the test build binaries
// directory of the Windows .cmd files used to run a test. For non-Windows
// platforms, the .cmd is replaced by .sh, and the path separator character
// is changed.
//
// TODO: this should probably be loaded dynamically from a .json/.xml file.
//
// Note: We previously used
// JIT\Performance\CodeQuality\Roslyn\CscBench\CscBench.cmd
// but it doesn't currently run on x86 due to this issue: https://github.com/dotnet/coreclr/issues/6844.
string[] SuperPMICollectionTestProgramsList =
{
@"JIT\Performance\CodeQuality\Bytemark\Bytemark\Bytemark.cmd",
@"JIT\Methodical\fp\exgen\10w5d_cs_do\10w5d_cs_do.cmd",
@"JIT\Generics\Coverage\chaos56200037cs\chaos56200037cs.cmd"
};
// Figure out the root of the test binaries directory.
// Perhaps this (or something similar) would be a better way to figure out the binary root dir:
// testBinaryRootDir = System.IO.Path.GetDirectoryName(new Uri(Assembly.GetExecutingAssembly().CodeBase).LocalPath);
string thisTestDir = Directory.GetCurrentDirectory();
int lastIndex = thisTestDir.LastIndexOf("JIT");
if (lastIndex == -1)
{
throw new SpmiException("we expect the current directory when the test is run to be within the JIT test binaries tree, but it is not: " + thisTestDir);
}
string testBinaryRootDir = thisTestDir.Substring(0, lastIndex);
// Run the tests
foreach (string test in SuperPMICollectionTestProgramsList)
{
string testFullPath = Path.Combine(testBinaryRootDir, test);
try
{
RunTest(testFullPath);
}
catch (SpmiException ex)
{
// Ignore failures running the test. We don't really care if they pass or not
// as long as they generate some .MC files. Plus, I'm not sure how confident
// we can be in getting a correct error code.
Console.Error.WriteLine("WARNING: test failed (ignoring): " + ex.Message);
}
}
}
// Run all the programs we wish to run while collecting MC files.
private static void RunProgramsWhileCollecting(string runProgramPath, string runProgramArguments)
{
if (runProgramPath == null)
{
// No program was given to use for collection, so use our default set.
RunTestProgramsWhileCollecting();
}
else
{
RunProgram(runProgramPath, runProgramArguments);
}
}
// Collect MC files:
// a. Set environment variables
// b. Run tests
// c. Un-set environment variables
// d. Check that something was generated
private static void CollectMCFiles(string runProgramPath, string runProgramArguments)
{
// Set environment variables.
Console.WriteLine("Setting environment variables:");
Console.WriteLine(" SuperPMIShimLogPath=" + s_tempDir);
Console.WriteLine(" SuperPMIShimPath=" + Global.JitPath);
Console.WriteLine(" COMPlus_AltJit=*");
Console.WriteLine(" COMPlus_AltJitName=" + Global.CollectorShimName);
Environment.SetEnvironmentVariable("SuperPMIShimLogPath", s_tempDir);
Environment.SetEnvironmentVariable("SuperPMIShimPath", Global.JitPath);
Environment.SetEnvironmentVariable("COMPlus_AltJit", "*");
Environment.SetEnvironmentVariable("COMPlus_AltJitName", Global.CollectorShimName);
RunProgramsWhileCollecting(runProgramPath, runProgramArguments);
// Un-set environment variables
Environment.SetEnvironmentVariable("SuperPMIShimLogPath", "");
Environment.SetEnvironmentVariable("SuperPMIShimPath", "");
Environment.SetEnvironmentVariable("COMPlus_AltJit", "");
Environment.SetEnvironmentVariable("COMPlus_AltJitName", "");
// Did any .mc files get generated?
string[] mcFiles = Directory.GetFiles(s_tempDir, "*.mc");
if (mcFiles.Length == 0)
{
throw new SpmiException("no .mc files generated");
}
}
// Merge MC files:
// mcs -merge <s_baseMchFile> <s_tempDir>\*.mc -recursive
private static void MergeMCFiles()
{
string pattern = Path.Combine(s_tempDir, "*.mc");
RunProgram(Global.McsPath, "-merge " + s_baseMchFile + " " + pattern + " -recursive");
if (!File.Exists(s_baseMchFile))
{
throw new SpmiException("file missing: " + s_baseMchFile);
}
if (!Global.SkipCleanup)
{
// All the individual MC files are no longer necessary, now that we've merged them into the base.mch. Delete them.
string[] mcFiles = Directory.GetFiles(s_tempDir, "*.mc");
foreach (string mcFile in mcFiles)
{
SafeFileDelete(mcFile);
}
}
}
// Create clean MCH file:
// <superPmiPath> -p -f <s_baseFailMclFile> <s_baseMchFile> <jitPath>
// if <s_baseFailMclFile> is non-empty:
// <mcl> -strip <s_baseFailMclFile> <s_baseMchFile> <s_cleanMchFile>
// else:
// s_cleanMchFile = s_baseMchFile // no need to copy; just change string names (and null out s_baseMchFile so we don't try to delete twice)
// del <s_baseFailMclFile>
private static void CreateCleanMCHFile()
{
RunProgram(Global.SuperPmiPath, "-p -f " + s_baseFailMclFile + " " + s_baseMchFile + " " + Global.JitPath);
if (File.Exists(s_baseFailMclFile) && !String.IsNullOrEmpty(File.ReadAllText(s_baseFailMclFile)))
{
RunProgram(Global.McsPath, "-strip " + s_baseMchFile + " " + s_cleanMchFile);
}
else
{
// Instead of stripping the file, just set s_cleanMchFile = s_baseMchFile and
// null out s_baseMchFile so we don't try to delete the same file twice.
// Note that we never use s_baseMchFile after this function is called.
s_cleanMchFile = s_baseMchFile;
s_baseMchFile = null;
}
if (!File.Exists(s_cleanMchFile))
{
throw new SpmiException("file missing: " + s_cleanMchFile);
}
if (!Global.SkipCleanup)
{
if (File.Exists(s_baseFailMclFile))
{
SafeFileDelete(s_baseFailMclFile);
s_baseFailMclFile = null;
}
// The base file is no longer used (unless there was no cleaning done, in which case
// s_baseMchFile has been null-ed and s_cleanMchFile points at the base file).
if ((s_baseMchFile != null) && File.Exists(s_baseMchFile))
{
SafeFileDelete(s_baseMchFile);
s_baseMchFile = null;
}
}
}
// Create a thin unique MCH:
// <mcl> -removeDup -thin <s_cleanMchFile> <s_finalMchFile>
private static void CreateThinUniqueMCH()
{
RunProgram(Global.McsPath, "-removeDup -thin " + s_cleanMchFile + " " + s_finalMchFile);
if (!File.Exists(s_finalMchFile))
{
throw new SpmiException("file missing: " + s_finalMchFile);
}
if (!Global.SkipCleanup)
{
// The clean file is no longer used; delete it.
if ((s_cleanMchFile != null) && File.Exists(s_cleanMchFile))
{
SafeFileDelete(s_cleanMchFile);
s_cleanMchFile = null;
}
}
}
// Create a TOC file:
// <mcl> -toc <s_finalMchFile>
// // check that .mct file was created
private static void CreateTOC()
{
RunProgram(Global.McsPath, "-toc " + s_finalMchFile);
if (!File.Exists(s_tocFile))
{
throw new SpmiException("file missing: " + s_tocFile);
}
}
// Verify the resulting MCH file is error-free when running superpmi against it with the same JIT used for collection.
// <superPmiPath> -p -f <s_finalFailMclFile> <s_finalMchFile> <jitPath>
// if <s_finalFailMclFile> is non-empty:
// // error!
private static void VerifyFinalMCH()
{
RunProgram(Global.SuperPmiPath, "-p -f " + s_finalFailMclFile + " " + s_finalMchFile + " " + Global.JitPath);
if (!File.Exists(s_finalFailMclFile) || !String.IsNullOrEmpty(File.ReadAllText(s_finalFailMclFile)))
{
throw new SpmiException("replay of final file is not error free");
}
if (!Global.SkipCleanup)
{
if (File.Exists(s_finalFailMclFile))
{
SafeFileDelete(s_finalFailMclFile);
s_finalFailMclFile = null;
}
}
}
// Cleanup. If we get here due to a failure of some kind, we want to do full cleanup. If we get here as part
// of normal shutdown processing, we want to keep the s_finalMchFile and s_tocFile if s_saveFinalMchFile == true.
// del <s_baseMchFile>
// del <s_cleanMchFile>
// del <s_finalMchFile>
// del <s_tocFile>
// rmdir <s_tempDir>
private static void Cleanup()
{
if (Global.SkipCleanup)
return;
try
{
if ((s_baseFailMclFile != null) && File.Exists(s_baseFailMclFile))
{
SafeFileDelete(s_baseFailMclFile);
s_baseFailMclFile = null;
}
if ((s_baseMchFile != null) && File.Exists(s_baseMchFile))
{
SafeFileDelete(s_baseMchFile);
s_baseMchFile = null;
}
if ((s_cleanMchFile != null) && File.Exists(s_cleanMchFile))
{
SafeFileDelete(s_cleanMchFile);
s_cleanMchFile = null;
}
if (!s_saveFinalMchFile)
{
// Note that if we fail to create the TOC, but we already
// successfully created the MCH file, and the user wants to
// keep the final result, then we will still keep the final
// MCH file. We'll also keep it if the verify pass fails.
if ((s_finalMchFile != null) && File.Exists(s_finalMchFile))
{
SafeFileDelete(s_finalMchFile);
}
if ((s_tocFile != null) && File.Exists(s_tocFile))
{
SafeFileDelete(s_tocFile);
}
}
if ((s_finalFailMclFile != null) && File.Exists(s_finalFailMclFile))
{
SafeFileDelete(s_finalFailMclFile);
s_finalFailMclFile = null;
}
if ((s_tempDir != null) && Directory.Exists(s_tempDir))
{
Directory.Delete(s_tempDir, /* delete recursively */ true);
}
}
catch (Exception ex)
{
Console.Error.WriteLine("ERROR during cleanup: " + ex.Message);
}
}
public static int Collect(string outputMchPath, string runProgramPath, string runProgramArguments, string tempPath)
{
// Do a basic SuperPMI collect and validation:
// 1. Collect MC files by running a set of sample apps.
// 2. Merge the MC files into a single MCH using "mcs -merge *.mc -recursive".
// 3. Create a clean MCH by running superpmi over the MCH, and using "mcs -strip" to filter
// out any failures (if any).
// 4. Create a thin unique MCH by using "mcs -removeDup -thin".
// 5. Create a TOC using "mcs -toc".
// 6. Verify the resulting MCH file is error-free when running superpmi against it with the
// same JIT used for collection.
//
// MCH files are big. If we don't need them anymore, clean them up right away to avoid
// running out of disk space in disk constrained situations.
string thisTask = "SuperPMI collection and playback";
Console.WriteLine(thisTask + " - BEGIN");
int result = 101; // assume error (!= 100)
try
{
CreateTempDirectory(tempPath);
ChooseFilePaths(outputMchPath);
CollectMCFiles(runProgramPath, runProgramArguments);
MergeMCFiles();
CreateCleanMCHFile();
CreateThinUniqueMCH();
CreateTOC();
VerifyFinalMCH();
// Success!
result = 100;
}
catch (SpmiException ex)
{
Console.Error.WriteLine("ERROR: " + ex.Message);
result = 101;
}
catch (Exception ex)
{
Console.Error.WriteLine("ERROR: unknown exception running collection: " + ex.Message);
result = 101;
}
finally
{
Cleanup();
}
// Re-display the file delete errors, if any, in case they got lost in the output so far.
if (!String.IsNullOrEmpty(s_errors))
{
Console.Error.WriteLine("Non-fatal errors occurred during processing:");
Console.Error.Write(s_errors);
}
if (result == 100)
{
Console.WriteLine(thisTask + " - SUCCESS");
}
else
{
Console.WriteLine(thisTask + " - FAILED");
}
return result;
}
}
internal class Program
{
private static void Usage()
{
// Unfortunately, under CoreCLR, this just gets the path to CoreRun.exe:
// string thisProgram = System.Diagnostics.Process.GetCurrentProcess().MainModule.FileName;
string thisProgram = "superpmicollect";
Console.WriteLine("Usage: {0} [arguments]", thisProgram);
Console.WriteLine(" where [arguments] is zero or more of:");
Console.WriteLine(" -? | -help : Display this help text.");
Console.WriteLine(" -mch <file> : Specify the name of the generated clean/thin/unique MCH file.");
Console.WriteLine(" The MCH file is retained (by default, the final MCH file is deleted).");
Console.WriteLine(" -run <program> [arguments...] : This program (or script) is invoked to run any number");
Console.WriteLine(" of programs during MC collection. All arguments after");
Console.WriteLine(" <program> are passed to <program> as its arguments.");
Console.WriteLine(" Thus, -run must be the last argument.");
Console.WriteLine(" -skipCleanup : Do not delete any intermediate files created during processing.");
Console.WriteLine(" -temp <dir> : A newly created, randomly-named, subdirectory of this");
Console.WriteLine(" directory will be used to store all temporary files.");
Console.WriteLine(" By default, the user temporary directory is used");
Console.WriteLine(" (%TEMP% on Windows, /tmp on Unix).");
Console.WriteLine(" Since SuperPMI collections generate a lot of data, this option");
Console.WriteLine(" is useful if the normal temporary directory doesn't have enough space.");
Console.WriteLine("");
Console.WriteLine("This program performs a collection of SuperPMI data. With no arguments, a hard-coded list of");
Console.WriteLine("programs are run during collection. With the -run argument, the user species which apps are run.");
Console.WriteLine("");
Console.WriteLine("If -mch is not given, all generated files are deleted, and the result is simply the exit code");
Console.WriteLine("indicating whether the collection succeeded. This is useful as a test.");
Console.WriteLine("");
Console.WriteLine("If the COMPlus_AltJit variable is already set, it is assumed SuperPMI collection is already happening,");
Console.WriteLine("and the program exits with success.");
Console.WriteLine("");
Console.WriteLine("On success, the return code is 100.");
}
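// Example invocation (hypothetical paths; not part of the original source). CORE_ROOT must be set, and
// if -run is not given the current directory must be inside the JIT test binaries tree:
//
// corerun superpmicollect.exe -mch C:\collections\base.mch -temp D:\scratch -run C:\tests\runtests.cmd arg1 arg2
//
// Everything after the -run program is passed to that program as its arguments.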
private static int Main(string[] args)
{
string outputMchPath = null;
string runProgramPath = null;
string runProgramArguments = null;
string tempPath = null;
// Parse arguments
if (args.Length > 0)
{
for (int i = 0; i < args.Length; i++)
{
switch (args[i])
{
default:
Usage();
return 101;
case "-?":
Usage();
return 101;
case "-help":
Usage();
return 101;
case "-skipCleanup":
Global.SkipCleanup = true;
break;
case "-mch":
i++;
if (i >= args.Length)
{
Console.Error.WriteLine("Error: missing argument to -mch");
Usage();
return 101;
}
outputMchPath = args[i];
if (!outputMchPath.EndsWith(".mch"))
{
// We need the resulting file to end with ".mch". If the user didn't specify this, then simply add it.
// Thus, if the user specifies "-mch foo", we'll generate foo.mch (and TOC file foo.mch.mct).
outputMchPath += ".mch";
}
outputMchPath = Path.GetFullPath(outputMchPath);
break;
case "-run":
i++;
if (i >= args.Length)
{
Console.Error.WriteLine("Error: missing argument to -run");
Usage();
return 101;
}
runProgramPath = Path.GetFullPath(args[i]);
if (!File.Exists(runProgramPath))
{
Console.Error.WriteLine("Error: couldn't find program {0}", runProgramPath);
return 101;
}
// The rest of the arguments, if any, are passed as arguments to the run program.
i++;
if (i < args.Length)
{
string[] runArgumentsArray = new string[args.Length - i];
for (int j = 0; i < args.Length; i++, j++)
{
runArgumentsArray[j] = args[i];
}
runProgramArguments = string.Join(" ", runArgumentsArray);
}
break;
case "-temp":
i++;
if (i >= args.Length)
{
Console.Error.WriteLine("Error: missing argument to -temp");
Usage();
return 101;
}
tempPath = args[i];
break;
}
}
}
// Done with argument parsing.
string altjitvar = System.Environment.GetEnvironmentVariable("COMPlus_AltJit");
if (!String.IsNullOrEmpty(altjitvar))
{
// Someone already has the COMPlus_AltJit variable set. We don't want to override
// that. Perhaps someone is already doing a SuperPMI collection and invokes this
// program as part of a full test path in which this program exists.
Console.WriteLine("COMPlus_AltJit already exists: skipping SuperPMI collection and returning success");
return 100;
}
int result;
try
{
Global.Initialize();
result = SuperPMICollectionClass.Collect(outputMchPath, runProgramPath, runProgramArguments, tempPath);
}
catch (SpmiException ex)
{
Console.Error.WriteLine("ERROR: " + ex.Message);
result = 101;
}
catch (Exception ex)
{
Console.Error.WriteLine("ERROR: unknown exception running collection: " + ex.Message);
result = 101;
}
return result;
}
}
}
| |
/*
* Copyright 2007 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using System.Collections.Generic;
using System.Text;
using ZXing.Common;
namespace ZXing.QrCode.Internal
{
/// <summary> <p>QR Codes can encode text as bits in one of several modes, and can use multiple modes
/// in one QR Code. This class decodes the bits back into text.</p>
///
/// <p>See ISO 18004:2006, 6.4.3 - 6.4.7</p>
/// <author>Sean Owen</author>
/// </summary>
internal static class DecodedBitStreamParser
{
/// <summary>
/// See ISO 18004:2006, 6.4.4 Table 5
/// </summary>
private static readonly char[] ALPHANUMERIC_CHARS = {
'0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'A', 'B',
'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N',
'O', 'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z',
' ', '$', '%', '*', '+', '-', '.', '/', ':'
};
private const int GB2312_SUBSET = 1;
internal static DecoderResult decode(byte[] bytes,
Version version,
ErrorCorrectionLevel ecLevel,
IDictionary<DecodeHintType, object> hints)
{
var bits = new BitSource(bytes);
var result = new StringBuilder(50);
var byteSegments = new List<byte[]>(1);
var symbolSequence = -1;
var parityData = -1;
try
{
CharacterSetECI currentCharacterSetECI = null;
bool fc1InEffect = false;
Mode mode;
do
{
// While still another segment to read...
if (bits.available() < 4)
{
// OK, assume we're done. Really, a TERMINATOR mode should have been recorded here
mode = Mode.TERMINATOR;
}
else
{
try
{
mode = Mode.forBits(bits.readBits(4)); // mode is encoded by 4 bits
}
catch (ArgumentException)
{
return null;
}
}
if (mode != Mode.TERMINATOR)
{
if (mode == Mode.FNC1_FIRST_POSITION || mode == Mode.FNC1_SECOND_POSITION)
{
// We do little with FNC1 except alter the parsed result a bit according to the spec
fc1InEffect = true;
}
else if (mode == Mode.STRUCTURED_APPEND)
{
if (bits.available() < 16)
{
return null;
}
                     // not really supported; the sequence number and parity are added to the result metadata later
// Read next 8 bits (symbol sequence #) and 8 bits (parity data), then continue
symbolSequence = bits.readBits(8);
parityData = bits.readBits(8);
}
else if (mode == Mode.ECI)
{
// Count doesn't apply to ECI
int value = parseECIValue(bits);
currentCharacterSetECI = CharacterSetECI.getCharacterSetECIByValue(value);
if (currentCharacterSetECI == null)
{
return null;
}
}
else
{
// First handle Hanzi mode which does not start with character count
if (mode == Mode.HANZI)
{
                        // Chinese (Hanzi) mode contains a subset indicator right after the mode indicator
int subset = bits.readBits(4);
int countHanzi = bits.readBits(mode.getCharacterCountBits(version));
if (subset == GB2312_SUBSET)
{
if (!decodeHanziSegment(bits, result, countHanzi))
return null;
}
}
else
{
// "Normal" QR code modes:
// How many characters will follow, encoded in this mode?
int count = bits.readBits(mode.getCharacterCountBits(version));
if (mode == Mode.NUMERIC)
{
if (!decodeNumericSegment(bits, result, count))
return null;
}
else if (mode == Mode.ALPHANUMERIC)
{
if (!decodeAlphanumericSegment(bits, result, count, fc1InEffect))
return null;
}
else if (mode == Mode.BYTE)
{
if (!decodeByteSegment(bits, result, count, currentCharacterSetECI, byteSegments, hints))
return null;
}
else if (mode == Mode.KANJI)
{
if (!decodeKanjiSegment(bits, result, count))
return null;
}
else
{
return null;
}
}
}
}
} while (mode != Mode.TERMINATOR);
}
catch (ArgumentException)
{
// from readBits() calls
return null;
}
#if WindowsCE
var resultString = result.ToString().Replace("\n", "\r\n");
#else
var resultString = result.ToString().Replace("\r\n", "\n").Replace("\n", Environment.NewLine);
#endif
return new DecoderResult(bytes,
resultString,
byteSegments.Count == 0 ? null : byteSegments,
ecLevel == null ? null : ecLevel.ToString(),
symbolSequence, parityData);
}
/// <summary>
/// See specification GBT 18284-2000
/// </summary>
/// <param name="bits">The bits.</param>
/// <param name="result">The result.</param>
/// <param name="count">The count.</param>
/// <returns></returns>
private static bool decodeHanziSegment(BitSource bits,
StringBuilder result,
int count)
{
// Don't crash trying to read more bits than we have available.
if (count * 13 > bits.available())
{
return false;
}
// Each character will require 2 bytes. Read the characters as 2-byte pairs
// and decode as GB2312 afterwards
byte[] buffer = new byte[2 * count];
int offset = 0;
while (count > 0)
{
// Each 13 bits encodes a 2-byte character
int twoBytes = bits.readBits(13);
int assembledTwoBytes = ((twoBytes / 0x060) << 8) | (twoBytes % 0x060);
if (assembledTwoBytes < 0x003BF)
{
// In the 0xA1A1 to 0xAAFE range
assembledTwoBytes += 0x0A1A1;
}
else
{
// In the 0xB0A1 to 0xFAFE range
assembledTwoBytes += 0x0A6A1;
}
buffer[offset] = (byte)((assembledTwoBytes >> 8) & 0xFF);
buffer[offset + 1] = (byte)(assembledTwoBytes & 0xFF);
offset += 2;
count--;
}
try
{
result.Append(Encoding.GetEncoding(StringUtils.GB2312).GetString(buffer, 0, buffer.Length));
}
#if (WINDOWS_PHONE70 || WINDOWS_PHONE71 || SILVERLIGHT4 || SILVERLIGHT5 || NETFX_CORE || MONOANDROID || MONOTOUCH)
catch (ArgumentException)
{
try
{
               // Silverlight only supports a limited number of character sets, so try falling back to UTF-8
result.Append(Encoding.GetEncoding("UTF-8").GetString(buffer, 0, buffer.Length));
}
catch (Exception)
{
return false;
}
}
#endif
catch (Exception)
{
return false;
}
return true;
}
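      // Illustrative sketch, not part of the original ZXing source: each 13-bit Hanzi value packs a
      // GB2312 double byte as (row * 0x60 + column); re-basing by 0xA1A1 or 0xA6A1, as above, restores
      // the raw double-byte code (for example, readBits(13) == 0x0001 yields 0xA1A2). A standalone
      // restatement of that mapping, using the same constants as decodeHanziSegment:
      private static int AssembleGb2312ExampleSketch(int thirteenBits)
      {
         int assembled = ((thirteenBits / 0x060) << 8) | (thirteenBits % 0x060);
         return assembled + (assembled < 0x003BF ? 0x0A1A1 : 0x0A6A1);
      }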
private static bool decodeKanjiSegment(BitSource bits,
StringBuilder result,
int count)
{
// Don't crash trying to read more bits than we have available.
if (count * 13 > bits.available())
{
return false;
}
// Each character will require 2 bytes. Read the characters as 2-byte pairs
// and decode as Shift_JIS afterwards
byte[] buffer = new byte[2 * count];
int offset = 0;
while (count > 0)
{
// Each 13 bits encodes a 2-byte character
int twoBytes = bits.readBits(13);
int assembledTwoBytes = ((twoBytes / 0x0C0) << 8) | (twoBytes % 0x0C0);
if (assembledTwoBytes < 0x01F00)
{
// In the 0x8140 to 0x9FFC range
assembledTwoBytes += 0x08140;
}
else
{
// In the 0xE040 to 0xEBBF range
assembledTwoBytes += 0x0C140;
}
buffer[offset] = (byte)(assembledTwoBytes >> 8);
buffer[offset + 1] = (byte)assembledTwoBytes;
offset += 2;
count--;
}
// Shift_JIS may not be supported in some environments:
try
{
result.Append(Encoding.GetEncoding(StringUtils.SHIFT_JIS).GetString(buffer, 0, buffer.Length));
}
#if (WINDOWS_PHONE70 || WINDOWS_PHONE71 || SILVERLIGHT4 || SILVERLIGHT5 || NETFX_CORE || MONOANDROID || MONOTOUCH)
catch (ArgumentException)
{
try
{
               // Silverlight only supports a limited number of character sets, so try falling back to UTF-8
result.Append(Encoding.GetEncoding("UTF-8").GetString(buffer, 0, buffer.Length));
}
catch (Exception)
{
return false;
}
}
#endif
catch (Exception)
{
return false;
}
return true;
}
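      // Illustrative note, not part of the original source: the Kanji branch mirrors the Hanzi one,
      // but packs Shift_JIS bytes as (row * 0xC0 + column) and re-bases by 0x8140 or 0xC140, matching
      // the 0x8140-0x9FFC and 0xE040-0xEBBF Shift_JIS ranges referenced in the comments above.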
private static bool decodeByteSegment(BitSource bits,
StringBuilder result,
int count,
CharacterSetECI currentCharacterSetECI,
IList<byte[]> byteSegments,
IDictionary<DecodeHintType, object> hints)
{
// Don't crash trying to read more bits than we have available.
if (count << 3 > bits.available())
{
return false;
}
byte[] readBytes = new byte[count];
for (int i = 0; i < count; i++)
{
readBytes[i] = (byte)bits.readBits(8);
}
String encoding;
if (currentCharacterSetECI == null)
{
// The spec isn't clear on this mode; see
            // section 6.4.5: it does not say which encoding to assume
// upon decoding. I have seen ISO-8859-1 used as well as
// Shift_JIS -- without anything like an ECI designator to
// give a hint.
encoding = StringUtils.guessEncoding(readBytes, hints);
}
else
{
encoding = currentCharacterSetECI.EncodingName;
}
try
{
result.Append(Encoding.GetEncoding(encoding).GetString(readBytes, 0, readBytes.Length));
}
#if (WINDOWS_PHONE70 || WINDOWS_PHONE71 || SILVERLIGHT4 || SILVERLIGHT5 || NETFX_CORE || MONOANDROID || MONOTOUCH)
catch (ArgumentException)
{
try
{
               // Silverlight only supports a limited number of character sets, so try falling back to UTF-8
result.Append(Encoding.GetEncoding("UTF-8").GetString(readBytes, 0, readBytes.Length));
}
catch (Exception)
{
return false;
}
}
#endif
#if WindowsCE
catch (PlatformNotSupportedException)
{
try
{
               // WindowsCE doesn't support all encodings, and what is available is device dependent,
               // so we try a few different ones here
if (encoding == "ISO-8859-1")
{
result.Append(Encoding.GetEncoding(1252).GetString(readBytes, 0, readBytes.Length));
}
else
{
result.Append(Encoding.GetEncoding("UTF-8").GetString(readBytes, 0, readBytes.Length));
}
}
catch (Exception)
{
return false;
}
}
#endif
catch (Exception)
{
return false;
}
byteSegments.Add(readBytes);
return true;
}
private static char toAlphaNumericChar(int value)
{
if (value >= ALPHANUMERIC_CHARS.Length)
{
throw FormatException.Instance;
}
return ALPHANUMERIC_CHARS[value];
}
private static bool decodeAlphanumericSegment(BitSource bits,
StringBuilder result,
int count,
bool fc1InEffect)
{
// Read two characters at a time
int start = result.Length;
while (count > 1)
{
if (bits.available() < 11)
{
return false;
}
int nextTwoCharsBits = bits.readBits(11);
result.Append(toAlphaNumericChar(nextTwoCharsBits / 45));
result.Append(toAlphaNumericChar(nextTwoCharsBits % 45));
count -= 2;
}
if (count == 1)
{
// special case: one character left
if (bits.available() < 6)
{
return false;
}
result.Append(toAlphaNumericChar(bits.readBits(6)));
}
// See section 6.4.8.1, 6.4.8.2
if (fc1InEffect)
{
// We need to massage the result a bit if in an FNC1 mode:
for (int i = start; i < result.Length; i++)
{
if (result[i] == '%')
{
if (i < result.Length - 1 && result[i + 1] == '%')
{
// %% is rendered as %
result.Remove(i + 1, 1);
}
else
{
// In alpha mode, % should be converted to FNC1 separator 0x1D
result.Remove(i, 1);
result.Insert(i, new[] { (char)0x1D });
}
}
}
}
return true;
}
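      // Illustrative sketch, a hypothetical helper not present in the original source: 11 bits encode
      // two alphanumeric characters as (first * 45 + second). For example, "AB" is stored as
      // 10 * 45 + 11 = 461, and the division/modulo above recovers both characters.
      private static string DecodeAlphanumericPairSketch(int elevenBits)
      {
         return new string(new[] { toAlphaNumericChar(elevenBits / 45), toAlphaNumericChar(elevenBits % 45) });
      }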
private static bool decodeNumericSegment(BitSource bits,
StringBuilder result,
int count)
{
// Read three digits at a time
while (count >= 3)
{
// Each 10 bits encodes three digits
if (bits.available() < 10)
{
return false;
}
int threeDigitsBits = bits.readBits(10);
if (threeDigitsBits >= 1000)
{
return false;
}
result.Append(toAlphaNumericChar(threeDigitsBits / 100));
result.Append(toAlphaNumericChar((threeDigitsBits / 10) % 10));
result.Append(toAlphaNumericChar(threeDigitsBits % 10));
count -= 3;
}
if (count == 2)
{
// Two digits left over to read, encoded in 7 bits
if (bits.available() < 7)
{
return false;
}
int twoDigitsBits = bits.readBits(7);
if (twoDigitsBits >= 100)
{
return false;
}
result.Append(toAlphaNumericChar(twoDigitsBits / 10));
result.Append(toAlphaNumericChar(twoDigitsBits % 10));
}
else if (count == 1)
{
// One digit left over to read
if (bits.available() < 4)
{
return false;
}
int digitBits = bits.readBits(4);
if (digitBits >= 10)
{
return false;
}
result.Append(toAlphaNumericChar(digitBits));
}
return true;
}
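      // Illustrative note, not part of the original source: numeric mode packs digits as plain
      // base-10 values -- "512" is stored as the 10-bit value 512, "51" as the 7-bit value 51 and
      // "5" as the 4-bit value 5 -- which is why the checks above reject values >= 1000, 100 and 10.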
private static int parseECIValue(BitSource bits)
{
int firstByte = bits.readBits(8);
if ((firstByte & 0x80) == 0)
{
// just one byte
return firstByte & 0x7F;
}
if ((firstByte & 0xC0) == 0x80)
{
// two bytes
int secondByte = bits.readBits(8);
return ((firstByte & 0x3F) << 8) | secondByte;
}
if ((firstByte & 0xE0) == 0xC0)
{
// three bytes
int secondThirdBytes = bits.readBits(16);
return ((firstByte & 0x1F) << 16) | secondThirdBytes;
}
throw new ArgumentException("Bad ECI bits starting with byte " + firstByte);
}
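      // Illustrative note, not part of the original source: the ECI value uses a UTF-8-style length
      // prefix -- 0xxxxxxx is a one-byte value (0..127), 10xxxxxx starts a two-byte value and
      // 110xxxxx a three-byte value. For example, the bytes 0x9A 0x0B decode to
      // ((0x9A & 0x3F) << 8) | 0x0B = 0x1A0B.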
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Threading.Tasks;
using Azure.Core;
using Azure.Core.Pipeline;
using Azure.Core.TestFramework;
using Azure.Search.Documents.Indexes;
using Azure.Search.Documents.Indexes.Models;
using NUnit.Framework;
namespace Azure.Search.Documents.Tests
{
public class SearchIndexClientTests : SearchTestBase
{
public SearchIndexClientTests(bool async, SearchClientOptions.ServiceVersion serviceVersion)
: base(async, serviceVersion, null /* RecordedTestMode.Record /* to re-record */)
{
}
[Test]
public void Constructor()
{
var serviceName = "my-svc-name";
var endpoint = new Uri($"https://{serviceName}.search.windows.net");
var service = new SearchIndexClient(endpoint, new AzureKeyCredential("fake"));
Assert.NotNull(service);
Assert.AreEqual(endpoint, service.Endpoint);
Assert.AreEqual(serviceName, service.ServiceName);
Assert.Throws<ArgumentNullException>(() => new SearchIndexClient(null, new AzureKeyCredential("fake")));
Assert.Throws<ArgumentNullException>(() => new SearchIndexClient(endpoint, null));
Assert.Throws<ArgumentException>(() => new SearchIndexClient(new Uri("http://bing.com"), null));
}
[Test]
public void GetSearchClient()
{
var serviceName = "my-svc-name";
var endpoint = new Uri($"https://{serviceName}.search.windows.net");
var service = new SearchIndexClient(endpoint, new AzureKeyCredential("fake"));
var indexName = "my-index-name";
var client = service.GetSearchClient(indexName);
Assert.NotNull(client);
Assert.AreEqual(endpoint, client.Endpoint);
Assert.AreEqual(serviceName, client.ServiceName);
Assert.AreEqual(indexName, client.IndexName);
Assert.Throws<ArgumentNullException>(() => service.GetSearchClient(null));
Assert.Throws<ArgumentException>(() => service.GetSearchClient(string.Empty));
}
private class TestPipelinePolicy : HttpPipelineSynchronousPolicy
{
public int RequestCount { get; private set; }
public override void OnSendingRequest(HttpMessage message) => RequestCount++;
}
[Test]
public async Task IndexSharesPipeline()
{
await using SearchResources resources = await SearchResources.GetSharedHotelsIndexAsync(this);
TestPipelinePolicy custom = new TestPipelinePolicy();
Assert.AreEqual(0, custom.RequestCount);
SearchClientOptions options = new SearchClientOptions(ServiceVersion);
options.AddPolicy(custom, HttpPipelinePosition.PerCall);
SearchIndexClient serviceClient = resources.GetIndexClient(options);
SearchClient client = serviceClient.GetSearchClient(resources.IndexName);
_ = await client.GetDocumentCountAsync();
Assert.AreEqual(1, custom.RequestCount);
}
[Test]
public void DiagnosticsAreUnique()
{
// Make sure we're not repeating Header/Query names already defined
// in the base ClientOptions
SearchClientOptions options = new SearchClientOptions();
Assert.IsEmpty(GetDuplicates(options.Diagnostics.LoggedHeaderNames));
Assert.IsEmpty(GetDuplicates(options.Diagnostics.LoggedQueryParameters));
// CollectionAssert.Unique doesn't give you the duplicate values
// which is less helpful than it could be
static string GetDuplicates(IEnumerable<string> values)
{
List<string> duplicates = new List<string>();
HashSet<string> unique = new HashSet<string>();
foreach (string value in values)
{
if (!unique.Add(value))
{
duplicates.Add(value);
}
}
return string.Join(", ", duplicates);
}
}
[Test]
public async Task GetServiceStatistics()
{
await using SearchResources resources = await SearchResources.GetSharedHotelsIndexAsync(this);
SearchIndexClient client = resources.GetIndexClient();
Response<SearchServiceStatistics> response = await client.GetServiceStatisticsAsync();
Assert.AreEqual(200, response.GetRawResponse().Status);
Assert.IsNotNull(response.Value);
Assert.IsNotNull(response.Value.Counters);
Assert.IsNotNull(response.Value.Counters.DataSourceCounter);
Assert.IsNotNull(response.Value.Counters.DocumentCounter);
Assert.IsNotNull(response.Value.Counters.IndexCounter);
Assert.IsNotNull(response.Value.Counters.IndexerCounter);
Assert.IsNotNull(response.Value.Counters.StorageSizeCounter);
Assert.IsNotNull(response.Value.Counters.SynonymMapCounter);
Assert.IsNotNull(response.Value.Limits);
Assert.NotZero(response.Value.Counters.IndexCounter.Quota ?? 0L);
Assert.NotZero(response.Value.Counters.IndexCounter.Usage);
}
[Test]
[SyncOnly]
public void CreateIndexParameterValidation()
{
var endpoint = new Uri($"https://my-svc-name.search.windows.net");
var service = new SearchIndexClient(endpoint, new AzureKeyCredential("fake"));
ArgumentException ex = Assert.Throws<ArgumentNullException>(() => service.CreateIndex(null));
Assert.AreEqual("index", ex.ParamName);
ex = Assert.ThrowsAsync<ArgumentNullException>(() => service.CreateIndexAsync(null));
Assert.AreEqual("index", ex.ParamName);
}
[Test]
public async Task CreateIndex()
{
await using SearchResources resources = SearchResources.CreateWithNoIndexes(this);
resources.IndexName = Recording.Random.GetName(8);
SearchIndex expectedIndex = SearchResources.GetHotelIndex(resources.IndexName);
SearchIndexClient client = resources.GetIndexClient();
SearchIndex actualIndex = await client.CreateIndexAsync(expectedIndex);
Assert.AreEqual(expectedIndex.Name, actualIndex.Name);
Assert.That(actualIndex.Fields, Is.EqualTo(expectedIndex.Fields).Using(SearchFieldComparer.Shared));
Assert.AreEqual(expectedIndex.Suggesters.Count, actualIndex.Suggesters.Count);
Assert.AreEqual(expectedIndex.Suggesters[0].Name, actualIndex.Suggesters[0].Name);
Assert.AreEqual(expectedIndex.ScoringProfiles.Count, actualIndex.ScoringProfiles.Count);
Assert.AreEqual(expectedIndex.ScoringProfiles[0].Name, actualIndex.ScoringProfiles[0].Name);
}
[Test]
[SyncOnly]
public void UpdateIndexParameterValidation()
{
var endpoint = new Uri($"https://my-svc-name.search.windows.net");
var service = new SearchIndexClient(endpoint, new AzureKeyCredential("fake"));
ArgumentException ex = Assert.Throws<ArgumentNullException>(() => service.CreateOrUpdateIndex(null));
Assert.AreEqual("index", ex.ParamName);
ex = Assert.ThrowsAsync<ArgumentNullException>(() => service.CreateOrUpdateIndexAsync(null));
Assert.AreEqual("index", ex.ParamName);
}
[Test]
public async Task UpdateIndex()
{
await using SearchResources resources = SearchResources.CreateWithNoIndexes(this);
resources.IndexName = Recording.Random.GetName();
SearchIndex initialIndex = SearchResources.GetHotelIndex(resources.IndexName);
SearchIndexClient client = resources.GetIndexClient();
SearchIndex createdIndex = await client.CreateIndexAsync(initialIndex);
string analyzerName = "asciiTags";
createdIndex.Analyzers.Add(
new PatternAnalyzer(analyzerName)
{
Pattern = @"[0-9a-z]+",
Flags =
{
RegexFlag.CaseInsensitive,
RegexFlag.Multiline,
},
Stopwords =
{
"a",
"and",
"the",
},
});
createdIndex.Fields.Add(
new SearchableField("asciiTags", collection: true)
{
AnalyzerName = analyzerName,
IsFacetable = true,
IsFilterable = true,
});
SearchIndex updatedIndex = await client.CreateOrUpdateIndexAsync(
createdIndex,
allowIndexDowntime: true,
onlyIfUnchanged: true);
Assert.AreEqual(createdIndex.Name, updatedIndex.Name);
            Assert.That(updatedIndex.Fields, Is.EqualTo(createdIndex.Fields).Using(SearchFieldComparer.Shared));
Assert.AreEqual(createdIndex.Suggesters.Count, updatedIndex.Suggesters.Count);
Assert.AreEqual(createdIndex.Suggesters[0].Name, updatedIndex.Suggesters[0].Name);
Assert.AreEqual(createdIndex.ScoringProfiles.Count, updatedIndex.ScoringProfiles.Count);
Assert.AreEqual(createdIndex.ScoringProfiles[0].Name, updatedIndex.ScoringProfiles[0].Name);
Assert.AreEqual(createdIndex.Analyzers.Count, updatedIndex.Analyzers.Count);
Assert.AreEqual(createdIndex.Analyzers[0].Name, updatedIndex.Analyzers[0].Name);
}
[Test]
[SyncOnly]
public void GetIndexParameterValidation()
{
var endpoint = new Uri($"https://my-svc-name.search.windows.net");
var service = new SearchIndexClient(endpoint, new AzureKeyCredential("fake"));
ArgumentException ex = Assert.Throws<ArgumentNullException>(() => service.GetIndex(null));
Assert.AreEqual("indexName", ex.ParamName);
ex = Assert.ThrowsAsync<ArgumentNullException>(() => service.GetIndexAsync(null));
Assert.AreEqual("indexName", ex.ParamName);
}
[Test]
public async Task GetIndex()
{
await using SearchResources resources = await SearchResources.CreateWithHotelsIndexAsync(this);
SearchIndexClient client = resources.GetIndexClient();
SearchIndex index = await client.GetIndexAsync(resources.IndexName);
// TODO: Replace with comparison of actual SearchIndex once test framework uses Azure.Search.Documents instead.
Assert.AreEqual(resources.IndexName, index.Name);
Assert.AreEqual(14, index.Fields.Count);
}
[Test]
public async Task GetIndexes()
{
await using SearchResources resources = await SearchResources.GetSharedHotelsIndexAsync(this);
SearchIndexClient client = resources.GetIndexClient();
bool found = false;
await foreach (SearchIndex index in client.GetIndexesAsync())
{
found |= string.Equals(resources.IndexName, index.Name, StringComparison.InvariantCultureIgnoreCase);
}
Assert.IsTrue(found, "Shared index not found");
}
[Test]
[AsyncOnly]
public async Task GetIndexesNextPageThrows()
{
await using SearchResources resources = await SearchResources.GetSharedHotelsIndexAsync(this);
SearchIndexClient client = resources.GetIndexClient();
AsyncPageable<SearchIndex> pageable = client.GetIndexesAsync();
string continuationToken = Recording.GenerateId();
IAsyncEnumerator<Page<SearchIndex>> e = pageable.AsPages(continuationToken).GetAsyncEnumerator();
// Given a continuationToken above, this actually starts with the second page.
Assert.ThrowsAsync<NotSupportedException>(async () => await e.MoveNextAsync());
}
[Test]
public async Task GetIndexNames()
{
await using SearchResources resources = await SearchResources.GetSharedHotelsIndexAsync(this);
SearchIndexClient client = resources.GetIndexClient();
bool found = false;
await foreach (string name in client.GetIndexNamesAsync())
{
found |= string.Equals(resources.IndexName, name, StringComparison.InvariantCultureIgnoreCase);
}
Assert.IsTrue(found, "Shared index name not found");
}
[Test]
[SyncOnly]
public void DeleteIndexParameterValidation()
{
var endpoint = new Uri($"https://my-svc-name.search.windows.net");
var service = new SearchIndexClient(endpoint, new AzureKeyCredential("fake"));
ArgumentException ex = Assert.Throws<ArgumentNullException>(() => service.DeleteIndex((string)null));
Assert.AreEqual("indexName", ex.ParamName);
ex = Assert.Throws<ArgumentNullException>(() => service.DeleteIndex((SearchIndex)null));
Assert.AreEqual("index", ex.ParamName);
ex = Assert.ThrowsAsync<ArgumentNullException>(() => service.DeleteIndexAsync((string)null));
Assert.AreEqual("indexName", ex.ParamName);
ex = Assert.ThrowsAsync<ArgumentNullException>(() => service.DeleteIndexAsync((SearchIndex)null));
Assert.AreEqual("index", ex.ParamName);
}
[Test]
[SyncOnly]
public void CreateSynonymMapParameterValidation()
{
var endpoint = new Uri($"https://my-svc-name.search.windows.net");
var service = new SearchIndexClient(endpoint, new AzureKeyCredential("fake"));
ArgumentException ex = Assert.Throws<ArgumentNullException>(() => service.CreateSynonymMap(null));
Assert.AreEqual("synonymMap", ex.ParamName);
ex = Assert.ThrowsAsync<ArgumentNullException>(() => service.CreateSynonymMapAsync(null));
Assert.AreEqual("synonymMap", ex.ParamName);
}
[Test]
[SyncOnly]
public void UpdateSynonymMapParameterValidation()
{
var endpoint = new Uri($"https://my-svc-name.search.windows.net");
var service = new SearchIndexClient(endpoint, new AzureKeyCredential("fake"));
ArgumentException ex = Assert.Throws<ArgumentNullException>(() => service.CreateOrUpdateSynonymMap(null));
Assert.AreEqual("synonymMap", ex.ParamName);
ex = Assert.ThrowsAsync<ArgumentNullException>(() => service.CreateOrUpdateSynonymMapAsync(null));
Assert.AreEqual("synonymMap", ex.ParamName);
}
[Test]
[SyncOnly]
public void GetSynonymMapParameterValidation()
{
var endpoint = new Uri($"https://my-svc-name.search.windows.net");
var service = new SearchIndexClient(endpoint, new AzureKeyCredential("fake"));
ArgumentException ex = Assert.Throws<ArgumentNullException>(() => service.GetSynonymMap(null));
Assert.AreEqual("synonymMapName", ex.ParamName);
ex = Assert.ThrowsAsync<ArgumentNullException>(() => service.GetSynonymMapAsync(null));
Assert.AreEqual("synonymMapName", ex.ParamName);
}
[Test]
[SyncOnly]
public void DeleteSynonymMapParameterValidation()
{
var endpoint = new Uri($"https://my-svc-name.search.windows.net");
var service = new SearchIndexClient(endpoint, new AzureKeyCredential("fake"));
ArgumentException ex = Assert.Throws<ArgumentNullException>(() => service.DeleteSynonymMap((string)null));
Assert.AreEqual("synonymMapName", ex.ParamName);
ex = Assert.Throws<ArgumentNullException>(() => service.DeleteSynonymMap((SynonymMap)null));
Assert.AreEqual("synonymMap", ex.ParamName);
ex = Assert.ThrowsAsync<ArgumentNullException>(() => service.DeleteSynonymMapAsync((string)null));
Assert.AreEqual("synonymMapName", ex.ParamName);
ex = Assert.ThrowsAsync<ArgumentNullException>(() => service.DeleteSynonymMapAsync((SynonymMap)null));
Assert.AreEqual("synonymMap", ex.ParamName);
}
[Test]
public async Task CrudSynonymMaps()
{
await using SearchResources resources = await SearchResources.GetSharedHotelsIndexAsync(this);
string synonymMapName = Recording.Random.GetName();
SearchIndexClient client = resources.GetIndexClient();
SynonymMap createdMap = await client.CreateSynonymMapAsync(new SynonymMap(synonymMapName, "msft=>Microsoft"));
Assert.AreEqual(synonymMapName, createdMap.Name);
Assert.AreEqual("solr", createdMap.Format);
Assert.AreEqual("msft=>Microsoft", createdMap.Synonyms);
SynonymMap updatedMap = await client.CreateOrUpdateSynonymMapAsync(
new SynonymMap(synonymMapName, "ms,msft=>Microsoft")
{
ETag = createdMap.ETag,
},
onlyIfUnchanged: true);
Assert.AreEqual(synonymMapName, updatedMap.Name);
Assert.AreEqual("solr", updatedMap.Format);
Assert.AreEqual("ms,msft=>Microsoft", updatedMap.Synonyms);
RequestFailedException ex = await CatchAsync<RequestFailedException>(async () =>
await client.CreateOrUpdateSynonymMapAsync(
new SynonymMap(synonymMapName, "ms,msft=>Microsoft")
{
ETag = createdMap.ETag,
},
onlyIfUnchanged: true));
Assert.AreEqual((int)HttpStatusCode.PreconditionFailed, ex.Status);
Response<IReadOnlyList<string>> names = await client.GetSynonymMapNamesAsync();
foreach (string name in names.Value)
{
if (string.Equals(updatedMap.Name, name, StringComparison.OrdinalIgnoreCase))
{
SynonymMap fetchedMap = await client.GetSynonymMapAsync(name);
Assert.AreEqual(updatedMap.Synonyms, fetchedMap.Synonyms);
}
}
await client.DeleteSynonymMapAsync(updatedMap, onlyIfUnchanged: true);
}
[Test]
public async Task AnalyzeText()
{
await using SearchResources resources = await SearchResources.GetSharedHotelsIndexAsync(this);
SearchIndexClient client = resources.GetIndexClient();
AnalyzeTextOptions request = new AnalyzeTextOptions("The quick brown fox jumped over the lazy dog.", LexicalTokenizerName.Whitespace);
Response<IReadOnlyList<AnalyzedTokenInfo>> result = await client.AnalyzeTextAsync(resources.IndexName, request);
IReadOnlyList<AnalyzedTokenInfo> tokens = result.Value;
Assert.AreEqual(new[] { "The", "quick", "brown", "fox", "jumped", "over", "the", "lazy", "dog." }, tokens.Select(t => t.Token));
}
[Test]
public async Task SetScoringProfile()
{
// Testing: https://github.com/Azure/azure-sdk-for-net/issues/16570
await using SearchResources resources = SearchResources.CreateWithNoIndexes(this);
string indexName = Recording.Random.GetName();
string scoringProfileName = Recording.Random.GetName();
// Make sure the index, if created, is cleaned up.
resources.IndexName = indexName;
SearchIndex index = new SearchIndex(indexName)
{
Fields =
{
new SimpleField("id", SearchFieldDataType.String) { IsKey = true },
new SearchableField("title") { IsFilterable = true, IsSortable = false },
},
DefaultScoringProfile = scoringProfileName,
ScoringProfiles =
{
new ScoringProfile(scoringProfileName)
{
TextWeights = new TextWeights(new Dictionary<string, double>
{
{ "title", 2 },
}),
},
},
};
SearchIndexClient client = resources.GetIndexClient();
SearchIndex createdIndex = await client.CreateIndexAsync(index);
Assert.AreEqual(1, createdIndex.ScoringProfiles.Count);
Assert.AreEqual(scoringProfileName, createdIndex.ScoringProfiles[0].Name);
}
}
}
| |
using System;
using System.Collections;
using System.IO;
using System.Xml;
using System.Xml.Schema;
public class Test
{
public static void Main (string [] args)
{
if (args.Length == 0) {
Console.WriteLine ("USAGE: xsdump masterlistname");
return;
}
try {
SchemaDumper.TestDir (args [0], Console.Out);
} catch (Exception ex) {
Console.WriteLine (ex);
}
}
}
public class SchemaDumper
{
public static void TestDir (string masterlist, TextWriter w)
{
FileInfo fi = new FileInfo (masterlist);
string dirname = fi.Directory.Parent.FullName;
SchemaDumper d = new SchemaDumper (w);
#if false
foreach (DirectoryInfo di in new DirectoryInfo (dirname).GetDirectories ())
foreach (FileInfo fi in di.GetFiles ("*.xsd")) {
try {
d.IndentLine ("**** File : " + fi.Name);
d.DumpSchema (XmlSchema.Read (new XmlTextReader (fi.FullName), null));
} catch (Exception ex) {
d.IndentLine ("**** Error in " + fi.Name);
}
}
#else
XmlDocument doc = new XmlDocument ();
doc.Load (fi.FullName);
foreach (XmlElement test in doc.SelectNodes ("/tests/test")) {
// Test schema
string schemaFile = test.SelectSingleNode ("@schema").InnerText;
if (schemaFile.Length > 2)
schemaFile = schemaFile.Substring (2);
bool isValidSchema = test.SelectSingleNode ("@out_s").InnerText == "1";
if (!isValidSchema)
continue;
#endif
try {
d.IndentLine ("**** File : " + schemaFile);
d.depth++;
XmlTextReader xtr = new XmlTextReader (dirname + "/" + schemaFile);
d.DumpSchema (XmlSchema.Read (xtr, null));
xtr.Close ();
} catch (Exception ex) {
d.IndentLine ("**** Error in " + schemaFile);
} finally {
d.depth--;
}
}
}
public int depth;
TextWriter w;
public SchemaDumper (TextWriter w)
{
this.w = w;
}
public void IndentLine (object s)
{
for (int i = 0; i < depth * 2; i++)
w.Write (' ');
w.WriteLine (s);
}
public void DumpSchema (XmlSchema schema)
{
schema.Compile (null);
SortedList sl = new SortedList ();
IndentLine ("**XmlSchema**");
IndentLine ("TargetNamespace: " + schema.TargetNamespace);
IndentLine ("AttributeGroups:");
foreach (DictionaryEntry entry in schema.AttributeGroups)
sl.Add (entry.Key.ToString (), entry.Value);
foreach (DictionaryEntry entry in sl)
DumpAttributeGroup ((XmlSchemaAttributeGroup) entry.Value);
sl.Clear ();
IndentLine ("Attributes:");
foreach (DictionaryEntry entry in schema.Attributes)
sl.Add (entry.Key.ToString (), entry.Value);
foreach (DictionaryEntry entry in sl)
DumpAttribute ((XmlSchemaAttribute) entry.Value);
sl.Clear ();
IndentLine ("Elements:");
foreach (DictionaryEntry entry in schema.Elements)
sl.Add (entry.Key.ToString (), entry.Value);
foreach (DictionaryEntry entry in sl)
DumpElement ((XmlSchemaElement) entry.Value);
sl.Clear ();
IndentLine ("Groups");
foreach (DictionaryEntry entry in schema.Groups)
sl.Add (entry.Key.ToString (), entry.Value);
foreach (DictionaryEntry entry in sl)
DumpGroup ((XmlSchemaGroup) entry.Value);
sl.Clear ();
IndentLine ("IsCompiled: " + schema.IsCompiled);
IndentLine ("Notations");
foreach (DictionaryEntry entry in schema.Notations)
sl.Add (entry.Key.ToString (), entry.Value);
foreach (DictionaryEntry entry in sl)
DumpNotation ((XmlSchemaNotation) entry.Value);
sl.Clear ();
IndentLine ("SchemaTypes:");
		foreach (DictionaryEntry entry in schema.SchemaTypes)
sl.Add (entry.Key.ToString (), entry.Value);
foreach (DictionaryEntry entry in sl)
if (entry.Value is XmlSchemaSimpleType)
DumpSimpleType ((XmlSchemaSimpleType) entry.Value);
else
DumpComplexType ((XmlSchemaComplexType) entry.Value);
sl.Clear ();
}
public void DumpAttributeGroup (XmlSchemaAttributeGroup ag)
{
depth++;
IndentLine ("**AttributeGroup**");
IndentLine ("Name = " + ag.Name);
if (ag.RedefinedAttributeGroup != null) {
IndentLine ("RedefinedGroup:");
DumpAttributeGroup (ag.RedefinedAttributeGroup);
}
depth--;
}
public void DumpAttribute (XmlSchemaAttribute a)
{
depth++;
IndentLine ("**Attribute**");
IndentLine ("QualifiedName: " + a.QualifiedName);
IndentLine ("RefName: " + a.RefName);
IndentLine ("AttributeType:");
DumpType (a.AttributeType);
depth--;
}
public void DumpElement (XmlSchemaElement e)
{
depth++;
IndentLine ("**Element**");
IndentLine ("QualifiedName: " + e.QualifiedName);
IndentLine ("ElementType:");
DumpType (e.ElementType);
depth--;
}
public void DumpGroup (XmlSchemaGroup g)
{
depth++;
IndentLine ("**Group**");
IndentLine ("Name: " + g.Name);
depth--;
}
public void DumpNotation (XmlSchemaNotation n)
{
depth++;
IndentLine ("**Notation**");
IndentLine ("Name: " + n.Name);
depth--;
}
public void DumpType (object type)
{
depth++;
if (type is XmlSchemaComplexType)
DumpComplexType ((XmlSchemaComplexType) type);
else if (type is XmlSchemaSimpleType)
DumpSimpleType ((XmlSchemaSimpleType) type);
else if (type is XmlSchemaDatatype)
DumpDatatype ((XmlSchemaDatatype) type);
else
IndentLine ("Unexpected Type: " + type);
depth--;
}
public void DumpSimpleType (XmlSchemaSimpleType s)
{
depth++;
IndentLine ("**SimpleType**");
IndentLine ("QualifiedName: " + s.QualifiedName);
IndentLine ("BaseSchemaType:");
DumpType (s.BaseSchemaType);
depth--;
}
public void DumpComplexType (XmlSchemaComplexType c)
{
depth++;
IndentLine ("**ComplexType**");
IndentLine ("QualifiedName: " + c.QualifiedName);
IndentLine ("ContentType: " + c.ContentType);
IndentLine ("ContentTypeParticle: ");
DumpParticle (c.ContentTypeParticle);
IndentLine ("BaseSchemaType:");
DumpType (c.BaseSchemaType);
depth--;
}
public void DumpParticle (XmlSchemaParticle p)
{
if (p is XmlSchemaGroupBase)
DumpGroupBase ((XmlSchemaGroupBase) p);
else if (p is XmlSchemaElement)
DumpElementNoRecurse ((XmlSchemaElement) p);
else if (p is XmlSchemaAny)
DumpAny ((XmlSchemaAny) p);
else
IndentLine (p);
}
public void DumpDatatype (XmlSchemaDatatype d)
{
depth++;
IndentLine ("**Datatype**");
IndentLine ("TokenizedType: " + d.TokenizedType);
IndentLine ("ValueType: " + d.ValueType);
depth--;
}
public void DumpGroupBase (XmlSchemaGroupBase gb)
{
depth++;
IndentLine ("**GroupBase**");
IndentLine ("Type: " + gb);
IndentLine ("MinOccurs: " + gb.MinOccurs);
IndentLine ("MaxOccurs: " + gb.MaxOccurs);
IndentLine ("Items: ");
foreach (XmlSchemaParticle p in gb.Items)
DumpParticle (p);
depth--;
}
public void DumpElementNoRecurse (XmlSchemaElement e)
{
depth++;
IndentLine ("**Element**");
IndentLine ("QualifiedName: " + e.QualifiedName);
depth--;
}
public void DumpAny (XmlSchemaAny any)
{
depth++;
IndentLine ("**Any**");
// IndentLine ("Namespace: " + any.Namespace);
depth--;
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections;
using System.Diagnostics;
using System.Globalization;
using System.Linq;
using System.Reflection;
using System.Reflection.Emit;
namespace Microsoft.Internal
{
internal static class GenerationServices
{
// Type.GetTypeFromHandle
private static readonly MethodInfo _typeGetTypeFromHandleMethod = typeof(Type).GetMethod("GetTypeFromHandle");
// typeofs are pretty expensive, so we cache them statically
private static readonly Type TypeType = typeof(System.Type);
private static readonly Type StringType = typeof(string);
private static readonly Type CharType = typeof(char);
private static readonly Type BooleanType = typeof(bool);
private static readonly Type ByteType = typeof(byte);
private static readonly Type SByteType = typeof(sbyte);
private static readonly Type Int16Type = typeof(short);
private static readonly Type UInt16Type = typeof(ushort);
private static readonly Type Int32Type = typeof(int);
private static readonly Type UInt32Type = typeof(uint);
private static readonly Type Int64Type = typeof(long);
private static readonly Type UInt64Type = typeof(ulong);
private static readonly Type DoubleType = typeof(double);
private static readonly Type SingleType = typeof(float);
private static readonly Type IEnumerableTypeofT = typeof(System.Collections.Generic.IEnumerable<>);
private static readonly Type IEnumerableType = typeof(System.Collections.IEnumerable);
private static readonly MethodInfo ExceptionGetData = typeof(Exception).GetProperty("Data").GetGetMethod();
private static readonly MethodInfo DictionaryAdd = typeof(IDictionary).GetMethod("Add");
private static readonly ConstructorInfo ObjectCtor = typeof(object).GetConstructor(Type.EmptyTypes);
public static ILGenerator CreateGeneratorForPublicConstructor(this TypeBuilder typeBuilder, Type[] ctrArgumentTypes)
{
ConstructorBuilder ctorBuilder = typeBuilder.DefineConstructor(
MethodAttributes.Public,
CallingConventions.Standard,
ctrArgumentTypes);
ILGenerator ctorIL = ctorBuilder.GetILGenerator();
ctorIL.Emit(OpCodes.Ldarg_0);
ctorIL.Emit(OpCodes.Call, ObjectCtor);
return ctorIL;
}
/// Generates the code that loads the supplied value on the stack
/// This is not as simple as it seems, as different instructions need to be generated depending
/// on its type.
/// We support:
/// 1. All primitive types
/// 2. Strings
/// 3. Enums
/// 4. typeofs
/// 5. nulls
/// 6. Enumerables
/// 7. Delegates on static functions or any of the above
/// Everything else cannot be represented as literals
/// <param name="ilGenerator"></param>
/// <param name="value"></param>
/// <returns></returns>
public static void LoadValue(this ILGenerator ilGenerator, object value)
{
Debug.Assert(ilGenerator != null);
//
// Get nulls out of the way - they are basically typeless, so we just load null
//
if (value == null)
{
ilGenerator.LoadNull();
return;
}
//
// Prepare for literal loading - decide whether we should box, and handle enums properly
//
Type valueType = value.GetType();
object rawValue = value;
if (valueType.IsEnum)
{
// enums are special - we need to load the underlying constant on the stack
rawValue = Convert.ChangeType(value, Enum.GetUnderlyingType(valueType), null);
valueType = rawValue.GetType();
}
//
// Generate IL depending on the valueType - this is messier than it should ever be, but sadly necessary
//
if (valueType == GenerationServices.StringType)
{
// we need to check for strings before enumerables, because strings are IEnumerable<char>
ilGenerator.LoadString((string)rawValue);
}
else if (GenerationServices.TypeType.IsAssignableFrom(valueType))
{
ilGenerator.LoadTypeOf((Type)rawValue);
}
else if (GenerationServices.IEnumerableType.IsAssignableFrom(valueType))
{
// NOTE : strings and dictionaries are also enumerables, but we have already handled those
ilGenerator.LoadEnumerable((IEnumerable)rawValue);
}
else if (
(valueType == GenerationServices.CharType) ||
(valueType == GenerationServices.BooleanType) ||
(valueType == GenerationServices.ByteType) ||
(valueType == GenerationServices.SByteType) ||
(valueType == GenerationServices.Int16Type) ||
(valueType == GenerationServices.UInt16Type) ||
(valueType == GenerationServices.Int32Type)
)
{
                // NOTE : Everything that is 32 bits or less uses ldc.i4. We need to pass an Int32, even if the actual type is shorter - that is the IL memory model.
                // Direct casting to (int) won't work because the value is boxed, so we need to use Convert.
                // Sadly, this will not work for all cases - namely large UInt32 values - because they can't semantically fit into 32 signed bits.
                // We have a special case for that next.
ilGenerator.LoadInt((int)Convert.ChangeType(rawValue, typeof(int), CultureInfo.InvariantCulture));
}
else if (valueType == GenerationServices.UInt32Type)
{
                // NOTE : This one is a bit tricky. Ldc.I4 takes an Int32 as an argument, although it really treats it as a 32-bit number.
                // That said, some UInt32 values are larger than Int32.MaxValue, so the Convert call used above would fail, which is why
                // we need to treat this case individually and cast to uint, and then - unchecked - to int.
ilGenerator.LoadInt(unchecked((int)((uint)rawValue)));
}
else if (valueType == GenerationServices.Int64Type)
{
ilGenerator.LoadLong((long)rawValue);
}
else if (valueType == GenerationServices.UInt64Type)
{
                // NOTE : This one is a bit tricky. Ldc.I8 takes an Int64 as an argument, although it really treats it as a 64-bit number.
                // That said, some UInt64 values are larger than Int64.MaxValue, so the direct cast we use above (or Convert, for that matter) would fail, which is why
                // we need to treat this case individually and cast to ulong, and then - unchecked - to long.
ilGenerator.LoadLong(unchecked((long)((ulong)rawValue)));
}
else if (valueType == GenerationServices.SingleType)
{
ilGenerator.LoadFloat((float)rawValue);
}
else if (valueType == GenerationServices.DoubleType)
{
ilGenerator.LoadDouble((double)rawValue);
}
else
{
throw new InvalidOperationException(
SR.Format(SR.InvalidMetadataValue, value.GetType().FullName));
}
}
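        // Illustrative note, not part of the original source: for example, loading the UInt32 value
        // 0xFFFFFFFF takes the UInt32 branch above and emits ldc.i4 with the bit pattern -1; the
        // consumer of that stack slot reinterprets those 32 bits as unsigned, so no information is lost.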
/// Generates the code that adds an object to a dictionary stored in a local variable
/// <param name="ilGenerator"></param>
/// <param name="dictionary"></param>
/// <param name="key"></param>
/// <param name="value"></param>
/// <returns></returns>
public static void AddItemToLocalDictionary(this ILGenerator ilGenerator, LocalBuilder dictionary, object key, object value)
{
Debug.Assert(ilGenerator != null);
if (dictionary == null)
{
throw new ArgumentNullException(nameof(dictionary));
}
if (key == null)
{
throw new ArgumentNullException(nameof(key));
}
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
ilGenerator.Emit(OpCodes.Ldloc, dictionary);
ilGenerator.LoadValue(key);
ilGenerator.LoadValue(value);
ilGenerator.Emit(OpCodes.Callvirt, DictionaryAdd);
}
/// Generates the code that adds an object from a local variable to a dictionary also stored in a local
/// <param name="ilGenerator"></param>
/// <param name="dictionary"></param>
/// <param name="key"></param>
/// <param name="value"></param>
/// <returns></returns>
public static void AddLocalToLocalDictionary(this ILGenerator ilGenerator, LocalBuilder dictionary, object key, LocalBuilder value)
{
Debug.Assert(ilGenerator != null);
if (dictionary == null)
{
throw new ArgumentNullException(nameof(dictionary));
}
if (key == null)
{
throw new ArgumentNullException(nameof(key));
}
if (value == null)
{
throw new ArgumentNullException(nameof(value));
}
ilGenerator.Emit(OpCodes.Ldloc, dictionary);
ilGenerator.LoadValue(key);
ilGenerator.Emit(OpCodes.Ldloc, value);
ilGenerator.Emit(OpCodes.Callvirt, DictionaryAdd);
}
        /// Generates the code that reads the Data dictionary of an exception stored in a local and stores it in another local
/// <param name="ilGenerator"></param>
/// <param name="exception"></param>
/// <param name="dataStore"></param>
/// <returns></returns>
public static void GetExceptionDataAndStoreInLocal(this ILGenerator ilGenerator, LocalBuilder exception, LocalBuilder dataStore)
{
Debug.Assert(ilGenerator != null);
if (exception == null)
{
throw new ArgumentNullException(nameof(exception));
}
if (dataStore == null)
{
throw new ArgumentNullException(nameof(dataStore));
}
ilGenerator.Emit(OpCodes.Ldloc, exception);
ilGenerator.Emit(OpCodes.Callvirt, ExceptionGetData);
ilGenerator.Emit(OpCodes.Stloc, dataStore);
}
private static void LoadEnumerable(this ILGenerator ilGenerator, IEnumerable enumerable)
{
Debug.Assert(ilGenerator != null);
if (enumerable == null)
{
throw new ArgumentNullException(nameof(enumerable));
}
            // We load the enumerable as an array - this is the most compact and efficient way of representing it
Type elementType = null;
Type closedType = null;
if (ReflectionServices.TryGetGenericInterfaceType(enumerable.GetType(), GenerationServices.IEnumerableTypeofT, out closedType))
{
elementType = closedType.GetGenericArguments()[0];
}
else
{
elementType = typeof(object);
}
//
// elem[] array = new elem[<enumerable.Count()>]
//
Type generatedArrayType = elementType.MakeArrayType();
LocalBuilder generatedArrayLocal = ilGenerator.DeclareLocal(generatedArrayType);
ilGenerator.LoadInt(enumerable.Cast<object>().Count());
ilGenerator.Emit(OpCodes.Newarr, elementType);
ilGenerator.Emit(OpCodes.Stloc, generatedArrayLocal);
int index = 0;
foreach (object value in enumerable)
{
//
//array[<index>] = value;
//
ilGenerator.Emit(OpCodes.Ldloc, generatedArrayLocal);
ilGenerator.LoadInt(index);
ilGenerator.LoadValue(value);
if (GenerationServices.IsBoxingRequiredForValue(value) && !elementType.IsValueType)
{
ilGenerator.Emit(OpCodes.Box, value.GetType());
}
ilGenerator.Emit(OpCodes.Stelem, elementType);
index++;
}
ilGenerator.Emit(OpCodes.Ldloc, generatedArrayLocal);
}
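        // Illustrative sketch, not part of the original source: for new[] { 1, 2 } the IL emitted above
        // is roughly:
        //   ldc.i4 2 / newarr int32 / stloc arr
        //   ldloc arr / ldc.i4 0 / ldc.i4 1 / stelem int32
        //   ldloc arr / ldc.i4 1 / ldc.i4 2 / stelem int32
        //   ldloc arr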
private static bool IsBoxingRequiredForValue(object value)
{
if (value == null)
{
return false;
}
else
{
return value.GetType().IsValueType;
}
}
private static void LoadNull(this ILGenerator ilGenerator)
{
ilGenerator.Emit(OpCodes.Ldnull);
}
private static void LoadString(this ILGenerator ilGenerator, string s)
{
Debug.Assert(ilGenerator != null);
if (s == null)
{
ilGenerator.LoadNull();
}
else
{
ilGenerator.Emit(OpCodes.Ldstr, s);
}
}
private static void LoadInt(this ILGenerator ilGenerator, int value)
{
Debug.Assert(ilGenerator != null);
ilGenerator.Emit(OpCodes.Ldc_I4, value);
}
private static void LoadLong(this ILGenerator ilGenerator, long value)
{
Debug.Assert(ilGenerator != null);
ilGenerator.Emit(OpCodes.Ldc_I8, value);
}
private static void LoadFloat(this ILGenerator ilGenerator, float value)
{
Debug.Assert(ilGenerator != null);
ilGenerator.Emit(OpCodes.Ldc_R4, value);
}
private static void LoadDouble(this ILGenerator ilGenerator, double value)
{
Debug.Assert(ilGenerator != null);
ilGenerator.Emit(OpCodes.Ldc_R8, value);
}
private static void LoadTypeOf(this ILGenerator ilGenerator, Type type)
{
Debug.Assert(ilGenerator != null);
//typeofs() translate into ldtoken and Type::GetTypeFromHandle call
ilGenerator.Emit(OpCodes.Ldtoken, type);
ilGenerator.EmitCall(OpCodes.Call, GenerationServices._typeGetTypeFromHandleMethod, null);
}
}
}
| |
// Amplify Shader Editor - Visual Shader Editing Tool
// Copyright (c) Amplify Creations, Lda <[email protected]>
using UnityEngine;
using UnityEditor;
using System;
namespace AmplifyShaderEditor
{
[Serializable]
[NodeAttributes( "Component Mask", "Misc", "Mask certain channels from vectors/color components" )]
public sealed class ComponentMaskNode : ParentNode
{
private const string OutputLocalVarName = "componentMask";
[SerializeField]
private bool[] m_selection = { true, true, true, true };
[SerializeField]
private int m_outputPortCount = 4;
[SerializeField]
private string[] m_labels;
private int m_cachedOrderId = -1;
private int m_cachedSingularId = -1;
protected override void CommonInit( int uniqueId )
{
base.CommonInit( uniqueId );
AddInputPort( WirePortDataType.FLOAT4, false, Constants.EmptyPortValue );
AddOutputPort( WirePortDataType.FLOAT4, Constants.EmptyPortValue );
m_useInternalPortData = true;
m_autoWrapProperties = true;
m_selectedLocation = PreviewLocation.TopCenter;
m_labels = new string[] { "X", "Y", "Z", "W" };
m_previewShaderGUID = "b78e2b295c265cd439c80d218fb3e88e";
}
public override void SetPreviewInputs()
{
base.SetPreviewInputs();
Vector4 order = new Vector4(-1,-1,-1,-1);
int lastIndex = 0;
int singularId = -1;
if ( m_selection[ 0 ] )
{
order.Set( lastIndex, order.y , order.z , order.w );
lastIndex++;
singularId = 0;
}
if ( m_selection[ 1 ] )
{
order.Set( order.x, lastIndex, order.z, order.w );
lastIndex++;
singularId = 1;
}
if ( m_selection[ 2 ] )
{
order.Set( order.x, order.y, lastIndex, order.w );
lastIndex++;
singularId = 2;
}
if ( m_selection[ 3 ] )
{
order.Set( order.x, order.y, order.z, lastIndex );
lastIndex++;
singularId = 3;
}
if ( lastIndex != 1 )
singularId = -1;
if ( m_cachedOrderId == -1 )
m_cachedOrderId = Shader.PropertyToID( "_Order" );
if ( m_cachedSingularId == -1 )
m_cachedSingularId = Shader.PropertyToID( "_Singular" );
PreviewMaterial.SetVector( m_cachedOrderId, order );
PreviewMaterial.SetFloat( m_cachedSingularId, singularId );
}
public override void OnInputPortConnected( int portId, int otherNodeId, int otherPortId, bool activateNode = true )
{
base.OnInputPortConnected( portId, otherNodeId, otherPortId, activateNode );
UpdatePorts();
}
public override void OnConnectedOutputNodeChanges( int outputPortId, int otherNodeId, int otherPortId, string name, WirePortDataType type )
{
base.OnConnectedOutputNodeChanges( outputPortId, otherNodeId, otherPortId, name, type );
UpdatePorts();
}
void UpdatePorts()
{
m_inputPorts[ 0 ].MatchPortToConnection();
int count = 0;
switch ( m_inputPorts[ 0 ].DataType )
{
case WirePortDataType.FLOAT4:
case WirePortDataType.OBJECT:
case WirePortDataType.COLOR:
{
count = 4;
}
break;
case WirePortDataType.FLOAT3:
{
count = 3;
}
break;
case WirePortDataType.FLOAT2:
{
count = 2;
}
break;
case WirePortDataType.FLOAT:
case WirePortDataType.INT:
case WirePortDataType.FLOAT3x3:
case WirePortDataType.FLOAT4x4:
{ }
break;
}
int activeCount = 0;
if ( count > 0 )
{
for ( int i = 0; i < count; i++ )
{
if ( m_selection[ i ] )
activeCount += 1;
}
}
m_outputPortCount = activeCount;
switch ( activeCount )
{
case 0: ChangeOutputType( m_inputPorts[ 0 ].DataType, false ); break;
case 1: ChangeOutputType( WirePortDataType.FLOAT, false ); break;
case 2: ChangeOutputType( WirePortDataType.FLOAT2, false ); break;
case 3: ChangeOutputType( WirePortDataType.FLOAT3, false ); break;
case 4: ChangeOutputType( m_inputPorts[ 0 ].DataType, false ); break;
}
}
public override void DrawProperties()
{
base.DrawProperties();
//EditorGUI.BeginChangeCheck();
EditorGUILayout.BeginVertical();
int count = 0;
switch ( m_inputPorts[ 0 ].DataType )
{
case WirePortDataType.FLOAT4:
case WirePortDataType.OBJECT:
case WirePortDataType.COLOR:
{
count = 4;
}
break;
case WirePortDataType.FLOAT3:
{
count = 3;
}
break;
case WirePortDataType.FLOAT2:
{
count = 2;
}
break;
case WirePortDataType.FLOAT:
case WirePortDataType.INT:
case WirePortDataType.FLOAT3x3:
case WirePortDataType.FLOAT4x4:
{ }
break;
}
int activeCount = 0;
if ( count > 0 )
{
for ( int i = 0; i < count; i++ )
{
m_selection[ i ] = EditorGUILayoutToggleLeft( m_labels[i], m_selection[ i ] );
m_labels[ i ] = UIUtils.GetComponentForPosition( i, m_inputPorts[ 0 ].DataType ).ToUpper();
if ( m_selection[ i ] )
activeCount += 1;
}
}
if ( activeCount != m_outputPortCount )
{
m_outputPortCount = activeCount;
switch ( activeCount )
{
case 0: ChangeOutputType( m_inputPorts[ 0 ].DataType, false ); break;
case 1: ChangeOutputType( WirePortDataType.FLOAT, false ); break;
case 2: ChangeOutputType( WirePortDataType.FLOAT2, false ); break;
case 3: ChangeOutputType( WirePortDataType.FLOAT3, false ); break;
case 4: ChangeOutputType( m_inputPorts[ 0 ].DataType, false ); break;
}
SetSaveIsDirty();
}
EditorGUILayout.EndVertical();
//if( EditorGUI.EndChangeCheck())
//{
// //MarkForPreviewUpdate();
//}
}
public override string GenerateShaderForOutput( int outputId, ref MasterNodeDataCollector dataCollector, bool ignoreLocalVar )
{
if ( m_outputPorts[ 0 ].IsLocalValue )
return m_outputPorts[ 0 ].LocalValue;
string value = m_inputPorts[ 0 ].GenerateShaderForOutput( ref dataCollector, m_inputPorts[ 0 ].DataType, ignoreLocalVar );
int count = 0;
switch ( m_inputPorts[ 0 ].DataType )
{
case WirePortDataType.FLOAT4:
case WirePortDataType.OBJECT:
case WirePortDataType.COLOR:
{
count = 4;
}
break;
case WirePortDataType.FLOAT3:
{
count = 3;
}
break;
case WirePortDataType.FLOAT2:
{
count = 2;
}
break;
case WirePortDataType.FLOAT:
case WirePortDataType.INT:
{
count = 0;
}
break;
case WirePortDataType.FLOAT3x3:
case WirePortDataType.FLOAT4x4:
{ }
break;
}
if ( count > 0 )
{
value += ".";
for ( int i = 0; i < count; i++ )
{
if ( m_selection[ i ] )
{
value += UIUtils.GetComponentForPosition( i, m_inputPorts[ 0 ].DataType );
}
}
}
RegisterLocalVariable( outputId, value, ref dataCollector, OutputLocalVarName+OutputId );
return m_outputPorts[0].LocalValue;
}
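		// Illustrative note, not part of the original source: with a FLOAT4 input and only the X and Z
		// toggles enabled, the loop above appends the swizzle ".xz" to the input expression before
		// registering it as the componentMask<id> local variable.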
public string GetComponentForPosition( int i )
{
switch ( i )
{
case 0:
{
return ( ( m_outputPorts[ 0 ].DataType == WirePortDataType.COLOR ) ? "r" : "x" );
}
case 1:
{
return ( ( m_outputPorts[ 0 ].DataType == WirePortDataType.COLOR ) ? "g" : "y" );
}
case 2:
{
return ( ( m_outputPorts[ 0 ].DataType == WirePortDataType.COLOR ) ? "b" : "z" );
}
case 3:
{
return ( ( m_outputPorts[ 0 ].DataType == WirePortDataType.COLOR ) ? "a" : "w" );
}
}
return string.Empty;
}
public override void ReadFromString( ref string[] nodeParams )
{
base.ReadFromString( ref nodeParams );
for ( int i = 0; i < 4; i++ )
{
m_selection[ i ] = Convert.ToBoolean( GetCurrentParam( ref nodeParams ) );
}
}
public override void WriteToString( ref string nodeInfo, ref string connectionsInfo )
{
base.WriteToString( ref nodeInfo, ref connectionsInfo );
for ( int i = 0; i < 4; i++ )
{
IOUtils.AddFieldValueToString( ref nodeInfo, m_selection[ i ] );
}
}
}
}
| |
/*===============================================================================================
Submixing Example
Copyright (c), Firelight Technologies Pty, Ltd 2004-2011.
This example shows how to put channels into channel groups, so that you can affect a group
of channels at a time instead of just one channel at a time.
===============================================================================================*/
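/*
    Illustrative sketch, not part of the original sample; it restates the grouping idea in the style of
    the FMOD Ex C# wrapper used below, and the exact signatures should be treated as assumptions:

        FMOD.ChannelGroup groupA = null;
        result = system.createChannelGroup("Group A", ref groupA);   // create a submix bus
        result = channel[0].setChannelGroup(groupA);                  // route a playing channel into it
        result = groupA.setMute(true);                                // now affects every channel in the group
*/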
using System;
using System.Drawing;
using System.Collections;
using System.ComponentModel;
using System.Windows.Forms;
using System.Data;
namespace submixing
{
public class Form1 : System.Windows.Forms.Form
{
private FMOD.System system = null;
private FMOD.Sound[] sound = new FMOD.Sound[5];
private FMOD.Channel[] channel = new FMOD.Channel[5];
private FMOD.ChannelGroup groupA = null, groupB = null, masterGroup = null;
private FMOD.DSP dspecho = null, dspflange = null, dsplowpass = null;
bool mutea = true, muteb = true;
bool echo = true, flange = true, lowpasson = true;
private FMOD.DSPConnection dspconnectiontemp = null;
private System.Windows.Forms.StatusBar statusBar;
private System.Windows.Forms.Button exit_button;
private System.Windows.Forms.Label label;
private System.Windows.Forms.Label label1;
private System.Windows.Forms.Label label2;
private System.Windows.Forms.Label label3;
private System.Windows.Forms.Label label4;
private System.Windows.Forms.Label label5;
private System.Windows.Forms.Label label6;
private System.Windows.Forms.Label label7;
private System.Windows.Forms.Label label8;
private System.Windows.Forms.Label label9;
private System.Windows.Forms.Label label10;
private System.Windows.Forms.Label label11;
private System.Windows.Forms.Label label12;
private System.Windows.Forms.Button muteA;
private System.Windows.Forms.Button flangeB;
private System.Windows.Forms.Button muteB;
private System.Windows.Forms.Button lowpass;
private System.Windows.Forms.Button echoA;
private System.Windows.Forms.Timer timer;
private System.Windows.Forms.Label label13;
private System.Windows.Forms.Label label14;
private System.Windows.Forms.Label label15;
private System.Windows.Forms.Label label16;
private System.Windows.Forms.Label label17;
private System.Windows.Forms.Label label18;
private System.Windows.Forms.Label label19;
private System.Windows.Forms.Label label20;
private System.Windows.Forms.Label label21;
private System.Windows.Forms.Label label22;
private System.Windows.Forms.Label label23;
private System.Windows.Forms.Label label24;
private System.ComponentModel.IContainer components;
public Form1()
{
InitializeComponent();
}
protected override void Dispose( bool disposing )
{
if( disposing )
{
FMOD.RESULT result;
                /*
                    Shut down
                */
for (int count = 0; count < 5; count++)
{
if (sound[count] != null)
{
result = sound[count].release();
ERRCHECK(result);
}
}
if (dspecho != null)
{
result = dspecho.release();
ERRCHECK(result);
}
if (dspflange != null)
{
result = dspflange.release();
ERRCHECK(result);
}
if (dsplowpass != null)
{
result = dsplowpass.release();
ERRCHECK(result);
}
if (groupA != null)
{
result = groupA.release();
ERRCHECK(result);
}
if (groupB != null)
{
result = groupB.release();
ERRCHECK(result);
}
if (system != null)
{
result = system.close();
ERRCHECK(result);
result = system.release();
ERRCHECK(result);
}
if (components != null)
{
components.Dispose();
}
}
base.Dispose( disposing );
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent()
{
this.components = new System.ComponentModel.Container();
this.statusBar = new System.Windows.Forms.StatusBar();
this.exit_button = new System.Windows.Forms.Button();
this.label = new System.Windows.Forms.Label();
this.label1 = new System.Windows.Forms.Label();
this.label2 = new System.Windows.Forms.Label();
this.label3 = new System.Windows.Forms.Label();
this.label4 = new System.Windows.Forms.Label();
this.label5 = new System.Windows.Forms.Label();
this.label6 = new System.Windows.Forms.Label();
this.label7 = new System.Windows.Forms.Label();
this.label8 = new System.Windows.Forms.Label();
this.label9 = new System.Windows.Forms.Label();
this.label10 = new System.Windows.Forms.Label();
this.label11 = new System.Windows.Forms.Label();
this.label12 = new System.Windows.Forms.Label();
this.muteA = new System.Windows.Forms.Button();
this.flangeB = new System.Windows.Forms.Button();
this.muteB = new System.Windows.Forms.Button();
this.lowpass = new System.Windows.Forms.Button();
this.echoA = new System.Windows.Forms.Button();
this.timer = new System.Windows.Forms.Timer(this.components);
this.label13 = new System.Windows.Forms.Label();
this.label14 = new System.Windows.Forms.Label();
this.label15 = new System.Windows.Forms.Label();
this.label16 = new System.Windows.Forms.Label();
this.label17 = new System.Windows.Forms.Label();
this.label18 = new System.Windows.Forms.Label();
this.label19 = new System.Windows.Forms.Label();
this.label20 = new System.Windows.Forms.Label();
this.label21 = new System.Windows.Forms.Label();
this.label22 = new System.Windows.Forms.Label();
this.label23 = new System.Windows.Forms.Label();
this.label24 = new System.Windows.Forms.Label();
this.SuspendLayout();
//
// statusBar
//
this.statusBar.Location = new System.Drawing.Point(0, 371);
this.statusBar.Name = "statusBar";
this.statusBar.Size = new System.Drawing.Size(496, 24);
this.statusBar.TabIndex = 22;
//
// exit_button
//
this.exit_button.Location = new System.Drawing.Point(200, 336);
this.exit_button.Name = "exit_button";
this.exit_button.Size = new System.Drawing.Size(72, 24);
this.exit_button.TabIndex = 23;
this.exit_button.Text = "Exit";
this.exit_button.Click += new System.EventHandler(this.exit_button_Click);
//
// label
//
this.label.Location = new System.Drawing.Point(14, 16);
this.label.Name = "label";
this.label.Size = new System.Drawing.Size(264, 32);
this.label.TabIndex = 24;
this.label.Text = "Copyright (c) Firelight Technologies 2004-2011";
this.label.TextAlign = System.Drawing.ContentAlignment.MiddleCenter;
//
// label1
//
this.label1.Location = new System.Drawing.Point(0, 0);
this.label1.Name = "label1";
this.label1.TabIndex = 42;
//
// label2
//
this.label2.Location = new System.Drawing.Point(88, 136);
this.label2.Name = "label2";
this.label2.Size = new System.Drawing.Size(48, 16);
this.label2.TabIndex = 26;
this.label2.Text = "lowpass";
//
// label3
//
this.label3.Location = new System.Drawing.Point(152, 136);
this.label3.Name = "label3";
this.label3.Size = new System.Drawing.Size(80, 16);
this.label3.TabIndex = 27;
this.label3.Text = "mastergroup";
//
// label4
//
this.label4.Location = new System.Drawing.Point(256, 176);
this.label4.Name = "label4";
this.label4.Size = new System.Drawing.Size(64, 16);
this.label4.TabIndex = 28;
this.label4.Text = "flange";
//
// label5
//
this.label5.Location = new System.Drawing.Point(408, 208);
this.label5.Name = "label5";
this.label5.Size = new System.Drawing.Size(64, 16);
this.label5.TabIndex = 29;
this.label5.Text = "d.ogg";
//
// label6
//
this.label6.Location = new System.Drawing.Point(256, 104);
this.label6.Name = "label6";
this.label6.Size = new System.Drawing.Size(64, 16);
this.label6.TabIndex = 30;
this.label6.Text = "echo";
//
// label7
//
this.label7.Location = new System.Drawing.Point(408, 48);
this.label7.Name = "label7";
this.label7.Size = new System.Drawing.Size(88, 16);
this.label7.TabIndex = 31;
this.label7.Text = "drumloop.wav";
//
// label8
//
this.label8.Location = new System.Drawing.Point(408, 168);
this.label8.Name = "label8";
this.label8.Size = new System.Drawing.Size(64, 16);
this.label8.TabIndex = 32;
this.label8.Text = "c.ogg";
//
// label9
//
this.label9.Location = new System.Drawing.Point(408, 96);
this.label9.Name = "label9";
this.label9.Size = new System.Drawing.Size(64, 16);
this.label9.TabIndex = 33;
this.label9.Text = "jaguar.wav";
//
// label10
//
this.label10.Location = new System.Drawing.Point(328, 80);
this.label10.Name = "label10";
this.label10.Size = new System.Drawing.Size(64, 16);
this.label10.TabIndex = 34;
this.label10.Text = "groupA";
//
// label11
//
this.label11.Location = new System.Drawing.Point(320, 208);
this.label11.Name = "label11";
this.label11.Size = new System.Drawing.Size(64, 16);
this.label11.TabIndex = 35;
this.label11.Text = "groupB";
//
// label12
//
this.label12.Location = new System.Drawing.Point(408, 240);
this.label12.Name = "label12";
this.label12.Size = new System.Drawing.Size(64, 16);
this.label12.TabIndex = 36;
this.label12.Text = "e.ogg";
//
// muteA
//
this.muteA.Location = new System.Drawing.Point(32, 224);
this.muteA.Name = "muteA";
this.muteA.Size = new System.Drawing.Size(96, 32);
this.muteA.TabIndex = 37;
this.muteA.Text = "Mute/Unmute group A";
this.muteA.Click += new System.EventHandler(this.muteA_Click);
//
// flangeB
//
this.flangeB.Location = new System.Drawing.Point(144, 264);
this.flangeB.Name = "flangeB";
this.flangeB.Size = new System.Drawing.Size(96, 32);
this.flangeB.TabIndex = 38;
this.flangeB.Text = "Flange on group B";
this.flangeB.Click += new System.EventHandler(this.flangeB_Click);
//
// muteB
//
this.muteB.Location = new System.Drawing.Point(144, 224);
this.muteB.Name = "muteB";
this.muteB.Size = new System.Drawing.Size(96, 32);
this.muteB.TabIndex = 39;
this.muteB.Text = "Mute/Unmute group B";
this.muteB.Click += new System.EventHandler(this.muteB_Click);
//
// lowpass
//
this.lowpass.Location = new System.Drawing.Point(32, 304);
this.lowpass.Name = "lowpass";
this.lowpass.Size = new System.Drawing.Size(96, 48);
this.lowpass.TabIndex = 40;
this.lowpass.Text = "Lowpass on master group (everything)";
this.lowpass.Click += new System.EventHandler(this.lowpass_Click);
//
// echoA
//
this.echoA.Location = new System.Drawing.Point(32, 264);
this.echoA.Name = "echoA";
this.echoA.Size = new System.Drawing.Size(96, 32);
this.echoA.TabIndex = 41;
this.echoA.Text = "Echo on group A";
this.echoA.Click += new System.EventHandler(this.echoA_Click);
//
// timer
//
this.timer.Enabled = true;
this.timer.Interval = 10;
this.timer.Tick += new System.EventHandler(this.timer_Tick);
//
// label13
//
this.label13.Location = new System.Drawing.Point(8, 136);
this.label13.Name = "label13";
this.label13.Size = new System.Drawing.Size(64, 16);
this.label13.TabIndex = 43;
this.label13.Text = "soundcard";
//
// label14
//
this.label14.Location = new System.Drawing.Point(232, 120);
this.label14.Name = "label14";
this.label14.Size = new System.Drawing.Size(16, 16);
this.label14.TabIndex = 44;
this.label14.Text = "/";
//
// label15
//
this.label15.Location = new System.Drawing.Point(304, 96);
this.label15.Name = "label15";
this.label15.Size = new System.Drawing.Size(16, 16);
this.label15.TabIndex = 45;
this.label15.Text = "/";
//
// label16
//
this.label16.Location = new System.Drawing.Point(384, 64);
this.label16.Name = "label16";
this.label16.Size = new System.Drawing.Size(16, 16);
this.label16.TabIndex = 46;
this.label16.Text = "/";
//
// label17
//
this.label17.Location = new System.Drawing.Point(384, 192);
this.label17.Name = "label17";
this.label17.Size = new System.Drawing.Size(16, 16);
this.label17.TabIndex = 47;
this.label17.Text = "/";
//
// label18
//
this.label18.Location = new System.Drawing.Point(232, 152);
this.label18.Name = "label18";
this.label18.Size = new System.Drawing.Size(16, 16);
this.label18.TabIndex = 48;
this.label18.Text = "\\";
//
// label19
//
this.label19.Location = new System.Drawing.Point(304, 192);
this.label19.Name = "label19";
this.label19.Size = new System.Drawing.Size(16, 16);
this.label19.TabIndex = 49;
this.label19.Text = "\\";
//
// label20
//
this.label20.Location = new System.Drawing.Point(384, 232);
this.label20.Name = "label20";
this.label20.Size = new System.Drawing.Size(16, 16);
this.label20.TabIndex = 50;
this.label20.Text = "\\";
//
// label21
//
this.label21.Location = new System.Drawing.Point(72, 136);
this.label21.Name = "label21";
this.label21.Size = new System.Drawing.Size(16, 16);
this.label21.TabIndex = 51;
this.label21.Text = "-";
//
// label22
//
this.label22.Location = new System.Drawing.Point(136, 136);
this.label22.Name = "label22";
this.label22.Size = new System.Drawing.Size(16, 16);
this.label22.TabIndex = 52;
this.label22.Text = "-";
//
// label23
//
this.label23.Location = new System.Drawing.Point(376, 208);
this.label23.Name = "label23";
this.label23.Size = new System.Drawing.Size(16, 16);
this.label23.TabIndex = 53;
this.label23.Text = "-";
//
// label24
//
this.label24.Location = new System.Drawing.Point(384, 88);
this.label24.Name = "label24";
this.label24.Size = new System.Drawing.Size(16, 16);
this.label24.TabIndex = 54;
this.label24.Text = "\\";
//
// Form1
//
this.AutoScaleBaseSize = new System.Drawing.Size(5, 13);
this.ClientSize = new System.Drawing.Size(496, 395);
this.Controls.Add(this.label24);
this.Controls.Add(this.label23);
this.Controls.Add(this.label22);
this.Controls.Add(this.label21);
this.Controls.Add(this.label20);
this.Controls.Add(this.label19);
this.Controls.Add(this.label18);
this.Controls.Add(this.label17);
this.Controls.Add(this.label16);
this.Controls.Add(this.label15);
this.Controls.Add(this.label14);
this.Controls.Add(this.label13);
this.Controls.Add(this.echoA);
this.Controls.Add(this.lowpass);
this.Controls.Add(this.muteB);
this.Controls.Add(this.flangeB);
this.Controls.Add(this.muteA);
this.Controls.Add(this.label12);
this.Controls.Add(this.label11);
this.Controls.Add(this.label10);
this.Controls.Add(this.label9);
this.Controls.Add(this.label8);
this.Controls.Add(this.label7);
this.Controls.Add(this.label6);
this.Controls.Add(this.label5);
this.Controls.Add(this.label4);
this.Controls.Add(this.label3);
this.Controls.Add(this.label2);
this.Controls.Add(this.label1);
this.Controls.Add(this.label);
this.Controls.Add(this.exit_button);
this.Controls.Add(this.statusBar);
this.Name = "Form1";
this.Text = "Sub-mixing example";
this.Load += new System.EventHandler(this.Form1_Load);
this.ResumeLayout(false);
}
#endregion
[STAThread]
static void Main()
{
Application.Run(new Form1());
}
private void Form1_Load(object sender, System.EventArgs e)
{
int count = 0;
uint version = 0;
FMOD.RESULT result;
/*
Create a System object and initialize.
*/
result = FMOD.Factory.System_Create(ref system);
ERRCHECK(result);
result = system.getVersion(ref version);
ERRCHECK(result);
if (version < FMOD.VERSION.number)
{
MessageBox.Show("Error! You are using an old version of FMOD " + version.ToString("X") + ". This program requires " + FMOD.VERSION.number.ToString("X") + ".");
Application.Exit();
}
result = system.init(32, FMOD.INITFLAGS.NORMAL, (IntPtr)null);
ERRCHECK(result);
result = system.createSound("../../../../../examples/media/drumloop.wav", FMOD.MODE.SOFTWARE | FMOD.MODE.LOOP_NORMAL, ref sound[0]);
ERRCHECK(result);
result = system.createSound("../../../../../examples/media/jaguar.wav", FMOD.MODE.SOFTWARE | FMOD.MODE.LOOP_NORMAL, ref sound[1]);
ERRCHECK(result);
result = system.createSound("../../../../../examples/media/c.ogg", FMOD.MODE.SOFTWARE | FMOD.MODE.LOOP_NORMAL, ref sound[2]);
ERRCHECK(result);
result = system.createSound("../../../../../examples/media/d.ogg", FMOD.MODE.SOFTWARE | FMOD.MODE.LOOP_NORMAL, ref sound[3]);
ERRCHECK(result);
result = system.createSound("../../../../../examples/media/e.ogg", FMOD.MODE.SOFTWARE | FMOD.MODE.LOOP_NORMAL, ref sound[4]);
ERRCHECK(result);
result = system.createChannelGroup("Group A", ref groupA);
ERRCHECK(result);
result = system.createChannelGroup("Group B", ref groupB);
ERRCHECK(result);
result = system.getMasterChannelGroup(ref masterGroup);
ERRCHECK(result);
result = masterGroup.addGroup(groupA);
ERRCHECK(result);
result = masterGroup.addGroup(groupB);
ERRCHECK(result);
/*
Start all the sounds!
*/
for (count = 0; count < 5; count++)
{
result = system.playSound(FMOD.CHANNELINDEX.FREE, sound[count], true, ref channel[count]);
ERRCHECK(result);
if (count < 2)
{
result = channel[count].setChannelGroup(groupA);
}
else
{
result = channel[count].setChannelGroup(groupB);
}
ERRCHECK(result);
result = channel[count].setPaused(false);
ERRCHECK(result);
}
/*
Create the DSP effects we want to apply to our submixes.
*/
result = system.createDSPByType(FMOD.DSP_TYPE.ECHO, ref dspecho);
ERRCHECK(result);
result = system.createDSPByType(FMOD.DSP_TYPE.FLANGE, ref dspflange);
ERRCHECK(result);
result = dspflange.setParameter((int)FMOD.DSP_FLANGE.RATE, 1.0f);
ERRCHECK(result);
result = system.createDSPByType(FMOD.DSP_TYPE.LOWPASS, ref dsplowpass);
ERRCHECK(result);
result = dsplowpass.setParameter((int)FMOD.DSP_LOWPASS.CUTOFF, 500.0f);
ERRCHECK(result);
}
private void timer_Tick(object sender, System.EventArgs e)
{
int channelsplaying = 0;
if (system != null)
{
system.getChannelsPlaying(ref channelsplaying);
system.update();
}
statusBar.Text = "Channels Playing " + channelsplaying;
}
private void muteA_Click(object sender, System.EventArgs e)
{
groupA.setMute(mutea);
mutea = !mutea;
}
private void muteB_Click(object sender, System.EventArgs e)
{
groupB.setMute(muteb);
muteb = !muteb;
}
private void echoA_Click(object sender, System.EventArgs e)
{
if (echo)
{
groupA.addDSP(dspecho, ref dspconnectiontemp);
}
else
{
dspecho.remove();
}
echo = !echo;
}
private void flangeB_Click(object sender, System.EventArgs e)
{
if (flange)
{
groupB.addDSP(dspflange, ref dspconnectiontemp);
}
else
{
dspflange.remove();
}
flange = !flange;
}
private void lowpass_Click(object sender, System.EventArgs e)
{
if (lowpasson)
{
masterGroup.addDSP(dsplowpass, ref dspconnectiontemp);
}
else
{
dsplowpass.remove();
}
lowpasson = !lowpasson;
}
private void exit_button_Click(object sender, System.EventArgs e)
{
Application.Exit();
}
private void ERRCHECK(FMOD.RESULT result)
{
if (result != FMOD.RESULT.OK)
{
timer.Stop();
MessageBox.Show("FMOD error! " + result + " - " + FMOD.Error.String(result));
Environment.Exit(-1);
}
}
}
}
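// --- Illustrative appendix (not part of the original sample) ---------------------------------
// A minimal console-style sketch of the sub-mixing pattern the form above demonstrates:
// route channels into a channel group, hang the group off the master group, and toggle a DSP
// on the group so it affects every channel routed through it. It reuses only FMOD Ex wrapper
// calls that already appear in this sample; the media path and the DSPConnection type are taken
// from the sample and are assumptions outside of it. ERRCHECK-style error handling is omitted
// for brevity, so the FMOD.RESULT return values are ignored here.
namespace submixing_sketch
{
    using System;

    class SubmixSketch
    {
        static void Main()
        {
            FMOD.System system = null;
            FMOD.Sound sound = null;
            FMOD.Channel channel = null;
            FMOD.ChannelGroup groupA = null, masterGroup = null;
            FMOD.DSP dspecho = null;
            FMOD.DSPConnection connection = null;

            FMOD.Factory.System_Create(ref system);
            system.init(32, FMOD.INITFLAGS.NORMAL, (IntPtr)null);

            // Load a looping sound and create a group that is a child of the master group.
            system.createSound("../../../../../examples/media/drumloop.wav",
                               FMOD.MODE.SOFTWARE | FMOD.MODE.LOOP_NORMAL, ref sound);
            system.createChannelGroup("Group A", ref groupA);
            system.getMasterChannelGroup(ref masterGroup);
            masterGroup.addGroup(groupA);

            // Start the sound paused, route it through groupA, then unpause it.
            system.playSound(FMOD.CHANNELINDEX.FREE, sound, true, ref channel);
            channel.setChannelGroup(groupA);
            channel.setPaused(false);

            // Adding a DSP to the group applies it to everything routed through the group.
            system.createDSPByType(FMOD.DSP_TYPE.ECHO, ref dspecho);
            groupA.addDSP(dspecho, ref connection);
            // ...later, dspecho.remove() takes the effect off again (as echoA_Click does above).

            // Pump FMOD for a few seconds, the way the form does in timer_Tick.
            for (int i = 0; i < 500; i++)
            {
                system.update();
                System.Threading.Thread.Sleep(10);
            }

            sound.release();
            dspecho.release();
            groupA.release();
            system.close();
            system.release();
        }
    }
}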
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator 0.17.0.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Management.DataLake.Store
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Microsoft.Rest.Azure.OData;
using Microsoft.Rest.Azure;
using Models;
/// <summary>
/// Extension methods for AccountOperations.
/// </summary>
public static partial class AccountOperationsExtensions
{
/// <summary>
/// Creates the specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='name'>
/// The name of the Data Lake Store account to create.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to create the Data Lake Store account.
/// </param>
public static DataLakeStoreAccount Create(this IAccountOperations operations, string resourceGroupName, string name, DataLakeStoreAccount parameters)
{
return Task.Factory.StartNew(s => ((IAccountOperations)s).CreateAsync(resourceGroupName, name, parameters), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Creates the specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='name'>
/// The name of the Data Lake Store account to create.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to create the Data Lake Store account.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DataLakeStoreAccount> CreateAsync(this IAccountOperations operations, string resourceGroupName, string name, DataLakeStoreAccount parameters, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.CreateWithHttpMessagesAsync(resourceGroupName, name, parameters, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Creates the specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='name'>
/// The name of the Data Lake Store account to create.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to create the Data Lake Store account.
/// </param>
public static DataLakeStoreAccount BeginCreate(this IAccountOperations operations, string resourceGroupName, string name, DataLakeStoreAccount parameters)
{
return Task.Factory.StartNew(s => ((IAccountOperations)s).BeginCreateAsync(resourceGroupName, name, parameters), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Creates the specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='name'>
/// The name of the Data Lake Store account to create.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to create the Data Lake Store account.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DataLakeStoreAccount> BeginCreateAsync(this IAccountOperations operations, string resourceGroupName, string name, DataLakeStoreAccount parameters, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.BeginCreateWithHttpMessagesAsync(resourceGroupName, name, parameters, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Updates the specified Data Lake Store account information.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='name'>
/// The name of the Data Lake Store account to update.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to update the Data Lake Store account.
/// </param>
public static DataLakeStoreAccount Update(this IAccountOperations operations, string resourceGroupName, string name, DataLakeStoreAccountUpdateParameters parameters)
{
return Task.Factory.StartNew(s => ((IAccountOperations)s).UpdateAsync(resourceGroupName, name, parameters), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Updates the specified Data Lake Store account information.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='name'>
/// The name of the Data Lake Store account to update.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to update the Data Lake Store account.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DataLakeStoreAccount> UpdateAsync(this IAccountOperations operations, string resourceGroupName, string name, DataLakeStoreAccountUpdateParameters parameters, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.UpdateWithHttpMessagesAsync(resourceGroupName, name, parameters, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Updates the specified Data Lake Store account information.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='name'>
/// The name of the Data Lake Store account to update.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to update the Data Lake Store account.
/// </param>
public static DataLakeStoreAccount BeginUpdate(this IAccountOperations operations, string resourceGroupName, string name, DataLakeStoreAccountUpdateParameters parameters)
{
return Task.Factory.StartNew(s => ((IAccountOperations)s).BeginUpdateAsync(resourceGroupName, name, parameters), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Updates the specified Data Lake Store account information.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='name'>
/// The name of the Data Lake Store account to update.
/// </param>
/// <param name='parameters'>
/// Parameters supplied to update the Data Lake Store account.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DataLakeStoreAccount> BeginUpdateAsync(this IAccountOperations operations, string resourceGroupName, string name, DataLakeStoreAccountUpdateParameters parameters, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.BeginUpdateWithHttpMessagesAsync(resourceGroupName, name, parameters, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Deletes the specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account to delete.
/// </param>
public static void Delete(this IAccountOperations operations, string resourceGroupName, string accountName)
{
Task.Factory.StartNew(s => ((IAccountOperations)s).DeleteAsync(resourceGroupName, accountName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Deletes the specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account to delete.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task DeleteAsync(this IAccountOperations operations, string resourceGroupName, string accountName, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.DeleteWithHttpMessagesAsync(resourceGroupName, accountName, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Deletes the specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account to delete.
/// </param>
public static void BeginDelete(this IAccountOperations operations, string resourceGroupName, string accountName)
{
Task.Factory.StartNew(s => ((IAccountOperations)s).BeginDeleteAsync(resourceGroupName, accountName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Deletes the specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account to delete.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task BeginDeleteAsync(this IAccountOperations operations, string resourceGroupName, string accountName, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.BeginDeleteWithHttpMessagesAsync(resourceGroupName, accountName, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Gets the specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account to retrieve.
/// </param>
public static DataLakeStoreAccount Get(this IAccountOperations operations, string resourceGroupName, string accountName)
{
return Task.Factory.StartNew(s => ((IAccountOperations)s).GetAsync(resourceGroupName, accountName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets the specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account to retrieve.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<DataLakeStoreAccount> GetAsync(this IAccountOperations operations, string resourceGroupName, string accountName, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.GetWithHttpMessagesAsync(resourceGroupName, accountName, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Attempts to enable a user managed key vault for encryption of the
/// specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account to attempt to enable the Key Vault
/// for.
/// </param>
public static void EnableKeyVault(this IAccountOperations operations, string resourceGroupName, string accountName)
{
Task.Factory.StartNew(s => ((IAccountOperations)s).EnableKeyVaultAsync(resourceGroupName, accountName), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Attempts to enable a user managed key vault for encryption of the
/// specified Data Lake Store account.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account.
/// </param>
/// <param name='accountName'>
/// The name of the Data Lake Store account to attempt to enable the Key Vault
/// for.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task EnableKeyVaultAsync(this IAccountOperations operations, string resourceGroupName, string accountName, CancellationToken cancellationToken = default(CancellationToken))
{
await operations.EnableKeyVaultWithHttpMessagesAsync(resourceGroupName, accountName, null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Lists the Data Lake Store accounts within a specific resource group. The
/// response includes a link to the next page of results, if any.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account(s).
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
/// <param name='select'>
/// OData Select statement. Limits the properties on each entry to just those
/// requested, e.g. Categories?$select=CategoryName,Description. Optional.
/// </param>
/// <param name='count'>
/// A Boolean value of true or false to request a count of the matching
/// resources included with the resources in the response, e.g.
/// Categories?$count=true. Optional.
/// </param>
public static IPage<DataLakeStoreAccount> ListByResourceGroup(this IAccountOperations operations, string resourceGroupName, ODataQuery<DataLakeStoreAccount> odataQuery = default(ODataQuery<DataLakeStoreAccount>), string select = default(string), bool? count = default(bool?))
{
return Task.Factory.StartNew(s => ((IAccountOperations)s).ListByResourceGroupAsync(resourceGroupName, odataQuery, select, count), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Lists the Data Lake Store accounts within a specific resource group. The
/// response includes a link to the next page of results, if any.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='resourceGroupName'>
/// The name of the Azure resource group that contains the Data Lake Store
/// account(s).
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
/// <param name='select'>
/// OData Select statement. Limits the properties on each entry to just those
/// requested, e.g. Categories?$select=CategoryName,Description. Optional.
/// </param>
/// <param name='count'>
/// A Boolean value of true or false to request a count of the matching
/// resources included with the resources in the response, e.g.
/// Categories?$count=true. Optional.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<DataLakeStoreAccount>> ListByResourceGroupAsync(this IAccountOperations operations, string resourceGroupName, ODataQuery<DataLakeStoreAccount> odataQuery = default(ODataQuery<DataLakeStoreAccount>), string select = default(string), bool? count = default(bool?), CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListByResourceGroupWithHttpMessagesAsync(resourceGroupName, odataQuery, select, count, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Lists the Data Lake Store accounts within the subscription. The response
/// includes a link to the next page of results, if any.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
/// <param name='select'>
/// OData Select statement. Limits the properties on each entry to just those
/// requested, e.g. Categories?$select=CategoryName,Description. Optional.
/// </param>
/// <param name='count'>
/// The Boolean value of true or false to request a count of the matching
/// resources included with the resources in the response, e.g.
/// Categories?$count=true. Optional.
/// </param>
public static IPage<DataLakeStoreAccount> List(this IAccountOperations operations, ODataQuery<DataLakeStoreAccount> odataQuery = default(ODataQuery<DataLakeStoreAccount>), string select = default(string), bool? count = default(bool?))
{
return Task.Factory.StartNew(s => ((IAccountOperations)s).ListAsync(odataQuery, select, count), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Lists the Data Lake Store accounts within the subscription. The response
/// includes a link to the next page of results, if any.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
/// <param name='select'>
/// OData Select statement. Limits the properties on each entry to just those
/// requested, e.g. Categories?$select=CategoryName,Description. Optional.
/// </param>
/// <param name='count'>
/// The Boolean value of true or false to request a count of the matching
/// resources included with the resources in the response, e.g.
/// Categories?$count=true. Optional.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<DataLakeStoreAccount>> ListAsync(this IAccountOperations operations, ODataQuery<DataLakeStoreAccount> odataQuery = default(ODataQuery<DataLakeStoreAccount>), string select = default(string), bool? count = default(bool?), CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListWithHttpMessagesAsync(odataQuery, select, count, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Lists the Data Lake Store accounts within a specific resource group. The
/// response includes a link to the next page of results, if any.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static IPage<DataLakeStoreAccount> ListByResourceGroupNext(this IAccountOperations operations, string nextPageLink)
{
return Task.Factory.StartNew(s => ((IAccountOperations)s).ListByResourceGroupNextAsync(nextPageLink), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Lists the Data Lake Store accounts within a specific resource group. The
/// response includes a link to the next page of results, if any.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<DataLakeStoreAccount>> ListByResourceGroupNextAsync(this IAccountOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListByResourceGroupNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
/// <summary>
/// Lists the Data Lake Store accounts within the subscription. The response
/// includes a link to the next page of results, if any.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
public static IPage<DataLakeStoreAccount> ListNext(this IAccountOperations operations, string nextPageLink)
{
return Task.Factory.StartNew(s => ((IAccountOperations)s).ListNextAsync(nextPageLink), operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Lists the Data Lake Store accounts within the subscription. The response
/// includes a link to the next page of results, if any.
/// </summary>
/// <param name='operations'>
/// The operations group for this extension method.
/// </param>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
public static async Task<IPage<DataLakeStoreAccount>> ListNextAsync(this IAccountOperations operations, string nextPageLink, CancellationToken cancellationToken = default(CancellationToken))
{
using (var _result = await operations.ListNextWithHttpMessagesAsync(nextPageLink, null, cancellationToken).ConfigureAwait(false))
{
return _result.Body;
}
}
}
}
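// --- Illustrative appendix (not part of the generated client) --------------------------------
// A hypothetical caller sketch for the extension methods above. It assumes an authenticated
// IAccountOperations instance is supplied by the surrounding application; the resource group
// and account names are placeholders, and the Name and Location properties on the
// DataLakeStoreAccount model are assumptions not confirmed by this file.
namespace Microsoft.Azure.Management.DataLake.Store.UsageSketch
{
    using System;
    using System.Threading.Tasks;
    using Microsoft.Azure.Management.DataLake.Store;
    using Microsoft.Azure.Management.DataLake.Store.Models;

    internal static class AccountOperationsSketch
    {
        internal static async Task RunAsync(IAccountOperations operations)
        {
            // Create an account and read it back.
            await operations.CreateAsync(
                "myResourceGroup",
                "myadlsaccount",
                new DataLakeStoreAccount { Location = "eastus2" }); // Location is an assumed model property
            DataLakeStoreAccount fetched = await operations.GetAsync("myResourceGroup", "myadlsaccount");
            Console.WriteLine(fetched.Name); // Name is an assumed model property

            // Page through all accounts in the resource group, following NextPageLink.
            var page = await operations.ListByResourceGroupAsync("myResourceGroup");
            while (page != null)
            {
                foreach (var account in page)
                {
                    Console.WriteLine(account.Name);
                }
                page = string.IsNullOrEmpty(page.NextPageLink)
                    ? null
                    : await operations.ListByResourceGroupNextAsync(page.NextPageLink);
            }

            // Delete the account when it is no longer needed.
            await operations.DeleteAsync("myResourceGroup", "myadlsaccount");
        }
    }
}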
| |
using Microsoft.TeamFoundation.DistributedTask.WebApi;
using Microsoft.VisualStudio.Services.Agent.Util;
using Microsoft.VisualStudio.Services.Agent.Worker.Build;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Linq;
using BuildWebApi = Microsoft.TeamFoundation.Build.WebApi;
using Microsoft.TeamFoundation.DistributedTask.Logging;
using Microsoft.VisualStudio.Services.Agent.Worker.Container;
namespace Microsoft.VisualStudio.Services.Agent.Worker
{
public sealed class Variables
{
private readonly IHostContext _hostContext;
private readonly ConcurrentDictionary<string, Variable> _nonexpanded = new ConcurrentDictionary<string, Variable>(StringComparer.OrdinalIgnoreCase);
private readonly ISecretMasker _secretMasker;
private readonly object _setLock = new object();
private readonly Tracing _trace;
private ConcurrentDictionary<string, Variable> _expanded;
public IEnumerable<KeyValuePair<string, string>> Public
{
get
{
return _expanded.Values
.Where(x => !x.Secret)
.Select(x => new KeyValuePair<string, string>(x.Name, x.Value));
}
}
public IEnumerable<KeyValuePair<string, string>> Private
{
get
{
return _expanded.Values
.Where(x => x.Secret)
.Select(x => new KeyValuePair<string, string>(x.Name, x.Value));
}
}
public Variables(IHostContext hostContext, IDictionary<string, VariableValue> copy, out List<string> warnings)
{
// Store/Validate args.
_hostContext = hostContext;
_secretMasker = _hostContext.SecretMasker;
_trace = _hostContext.GetTrace(nameof(Variables));
ArgUtil.NotNull(hostContext, nameof(hostContext));
// Validate the dictionary, remove any variable with empty variable name.
ArgUtil.NotNull(copy, nameof(copy));
if (copy.Keys.Any(k => string.IsNullOrWhiteSpace(k)))
{
_trace.Info($"Remove {copy.Keys.Count(k => string.IsNullOrWhiteSpace(k))} variables with empty variable name.");
}
// Initialize the variable dictionary.
List<Variable> variables = new List<Variable>();
foreach (var variable in copy)
{
if (!string.IsNullOrWhiteSpace(variable.Key))
{
variables.Add(new Variable(variable.Key, variable.Value.Value, variable.Value.IsSecret));
}
}
foreach (Variable variable in variables)
{
// Store the variable. The initial secret values have already been
// registered by the Worker class.
_nonexpanded[variable.Name] = variable;
}
// Recursively expand the variables.
RecalculateExpanded(out warnings);
}
        // DO NOT add file path variables here.
        // All file path variables need to be retrieved and set through ExecutionContext, so it can handle container file path translation.
public TaskResult? Agent_JobStatus
{
get
{
return GetEnum<TaskResult>(Constants.Variables.Agent.JobStatus);
}
set
{
Set(Constants.Variables.Agent.JobStatus, $"{value}");
}
}
public string Agent_ProxyUrl => Get(Constants.Variables.Agent.ProxyUrl);
public string Agent_ProxyUsername => Get(Constants.Variables.Agent.ProxyUsername);
public string Agent_ProxyPassword => Get(Constants.Variables.Agent.ProxyPassword);
public int? Build_BuildId => GetInt(BuildWebApi.BuildVariables.BuildId);
public string Build_BuildUri => Get(BuildWebApi.BuildVariables.BuildUri);
public BuildCleanOption? Build_Clean => GetEnum<BuildCleanOption>(Constants.Variables.Features.BuildDirectoryClean) ?? GetEnum<BuildCleanOption>(Constants.Variables.Build.Clean);
public long? Build_ContainerId => GetLong(BuildWebApi.BuildVariables.ContainerId);
public string Build_DefinitionName => Get(Constants.Variables.Build.DefinitionName);
public bool? Build_GatedRunCI => GetBoolean(Constants.Variables.Build.GatedRunCI);
public string Build_GatedShelvesetName => Get(Constants.Variables.Build.GatedShelvesetName);
public string Build_Number => Get(Constants.Variables.Build.Number);
public string Build_RepoTfvcWorkspace => Get(Constants.Variables.Build.RepoTfvcWorkspace);
        public string Build_RequestedFor => Get(BuildWebApi.BuildVariables.RequestedFor);
public string Build_SourceBranch => Get(Constants.Variables.Build.SourceBranch);
public string Build_SourceTfvcShelveset => Get(Constants.Variables.Build.SourceTfvcShelveset);
public string Build_SourceVersion => Get(Constants.Variables.Build.SourceVersion);
public bool? Build_SyncSources => GetBoolean(Constants.Variables.Build.SyncSources);
public string Release_ArtifactsDirectory => Get(Constants.Variables.Release.ArtifactsDirectory);
public string Release_ReleaseEnvironmentUri => Get(Constants.Variables.Release.ReleaseEnvironmentUri);
public string Release_ReleaseId => Get(Constants.Variables.Release.ReleaseId);
public string Release_ReleaseName => Get(Constants.Variables.Release.ReleaseName);
public string Release_ReleaseUri => Get(Constants.Variables.Release.ReleaseUri);
public int? Release_Download_BufferSize => GetInt(Constants.Variables.Release.ReleaseDownloadBufferSize);
public int? Release_Parallel_Download_Limit => GetInt(Constants.Variables.Release.ReleaseParallelDownloadLimit);
public string System_CollectionId => Get(Constants.Variables.System.CollectionId);
public bool? System_Debug => GetBoolean(Constants.Variables.System.Debug);
public string System_DefinitionId => Get(Constants.Variables.System.DefinitionId);
public bool? System_EnableAccessToken => GetBoolean(Constants.Variables.System.EnableAccessToken);
public HostTypes System_HostType => GetEnum<HostTypes>(Constants.Variables.System.HostType) ?? HostTypes.None;
public string System_PhaseDisplayName => Get(Constants.Variables.System.PhaseDisplayName);
public string System_TaskDefinitionsUri => Get(WellKnownDistributedTaskVariables.TaskDefinitionsUrl);
public string System_TeamProject => Get(BuildWebApi.BuildVariables.TeamProject);
public Guid? System_TeamProjectId => GetGuid(BuildWebApi.BuildVariables.TeamProjectId);
public string System_TFCollectionUrl => Get(WellKnownDistributedTaskVariables.TFCollectionUrl);
public void ExpandValues(IDictionary<string, string> target)
{
_trace.Entering();
var source = new Dictionary<string, string>(StringComparer.OrdinalIgnoreCase);
foreach (Variable variable in _expanded.Values)
{
source[variable.Name] = variable.Value;
}
VarUtil.ExpandValues(_hostContext, source, target);
}
public string Get(string name)
{
Variable variable;
if (_expanded.TryGetValue(name, out variable))
{
_trace.Verbose($"Get '{name}': '{variable.Value}'");
return variable.Value;
}
_trace.Verbose($"Get '{name}' (not found)");
return null;
}
public bool? GetBoolean(string name)
{
bool val;
if (bool.TryParse(Get(name), out val))
{
return val;
}
return null;
}
public T? GetEnum<T>(string name) where T : struct
{
return EnumUtil.TryParse<T>(Get(name));
}
public Guid? GetGuid(string name)
{
Guid val;
if (Guid.TryParse(Get(name), out val))
{
return val;
}
return null;
}
public int? GetInt(string name)
{
int val;
if (int.TryParse(Get(name), out val))
{
return val;
}
return null;
}
public long? GetLong(string name)
{
long val;
if (long.TryParse(Get(name), out val))
{
return val;
}
return null;
}
public void Set(string name, string val, bool secret = false)
{
// Validate the args.
ArgUtil.NotNullOrEmpty(name, nameof(name));
// Add or update the variable.
lock (_setLock)
{
// Determine whether the value should be a secret. The approach taken here is somewhat
// conservative. If the previous expanded variable is a secret, then assume the new
// value should be a secret as well.
//
// Keep in mind, the two goals of flagging variables as secret:
// 1) Mask secrets from the logs.
// 2) Keep secrets out of environment variables for tasks. Secrets must be passed into
// tasks via inputs. It's better to take a conservative approach when determining
// whether a variable should be marked secret. Otherwise nested secret values may
                //    inadvertently end up in public environment variables.
secret = secret || (_expanded.ContainsKey(name) && _expanded[name].Secret);
// Register the secret. Secret masker handles duplicates gracefully.
if (secret && !string.IsNullOrEmpty(val))
{
_secretMasker.AddValue(val);
}
// Store the value as-is to the expanded dictionary and the non-expanded dictionary.
                // It is not expected that the caller needs to store a non-expanded value and then
// retrieve the expanded value in the same context.
var variable = new Variable(name, val, secret);
_expanded[name] = variable;
_nonexpanded[name] = variable;
_trace.Verbose($"Set '{name}' = '{val}'");
}
}
public bool TryGetValue(string name, out string val)
{
Variable variable;
if (_expanded.TryGetValue(name, out variable))
{
val = variable.Value;
_trace.Verbose($"Get '{name}': '{val}'");
return true;
}
val = null;
_trace.Verbose($"Get '{name}' (not found)");
return false;
}
public void RecalculateExpanded(out List<string> warnings)
{
// TODO: A performance improvement could be made by short-circuiting if the non-expanded values are not dirty. It's unclear whether it would make a significant difference.
// Take a lock to prevent the variables from changing while expansion is being processed.
lock (_setLock)
{
const int MaxDepth = 50;
// TODO: Validate max size? No limit on *nix. Max of 32k per env var on Windows https://msdn.microsoft.com/en-us/library/windows/desktop/ms682653%28v=vs.85%29.aspx
_trace.Entering();
warnings = new List<string>();
// Create a new expanded instance.
var expanded = new ConcurrentDictionary<string, Variable>(_nonexpanded, StringComparer.OrdinalIgnoreCase);
// Process each variable in the dictionary.
foreach (string name in _nonexpanded.Keys)
{
bool secret = _nonexpanded[name].Secret;
_trace.Verbose($"Processing expansion for variable: '{name}'");
// This algorithm handles recursive replacement using a stack.
// 1) Max depth is enforced by leveraging the stack count.
// 2) Cyclical references are detected by walking the stack.
// 3) Additional call frames are avoided.
bool exceedsMaxDepth = false;
bool hasCycle = false;
var stack = new Stack<RecursionState>();
RecursionState state = new RecursionState(name: name, value: _nonexpanded[name].Value ?? string.Empty);
// The outer while loop is used to manage popping items from the stack (of state objects).
while (true)
{
// The inner while loop is used to manage replacement within the current state object.
// Find the next macro within the current value.
while (state.StartIndex < state.Value.Length &&
(state.PrefixIndex = state.Value.IndexOf(Constants.Variables.MacroPrefix, state.StartIndex, StringComparison.Ordinal)) >= 0 &&
(state.SuffixIndex = state.Value.IndexOf(Constants.Variables.MacroSuffix, state.PrefixIndex + Constants.Variables.MacroPrefix.Length, StringComparison.Ordinal)) >= 0)
{
// A candidate was found.
string nestedName = state.Value.Substring(
startIndex: state.PrefixIndex + Constants.Variables.MacroPrefix.Length,
length: state.SuffixIndex - state.PrefixIndex - Constants.Variables.MacroPrefix.Length);
if (!secret)
{
_trace.Verbose($"Found macro candidate: '{nestedName}'");
}
Variable nestedVariable;
if (!string.IsNullOrEmpty(nestedName) &&
_nonexpanded.TryGetValue(nestedName, out nestedVariable))
{
// A matching variable was found.
// Check for max depth.
int currentDepth = stack.Count + 1; // Add 1 since the current state isn't on the stack.
if (currentDepth == MaxDepth)
{
// Warn and break out of the while loops.
_trace.Warning("Exceeds max depth.");
exceedsMaxDepth = true;
warnings.Add(StringUtil.Loc("Variable0ExceedsMaxDepth1", name, MaxDepth));
break;
}
// Check for a cyclical reference.
else if (string.Equals(state.Name, nestedName, StringComparison.OrdinalIgnoreCase) ||
stack.Any(x => string.Equals(x.Name, nestedName, StringComparison.OrdinalIgnoreCase)))
{
// Warn and break out of the while loops.
_trace.Warning("Cyclical reference detected.");
hasCycle = true;
warnings.Add(StringUtil.Loc("Variable0ContainsCyclicalReference", name));
break;
}
else
{
// Push the current state and start a new state. There is no need to break out
// of the inner while loop. It will continue processing the new current state.
secret = secret || nestedVariable.Secret;
if (!secret)
{
_trace.Verbose($"Processing expansion for nested variable: '{nestedName}'");
}
stack.Push(state);
state = new RecursionState(name: nestedName, value: nestedVariable.Value ?? string.Empty);
}
}
else
{
// A matching variable was not found.
if (!secret)
{
_trace.Verbose("Macro not found.");
}
state.StartIndex = state.PrefixIndex + 1;
}
} // End of inner while loop for processing the variable.
// No replacement is performed if something went wrong.
if (exceedsMaxDepth || hasCycle)
{
break;
}
// Check if finished processing the stack.
if (stack.Count == 0)
{
// Store the final value and break out of the outer while loop.
if (!string.Equals(state.Value, _nonexpanded[name].Value, StringComparison.Ordinal))
{
// Register the secret.
if (secret && !string.IsNullOrEmpty(state.Value))
{
_secretMasker.AddValue(state.Value);
}
// Set the expanded value.
expanded[state.Name] = new Variable(state.Name, state.Value, secret);
_trace.Verbose($"Set '{state.Name}' = '{state.Value}'");
}
break;
}
// Adjust and pop the parent state.
if (!secret)
{
_trace.Verbose("Popping recursion state.");
}
RecursionState parent = stack.Pop();
parent.Value = string.Concat(
parent.Value.Substring(0, parent.PrefixIndex),
state.Value,
parent.Value.Substring(parent.SuffixIndex + Constants.Variables.MacroSuffix.Length));
parent.StartIndex = parent.PrefixIndex + (state.Value).Length;
state = parent;
if (!secret)
{
_trace.Verbose($"Intermediate state '{state.Name}': '{state.Value}'");
}
} // End of outer while loop for recursively processing the variable.
} // End of foreach loop over each key in the dictionary.
_expanded = expanded;
} // End of critical section.
}
private sealed class RecursionState
{
public RecursionState(string name, string value)
{
Name = name;
Value = value;
}
public string Name { get; private set; }
public string Value { get; set; }
public int StartIndex { get; set; }
public int PrefixIndex { get; set; }
public int SuffixIndex { get; set; }
}
}
public sealed class Variable
{
public string Name { get; private set; }
public bool Secret { get; private set; }
public string Value { get; private set; }
public Variable(string name, string value, bool secret)
{
ArgUtil.NotNullOrEmpty(name, nameof(name));
Name = name;
Value = value ?? string.Empty;
Secret = secret;
}
}
}
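// --- Illustrative appendix (not part of the agent source) ------------------------------------
// A simplified, standalone sketch of the "$( )" macro expansion semantics that
// Variables.RecalculateExpanded implements above: macros are replaced recursively, a macro with
// no matching variable is left in place, and a cyclical reference stops further expansion of
// that variable. It is only an illustration of the behaviour; the class above additionally
// enforces a maximum depth, collects warnings, and registers expanded secrets with the secret
// masker, and it uses an explicit stack instead of recursion.
namespace AgentVariablesSketch
{
    using System;
    using System.Collections.Generic;

    internal static class MacroExpansion
    {
        private const string Prefix = "$(";
        private const string Suffix = ")";

        internal static string Expand(IDictionary<string, string> vars, string name)
        {
            return Expand(vars, name, new HashSet<string>(StringComparer.OrdinalIgnoreCase));
        }

        private static string Expand(IDictionary<string, string> vars, string name, HashSet<string> visiting)
        {
            if (!visiting.Add(name))
            {
                // Cyclical reference: return the raw value instead of expanding further.
                vars.TryGetValue(name, out string raw);
                return raw ?? string.Empty;
            }
            vars.TryGetValue(name, out string value);
            value = value ?? string.Empty;
            int start = 0;
            while (start < value.Length)
            {
                int prefix = value.IndexOf(Prefix, start, StringComparison.Ordinal);
                if (prefix < 0) break;
                int suffix = value.IndexOf(Suffix, prefix + Prefix.Length, StringComparison.Ordinal);
                if (suffix < 0) break;
                string nested = value.Substring(prefix + Prefix.Length, suffix - prefix - Prefix.Length);
                if (nested.Length > 0 && vars.ContainsKey(nested))
                {
                    string replacement = Expand(vars, nested, visiting);
                    value = value.Substring(0, prefix) + replacement + value.Substring(suffix + Suffix.Length);
                    start = prefix + replacement.Length;
                }
                else
                {
                    // No matching variable: keep the text and continue scanning after the prefix.
                    start = prefix + 1;
                }
            }
            visiting.Remove(name);
            return value;
        }
    }
    // Example: with { "build.root" = @"c:\agent", "build.bin" = @"$(build.root)\bin" },
    // Expand(vars, "build.bin") returns @"c:\agent\bin".
}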
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Configuration;
using System.Data;
using System.Data.SqlClient;
using System.Globalization;
using System.Web;
using System.Web.Security;
using System.Web.SessionState;
using Microsoft.ApplicationBlocks.Data;
using ILPathways.Business;
namespace ILPathways.DAL
{
/// <summary>
/// Data access manager for DocumentVersion
/// </summary>
public class DocumentStoreManager : BaseDataManager
{
static string className = "DocumentStoreManager";
/// <summary>
/// Base procedures
/// </summary>
const string GET_PROC = "DocumentVersionGet";
const string SELECT_PROC = "DocumentVersionSelect";
const string DELETE_PROC = "DocumentVersionDelete";
const string INSERT_PROC = "DocumentVersionInsert";
const string UPDATE_PROC = "DocumentVersionUpdate";
/// <summary>
/// Default constructor
/// </summary>
public DocumentStoreManager()
{ }//
#region ====== Core Methods ===============================================
/// <summary>
/// Delete a DocumentVersion record using rowId
/// </summary>
/// <param name="pRowId"></param>
/// <param name="statusMessage"></param>
/// <returns></returns>
public bool Delete( string pRowId, ref string statusMessage )
{
string connectionString = ContentConnection();
bool successful;
SqlParameter[] sqlParameters = new SqlParameter[ 1 ];
sqlParameters[ 0 ] = new SqlParameter( "@RowId", SqlDbType.UniqueIdentifier );
sqlParameters[ 0 ].Value = new Guid( pRowId );
try
{
SqlHelper.ExecuteNonQuery( connectionString, CommandType.StoredProcedure, DELETE_PROC, sqlParameters );
successful = true;
} catch ( Exception ex )
{
LogError( ex, className + ".Delete() " );
statusMessage = className + "- Unsuccessful: Delete(): " + ex.Message.ToString();
successful = false;
}
return successful;
}//
/// <summary>
        /// Add a DocumentVersion record
/// </summary>
/// <param name="entity"></param>
/// <param name="statusMessage"></param>
/// <returns></returns>
public static string Create( DocumentVersion entity, ref string statusMessage )
{
string connectionString = ContentConnection();
string newId = "";
#region parameters
SqlParameter[] sqlParameters = new SqlParameter[ 11 ];
sqlParameters[ 0 ] = new SqlParameter( "@Title", SqlDbType.VarChar );
sqlParameters[ 0 ].Size = 200;
sqlParameters[ 0 ].Value = entity.Title;
sqlParameters[ 1 ] = new SqlParameter( "@Summary", SqlDbType.VarChar );
sqlParameters[ 1 ].Size = 500;
sqlParameters[ 1 ].Value = entity.Summary;
sqlParameters[ 2 ] = new SqlParameter( "@Status", SqlDbType.VarChar );
sqlParameters[ 2 ].Size = 25;
sqlParameters[ 2 ].Value = entity.Status;
sqlParameters[ 3 ] = new SqlParameter( "@FileName", SqlDbType.VarChar );
sqlParameters[ 3 ].Size = 150;
sqlParameters[ 3 ].Value = entity.FileName;
sqlParameters[ 4 ] = new SqlParameter( "@FileDate", SqlDbType.DateTime );
if ( entity.FileDate < new System.DateTime( 1980, 1, 1 ) )
entity.FileDate = System.DateTime.Now;
sqlParameters[ 4 ].Value = entity.FileDate;
sqlParameters[ 5 ] = new SqlParameter( "@MimeType", SqlDbType.VarChar );
sqlParameters[ 5 ].Size = 150;
sqlParameters[ 5 ].Value = entity.MimeType;
sqlParameters[ 6 ] = new SqlParameter( "@Bytes", SqlDbType.BigInt );
sqlParameters[ 6 ].Value = entity.ResourceBytes;
sqlParameters[ 7 ] = new SqlParameter( "@Data", SqlDbType.VarBinary );
sqlParameters[ 7 ].Value = entity.ResourceData;
sqlParameters[ 8 ] = new SqlParameter( "@URL", SqlDbType.VarChar );
sqlParameters[ 8 ].Size = 150;
sqlParameters[ 8 ].Value = entity.URL;
sqlParameters[ 9 ] = new SqlParameter( "@CreatedById", SqlDbType.Int );
sqlParameters[ 9 ].Value = entity.CreatedById;
sqlParameters[ 10 ] = new SqlParameter( "@FilePath", entity.FilePath );
#endregion
try
{
SqlDataReader dr = SqlHelper.ExecuteReader( connectionString, CommandType.StoredProcedure, INSERT_PROC, sqlParameters );
if ( dr.HasRows )
{
dr.Read();
newId = dr[ 0 ].ToString();
}
dr.Close();
dr = null;
statusMessage = "successful";
} catch ( Exception ex )
{
LogError( ex, className + ".Create() " );
statusMessage = className + "- Unsuccessful: Create(): " + ex.Message.ToString();
entity.Message = statusMessage;
entity.IsValid = false;
}
return newId;
}
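        // Illustrative only: a hypothetical caller sketch for Create() above. The DocumentVersion
        // members used here (Title, Summary, FileName, MimeType, FileDate, URL, FilePath,
        // CreatedById and SetResourceData) are the ones this class already reads or calls; that
        // they are settable from a caller, and the default Status on a new entity, are assumptions.
        private static string CreateExampleSketch( byte[] fileBytes, int createdById, ref string statusMessage )
        {
            DocumentVersion doc = new DocumentVersion();
            doc.Title = "Sample upload";
            doc.Summary = "Illustrative document version";
            doc.FileName = "sample.pdf";
            doc.MimeType = "application/pdf";
            doc.FileDate = System.DateTime.Now;
            doc.URL = "";
            doc.FilePath = "";
            doc.CreatedById = createdById;
            doc.SetResourceData( fileBytes.Length, fileBytes );

            // Create() returns the new record id, or an empty string on failure
            // (statusMessage and entity.Message then carry the error details).
            return Create( doc, ref statusMessage );
        }//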
/// <summary>
        /// Update a DocumentVersion record
/// </summary>
/// <param name="entity"></param>
/// <returns></returns>
public string Update( DocumentVersion entity )
{
string message = "successful";
string connectionString = ContentConnection();
if ( entity.HasDocument() == false )
{
//if missing doc, need to first retrieve a doc, then overlay as needed
DocumentVersion old = Get( entity.RowId );
if ( old.IsValid == false )
{
return "Error: - document not found!";
}
//only overlay potential missing data
entity.MimeType = old.MimeType;
entity.ResourceBytes = old.ResourceBytes;
entity.SetResourceData( old.ResourceBytes, old.ResourceData );
}
#region parameters
SqlParameter[] sqlParameters = new SqlParameter[ 12 ];
sqlParameters[ 0 ] = new SqlParameter( "@RowId", SqlDbType.UniqueIdentifier );
sqlParameters[ 0 ].Value = entity.RowId;
sqlParameters[ 1 ] = new SqlParameter( "@Title", SqlDbType.VarChar );
sqlParameters[ 1 ].Size = 200;
sqlParameters[ 1 ].Value = entity.Title;
sqlParameters[ 2 ] = new SqlParameter( "@Summary", SqlDbType.VarChar );
sqlParameters[ 2 ].Size = 500;
sqlParameters[ 2 ].Value = entity.Summary;
sqlParameters[ 3 ] = new SqlParameter( "@Status", SqlDbType.VarChar );
sqlParameters[ 3 ].Size = 25;
sqlParameters[ 3 ].Value = entity.Status;
sqlParameters[ 4 ] = new SqlParameter( "@FileName", entity.FileName);
sqlParameters[ 5 ] = new SqlParameter( "@FileDate", SqlDbType.DateTime );
sqlParameters[ 5 ].Value = entity.FileDate;
sqlParameters[ 6 ] = new SqlParameter( "@MimeType", entity.MimeType);
sqlParameters[ 7 ] = new SqlParameter( "@Bytes", SqlDbType.BigInt );
sqlParameters[ 7 ].Value = entity.ResourceBytes;
sqlParameters[ 8 ] = new SqlParameter( "@Data", SqlDbType.VarBinary );
sqlParameters[ 8 ].Value = entity.ResourceData;
sqlParameters[ 9 ] = new SqlParameter( "@URL", SqlDbType.VarChar );
sqlParameters[ 9 ].Size = 150;
sqlParameters[ 9 ].Value = entity.URL;
sqlParameters[ 10 ] = new SqlParameter( "@LastUpdatedById", SqlDbType.Int );
sqlParameters[ 10 ].Value = entity.LastUpdatedById;
sqlParameters[ 11 ] = new SqlParameter( "@FilePath", entity.FilePath );
#endregion
try
{
SqlHelper.ExecuteNonQuery( connectionString, UPDATE_PROC, sqlParameters );
message = "successful";
} catch ( Exception ex )
{
LogError( ex, className + ".Update() " );
message = className + "- Unsuccessful: Update(): " + ex.Message.ToString();
entity.Message = message;
entity.IsValid = false;
}
return message;
}//
public string UpdateFileInfo( DocumentVersion entity )
{
string message = "successful";
string connectionString = ContentConnection();
#region parameters
SqlParameter[] sqlParameters = new SqlParameter[ 4 ];
sqlParameters[ 0 ] = new SqlParameter( "@RowId", SqlDbType.UniqueIdentifier );
sqlParameters[ 0 ].Value = entity.RowId;
sqlParameters[ 1 ] = new SqlParameter( "@FilePath", entity.FilePath );
sqlParameters[ 2 ] = new SqlParameter( "@FileName", entity.FileName );
sqlParameters[ 3 ] = new SqlParameter( "@URL", entity.URL);
//sqlParameters[ 4 ] = new SqlParameter( "@LastUpdatedById", entity.LastUpdatedById);
#endregion
try
{
SqlHelper.ExecuteNonQuery( connectionString, "[DocumentVersion.UpdateFileInfo]", sqlParameters );
message = "successful";
}
catch ( Exception ex )
{
LogError( ex, className + ".UpdateFileInfo() " );
message = className + "- Unsuccessful: UpdateFileInfo(): " + ex.Message.ToString();
entity.Message = message;
entity.IsValid = false;
}
return message;
}//
/// <summary>
/// Set the record status to published.
/// Used where the document version is created before its parent record (the normal flow); the status defaults to initial on create.
/// If the parent is never saved, the version record is left orphaned, depending on how the calling process handles that condition.
/// </summary>
/// <param name="pRowId"></param>
/// <returns></returns>
public static string SetToPublished( string pRowId )
{
string message = "successful";
string connectionString = ContentConnection();
#region parameters
SqlParameter[] sqlParameters = new SqlParameter[ 1 ];
sqlParameters[ 0 ] = new SqlParameter( "@RowId", pRowId );
#endregion
try
{
SqlHelper.ExecuteNonQuery( connectionString, "DocumentVersion_SetToPublished", sqlParameters );
message = "successful";
}
catch ( Exception ex )
{
LogError( ex, className + ".SetToPublished() " );
message = "Error - Unsuccessful: SetToPublished(): " + ex.Message;
}
return message;
}//
#endregion
#region ====== Retrieval Methods ===============================================
/// <summary>
/// Get DocumentVersion record
/// </summary>
/// <param name="pRowId"></param>
/// <returns></returns>
public static DocumentVersion Get( string pRowId )
{
Guid rowId = new Guid( pRowId );
return Get( rowId );
}
public static DocumentVersion Get( Guid rowId )
{
string connectionString = ContentConnectionRO();
DocumentVersion entity = new DocumentVersion();
try
{
SqlParameter[] sqlParameters = new SqlParameter[ 1 ];
sqlParameters[ 0 ] = new SqlParameter( "@RowId", SqlDbType.UniqueIdentifier );
sqlParameters[ 0 ].Value = rowId;
SqlDataReader dr = SqlHelper.ExecuteReader( connectionString, GET_PROC, sqlParameters );
if ( dr.HasRows )
{
// it should return only one record.
while ( dr.Read() )
{
entity = Fill( dr );
}
} else
{
entity.Message = "Record not found";
entity.IsValid = false;
}
dr.Close();
dr = null;
return entity;
} catch ( Exception ex )
{
LogError( ex, className + string.Format(".Get() guid: {0}", rowId.ToString()) );
entity.Message = "Unsuccessful: " + className + ".Get(): " + ex.Message.ToString();
entity.IsValid = false;
return entity;
}
}//
/// <summary>
/// Select DocumentVersion related data using the passed parameters.
/// Note: a select is unlikely to be run directly against documents, as related documents are normally retrieved in the context of their parent record.
/// </summary>
/// <param name="pId"></param>
/// <returns></returns>
private static DataSet Select( int pId )
{
//This method is private as there is currently no need to select multiple documents.
string connectionString = ContentConnectionRO();
SqlParameter[] sqlParameters = new SqlParameter[ 1 ];
sqlParameters[ 0 ] = new SqlParameter( "@id", SqlDbType.Int );
sqlParameters[ 0 ].Value = pId;
DataSet ds = new DataSet();
try
{
ds = SqlHelper.ExecuteDataset( connectionString, CommandType.StoredProcedure, SELECT_PROC, sqlParameters );
if ( ds.HasErrors )
{
return null;
}
return ds;
} catch ( Exception ex )
{
LogError( ex, className + ".Select() " );
return null;
}
}
#endregion
#region ====== Helper Methods ===============================================
/// <summary>
/// Fill a DocumentVersion object from a SqlDataReader
/// </summary>
/// <param name="dr">SqlDataReader</param>
/// <returns>DocumentVersion</returns>
public static DocumentVersion Fill( SqlDataReader dr )
{
DocumentVersion entity = new DocumentVersion();
entity.IsValid = true;
string rowId = GetRowColumn( dr, "RowId", "" );
if ( rowId.Length > 35 ) // a Guid string is 36 characters; skip when the column was empty or missing
entity.RowId = new Guid( rowId );
entity.Title = GetRowColumn( dr, "Title", "missing" );
entity.Summary = GetRowColumn( dr, "Summary", "" );
entity.URL = GetRowColumn( dr, "URL", "" );
entity.Status = GetRowColumn( dr, "Status", "" );
entity.FileName = GetRowColumn( dr, "FileName", "" );
entity.FilePath = GetRowColumn( dr, "FilePath", "" );
entity.FileDate = GetRowColumn( dr, "FileDate", System.DateTime.Now );
entity.MimeType = GetRowColumn( dr, "MimeType", "" );
entity.ResourceBytes = long.Parse( GetRowColumn( dr, "Bytes", "0" ) );
if ( entity.ResourceBytes > 0 )
{
entity.SetResourceData( entity.ResourceBytes, dr[ "Data" ] );
}
entity.Created = GetRowColumn( dr, "Created", System.DateTime.MinValue );
//entity.CreatedBy = GetRowColumn( dr, "CreatedBy", "" );
entity.CreatedById = GetRowColumn( dr, "CreatedById", 0 );
entity.LastUpdated = GetRowColumn( dr, "LastUpdated", System.DateTime.MinValue );
//entity.LastUpdatedBy = GetRowColumn( dr, "LastUpdatedBy", "" );
entity.LastUpdatedById = GetRowColumn( dr, "LastUpdatedById", 0 );
return entity;
}//
#endregion
}
}
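// --- Hedged usage sketch (editor's addition, not part of the original source) ---
// Illustrates the intended call sequence for the data-access methods above: get a
// version, update its metadata without re-sending the file body (Update() overlays the
// stored MimeType/ResourceBytes/ResourceData when HasDocument() is false), and publish
// once the parent record is saved so the row is not left orphaned.
// "DocumentVersionManager" stands in for the containing class, whose real name is not
// shown in this excerpt; the using directive for the data-access namespace is omitted
// for the same reason.
namespace UsageSketches
{
using System;
internal static class DocumentVersionUsageSketch
{
internal static void Demo( DocumentVersionManager manager, Guid versionId, int userId )
{
// Retrieve an existing version; IsValid/Message report lookup failures.
DocumentVersion version = DocumentVersionManager.Get( versionId );
if ( version.IsValid == false )
{
Console.WriteLine( version.Message );
return;
}
// Metadata-only update: no document attached here, so Update() re-reads the stored one.
version.Title = "Revised title";
version.LastUpdatedById = userId;
Console.WriteLine( manager.Update( version ) );
// Flip the status once the parent record has been saved successfully.
DocumentVersionManager.SetToPublished( versionId.ToString() );
}
}
}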
| |
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
//
// Code generated by Microsoft (R) AutoRest Code Generator.
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Fixtures.AcceptanceTestsBodyByte
{
using System;
using System.Linq;
using System.Collections.Generic;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Rest;
using Microsoft.Rest.Serialization;
using Newtonsoft.Json;
using Models;
/// <summary>
/// ByteModel operations.
/// </summary>
public partial class ByteModel : IServiceOperations<AutoRestSwaggerBATByteService>, IByteModel
{
/// <summary>
/// Initializes a new instance of the ByteModel class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
public ByteModel(AutoRestSwaggerBATByteService client)
{
if (client == null)
{
throw new ArgumentNullException("client");
}
this.Client = client;
}
/// <summary>
/// Gets a reference to the AutoRestSwaggerBATByteService
/// </summary>
public AutoRestSwaggerBATByteService Client { get; private set; }
/// <summary>
/// Get null byte value
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<HttpOperationResponse<byte[]>> GetNullWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "GetNull", tracingParameters);
}
// Construct URL
var _baseUrl = this.Client.BaseUri.AbsoluteUri;
var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "byte/null").ToString();
// Create HTTP transport objects
HttpRequestMessage _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new Uri(_url);
// Set Headers
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new HttpOperationResponse<byte[]>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = SafeJsonConvert.DeserializeObject<byte[]>(_responseContent, this.Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Get empty byte value ''
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<HttpOperationResponse<byte[]>> GetEmptyWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "GetEmpty", tracingParameters);
}
// Construct URL
var _baseUrl = this.Client.BaseUri.AbsoluteUri;
var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "byte/empty").ToString();
// Create HTTP transport objects
HttpRequestMessage _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new Uri(_url);
// Set Headers
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new HttpOperationResponse<byte[]>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = SafeJsonConvert.DeserializeObject<byte[]>(_responseContent, this.Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Get non-ascii byte string hex(FF FE FD FC FB FA F9 F8 F7 F6)
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<HttpOperationResponse<byte[]>> GetNonAsciiWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "GetNonAscii", tracingParameters);
}
// Construct URL
var _baseUrl = this.Client.BaseUri.AbsoluteUri;
var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "byte/nonAscii").ToString();
// Create HTTP transport objects
HttpRequestMessage _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new Uri(_url);
// Set Headers
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new HttpOperationResponse<byte[]>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = SafeJsonConvert.DeserializeObject<byte[]>(_responseContent, this.Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Put non-ascii byte string hex(FF FE FD FC FB FA F9 F8 F7 F6)
/// </summary>
/// <param name='byteBody'>
/// Base64-encoded non-ascii byte string hex(FF FE FD FC FB FA F9 F8 F7 F6)
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<HttpOperationResponse> PutNonAsciiWithHttpMessagesAsync(byte[] byteBody, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (byteBody == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "byteBody");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("byteBody", byteBody);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "PutNonAscii", tracingParameters);
}
// Construct URL
var _baseUrl = this.Client.BaseUri.AbsoluteUri;
var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "byte/nonAscii").ToString();
// Create HTTP transport objects
HttpRequestMessage _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("PUT");
_httpRequest.RequestUri = new Uri(_url);
// Set Headers
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
if(byteBody != null)
{
_requestContent = SafeJsonConvert.SerializeObject(byteBody, this.Client.SerializationSettings);
_httpRequest.Content = new StringContent(_requestContent, Encoding.UTF8);
_httpRequest.Content.Headers.ContentType = MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new HttpOperationResponse();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Get invalid byte value ':::SWAGGER::::'
/// </summary>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<HttpOperationResponse<byte[]>> GetInvalidWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "GetInvalid", tracingParameters);
}
// Construct URL
var _baseUrl = this.Client.BaseUri.AbsoluteUri;
var _url = new Uri(new Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "byte/invalid").ToString();
// Create HTTP transport objects
HttpRequestMessage _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new Uri(_url);
// Set Headers
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await this.Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new ErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
Error _errorBody = SafeJsonConvert.DeserializeObject<Error>(_responseContent, this.Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new HttpOperationResponse<byte[]>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = SafeJsonConvert.DeserializeObject<byte[]>(_responseContent, this.Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
}
}
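// --- Hedged usage sketch (editor's addition, not part of the generated client above) ---
// Shows how the *WithHttpMessagesAsync operations are typically consumed: the
// HttpOperationResponse wrapper exposes the deserialized Body plus the raw request and
// response, and non-200 status codes surface as a thrown ErrorException. How the
// AutoRestSwaggerBATByteService client instance is constructed is assumed here.
namespace Fixtures.AcceptanceTestsBodyByte.UsageSketches
{
using System;
using System.Threading.Tasks;
using Microsoft.Rest;
internal static class ByteModelUsageSketch
{
internal static async Task RoundTripAsync(AutoRestSwaggerBATByteService client)
{
var operations = new ByteModel(client);
// PUT the non-ASCII payload, then read it back.
byte[] payload = { 0xFF, 0xFE, 0xFD, 0xFC, 0xFB, 0xFA, 0xF9, 0xF8, 0xF7, 0xF6 };
await operations.PutNonAsciiWithHttpMessagesAsync(payload);
HttpOperationResponse<byte[]> response = await operations.GetNonAsciiWithHttpMessagesAsync();
Console.WriteLine(BitConverter.ToString(response.Body));
}
}
}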
| |
#region Copyright (c) 2007 Atif Aziz. All rights reserved.
// https://github.com/atifaziz/JSONPath
//
// C# implementation of JSONPath[1]
// [1] http://goessner.net/articles/JsonPath/
//
// The MIT License
//
// Copyright (c) 2007 Atif Aziz . All rights reserved.
// Portions Copyright (c) 2007 Stefan Goessner (goessner.net)
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
//
#endregion
namespace JsonPath
{
#region Imports
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Text.RegularExpressions;
using MatterHackers.DataConverters3D;
using Newtonsoft.Json.Linq;
#endregion
public interface IJsonPathValueSystem
{
bool HasMember(object value, string member);
object GetMemberValue(object value, string member);
IEnumerable<string> GetMembers(object value);
bool IsObject(object value);
bool IsArray(object value);
bool IsPrimitive(object value);
}
public sealed class JsonPathContext
{
public static readonly JsonPathContext Default = new JsonPathContext();
public Func<string /* script */,
object /* value */,
string /* context */,
object /* result */>
ScriptEvaluator { get; set; }
public IJsonPathValueSystem ValueSystem { get; set; }
public IEnumerable<object> Select(object obj, string expr) =>
SelectNodes(obj, expr, (v, _) => v);
public IEnumerable<T> SelectNodes<T>(object obj, string expr, Func<object, string, T> resultor)
{
if (obj == null) throw new ArgumentNullException(nameof(obj));
if (resultor == null) throw new ArgumentNullException(nameof(resultor));
var i = new Interpreter(ValueSystem, ScriptEvaluator);
expr = Normalize(expr);
if (expr.Length >= 1 && expr[0] == '$') // ^\$:?
expr = expr.Substring(expr.Length >= 2 && expr[1] == ';' ? 2 : 1);
return i.Trace(expr, obj, "$", (value, path) => resultor(value, AsBracketNotation(path)));
}
static string Normalize(string expr)
{
var subx = new List<string>();
expr = RegExp.Replace(expr, @"[\['](\??\(.*?\))[\]']", m =>
{
subx.Add(m.Groups[1].Value);
return "[#" + (subx.Count - 1).ToString(CultureInfo.InvariantCulture) + "]";
});
expr = RegExp.Replace(expr, @"'?\.'?|\['?", ";");
expr = RegExp.Replace(expr, @";;;|;;", ";..;");
expr = RegExp.Replace(expr, @";$|'?\]|'$", string.Empty);
expr = RegExp.Replace(expr, @"#([0-9]+)", m =>
{
var index = int.Parse(m.Groups[1].Value, CultureInfo.InvariantCulture);
return subx[index];
});
return expr;
}
public static string AsBracketNotation(string[] indicies)
{
if (indicies == null)
throw new ArgumentNullException(nameof(indicies));
var sb = new StringBuilder();
foreach (var index in indicies)
{
if (sb.Length == 0)
{
sb.Append('$');
}
else
{
sb.Append('[');
if (RegExp.IsMatch(index, @"^[0-9*]+$"))
sb.Append(index);
else
sb.Append('\'').Append(index).Append('\'');
sb.Append(']');
}
}
return sb.ToString();
}
static int? TryParseInt(string str) =>
int.TryParse(str, NumberStyles.Integer, CultureInfo.InvariantCulture, out var n)
? n : (int?) null;
sealed class Interpreter
{
readonly Func<string, object, string, object> _eval;
readonly IJsonPathValueSystem _system;
static readonly IJsonPathValueSystem DefaultValueSystem = new ReflectionValueSystem(); // new BasicValueSystem();
static readonly char[] Colon = { ':' };
static readonly char[] Semicolon = { ';' };
delegate void WalkCallback(object member, string loc, string expr, object value, string path);
public Interpreter(IJsonPathValueSystem valueSystem, Func<string, object, string, object> eval)
{
_eval = eval ?? delegate
{
// @ symbol in expr must be interpreted specially to resolve
// to value. In JavaScript, the implementation would look
// like:
//
// return obj && value && eval(expr.replace(/@/g, "value"));
return null;
};
_system = valueSystem ?? DefaultValueSystem;
}
sealed class TraceArgs
{
public readonly string Expr;
public readonly object Value;
public readonly string Path;
public TraceArgs(string expr, object value, string path)
{
Expr = expr;
Value = value;
Path = path;
}
}
public IEnumerable<T> Trace<T>(string expr, object value, string path, Func<object, string[], T> resultor) =>
Trace(Args(expr, value, path), resultor);
static TraceArgs Args(string expr, object value, string path) =>
new TraceArgs(expr, value, path);
IEnumerable<T> Trace<T>(TraceArgs args, Func<object, string[], T> resultor)
{
var stack = new Stack<TraceArgs>();
stack.Push(args);
while (stack.Count > 0)
{
var popped = stack.Pop();
var expr = popped.Expr;
var value = popped.Value;
var path = popped.Path;
if (string.IsNullOrEmpty(expr))
{
if (path != null)
yield return resultor(value, path.Split(Semicolon));
continue;
}
var i = expr.IndexOf(';');
var atom = i >= 0 ? expr.Substring(0, i) : expr;
var tail = i >= 0 ? expr.Substring(i + 1) : string.Empty;
if (value != null && _system.HasMember(value, atom))
{
stack.Push(Args(tail, Index(value, atom), path + ";" + atom));
}
else if (atom == "*")
{
Walk(atom, tail, value, path, (m, l, x, v, p) => stack.Push(Args(m + ";" + x, v, p)));
}
else if (atom == "..")
{
Walk(atom, tail, value, path, (m, l, x, v, p) =>
{
var result = Index(v, m.ToString());
if (result != null && !_system.IsPrimitive(result))
stack.Push(Args("..;" + x, result, p + ";" + m));
});
stack.Push(Args(tail, value, path));
}
else if (atom.Length > 2 && atom[0] == '(' && atom[atom.Length - 1] == ')') // [(exp)]
{
stack.Push(Args(_eval(atom, value, path.Substring(path.LastIndexOf(';') + 1)) + ";" + tail, value, path));
}
else if (atom.Length > 3 && atom[0] == '?' && atom[1] == '(' && atom[atom.Length - 1] == ')') // [?(exp)]
{
Walk(atom, tail, value, path, (m, l, x, v, p) =>
{
var result = _eval(RegExp.Replace(l, @"^\?\((.*?)\)$", "$1"),
Index(v, m.ToString()), m.ToString());
if (Convert.ToBoolean(result, CultureInfo.InvariantCulture))
stack.Push(Args(m + ";" + x, v, p));
});
}
else if (RegExp.IsMatch(atom, @"^(-?[0-9]*):(-?[0-9]*):?([0-9]*)$")) // [start:end:step] Python slice syntax
{
foreach (var a in Slice(atom, tail, value, path).Reverse())
stack.Push(a);
}
else if (atom.IndexOf(',') >= 0) // [name1,name2,...]
{
foreach (var part in RegExp.Split(atom, @"'?,'?").Reverse())
stack.Push(Args(part + ";" + tail, value, path));
}
}
}
void Walk(string loc, string expr, object value, string path, WalkCallback callback)
{
if (_system.IsPrimitive(value))
return;
if (_system.IsArray(value))
{
var list = (IList) value;
for (var i = list.Count - 1; i >= 0; i--)
callback(i, loc, expr, value, path);
}
else if (_system.IsObject(value))
{
foreach (var key in _system.GetMembers(value).Reverse())
callback(key, loc, expr, value, path);
}
}
static IEnumerable<TraceArgs> Slice(string loc, string expr, object value, string path)
{
if (!(value is IList list))
yield break;
var length = list.Count;
var parts = loc.Split(Colon);
var start = TryParseInt(parts[0]) ?? 0;
var end = TryParseInt(parts[1]) ?? list.Count;
var step = parts.Length > 2 ? TryParseInt(parts[2]) ?? 1 : 1;
start = (start < 0) ? Math.Max(0, start + length) : Math.Min(length, start);
end = (end < 0) ? Math.Max(0, end + length) : Math.Min(length, end);
for (var i = start; i < end; i += step)
yield return Args(i + ";" + expr, value, path);
}
object Index(object obj, string member) =>
_system.GetMemberValue(obj, member);
}
static class RegExp
{
const RegexOptions Options = RegexOptions.ECMAScript;
public static bool IsMatch(string input, string pattern) =>
Regex.IsMatch(input, pattern, Options);
public static string Replace(string input, string pattern, string replacement) =>
Regex.Replace(input, pattern, replacement, Options);
public static string Replace(string input, string pattern, MatchEvaluator evaluator) =>
Regex.Replace(input, pattern, evaluator, Options);
public static IEnumerable<string> Split(string input, string pattern) =>
Regex.Split(input, pattern, Options);
}
public class ReflectionDetails
{
public List<PropertyInfo> Properties { get; set; }
}
public class ReflectionValueSystem : IJsonPathValueSystem
{
//private static Dictionary<string, ReflectionDetails> members = new Dictionary<string, ReflectionDetails>();
public const BindingFlags PublicInstanceProperties = BindingFlags.Public | BindingFlags.Instance;
public static ReflectionDetails GetMember(object item, string path)
{
//if (members.TryGetValue(path, out ReflectionDetails memberInfo))
//{
// return memberInfo;
//}
var reflectionDetails = new ReflectionDetails()
{
Properties = item.GetType().GetProperties(PublicInstanceProperties).Where(pi => pi.GetGetMethod() != null && pi.GetSetMethod() != null).ToList()
};
// members[path] = reflectionDetails;
return reflectionDetails;
}
public class ReflectionTarget
{
public PropertyInfo PropertyInfo { get; }
public object Source { get; }
public ReflectionTarget(PropertyInfo propertyInfo, object source)
{
this.PropertyInfo = propertyInfo;
this.Source = source;
}
}
public bool HasMember(object value, string member)
{
if (!IsPrimitive(value)
&& value is IDictionary dict)
{
return dict.Contains(member);
}
if (value is IList list)
{
return TryParseInt(member) is int i
&& i >= 0
&& i < list.Count;
}
string typeFilter;
// Separate member and typeFilter from member field
(member, typeFilter) = StripTypeFilter(member);
//if (TryParseInt(member) is int)
//{
// return true;
//}
// IEnumerable field must be iterated to check
if (!string.IsNullOrEmpty(typeFilter)
&& GetMemberValue(value, member) is IEnumerable enumerable)
{
// Handle the typeFilter case
foreach (var n in enumerable)
{
if (n.GetType().Name == typeFilter)
{
return true;
}
}
return false;
}
// TODO: Inline once troubleshooting is complete
var hasMember = GetMember(value, member).Properties.Any(p => p.Name == member);
return hasMember;
}
public static ReflectionTarget LastMemberValue { get; private set; }
public object GetMemberValue(object value, string member)
{
// Find and invoke property to get value
LastMemberValue = new ReflectionTarget(
GetMember(value, member).Properties.Where(p => p.Name == member).FirstOrDefault(),
value);
if (IsPrimitive(value))
{
throw new ArgumentException(null, nameof(value));
}
if (value is IDictionary dict)
{
return dict[member];
}
if (value is IList list
&& TryParseInt(member) is int i
&& i >= 0
&& i < list.Count)
{
return list[i];
}
string typeFilter;
// Separate member and typeFilter from member field
(member, typeFilter) = StripTypeFilter(member);
var propertyInfo = GetMember(value, member).Properties.Where(p => p.Name == member).FirstOrDefault();
LastMemberValue = new ReflectionTarget(propertyInfo, value);
if (value is IEnumerable enumerable)
{
if (TryParseInt(member) is int k)
{
var v = 0;
foreach (var n in enumerable)
{
if (v++ == k)
{
return n;
}
}
}
foreach (var n in enumerable)
{
if (n.GetType().Name == typeFilter)
{
return n;
}
}
}
// Invoke property to get value
var propertyValue = propertyInfo.GetGetMethod().Invoke(value, null);
if (!string.IsNullOrEmpty(typeFilter) && propertyValue is IEnumerable items)
{
foreach(var item in items)
{
if (item.GetType().Name == typeFilter)
{
return item;
}
}
}
return propertyValue;
}
public IEnumerable<string> GetMembers(object value) =>
((IDictionary)value).Keys.Cast<string>();
public bool IsObject(object value) => value is IDictionary;
public bool IsArray(object value) => value is IList;
public bool IsPrimitive(object value) =>
value == null
? throw new ArgumentNullException(nameof(value))
: Type.GetTypeCode(value.GetType()) != TypeCode.Object;
private static (string member, string filter) StripTypeFilter(string member)
{
int startFilter = member.IndexOf('<');
int endFilter = member.IndexOf('>');
int length = endFilter - startFilter;
string typeFilter = "";
if (startFilter != -1 && endFilter != -1 && length > 1)
{
typeFilter = member.Substring(startFilter + 1, length - 1);
member = member.Substring(0, startFilter);
}
return (member, typeFilter);
}
}
sealed class BasicValueSystem : IJsonPathValueSystem
{
public bool HasMember(object value, string member)
{
if (!IsPrimitive(value)
&& value is IDictionary dict)
{
return dict.Contains(member);
}
if (value is JArray array)
{
return TryParseInt(member) is int j
&& j >= 0
&& j < array.Count;
}
if (value is JToken token)
{
return token[member] != null;
}
return value is IList list
&& TryParseInt(member) is int i
&& i >= 0
&& i < list.Count;
}
public object GetMemberValue(object value, string member)
{
if (IsPrimitive(value))
{
throw new ArgumentException(null, nameof(value));
}
if (value is JArray array
&& TryParseInt(member) is int j
&& j >= 0
&& j < array.Count)
{
return array[j];
}
if (value is JToken token)
{
return token[member];
}
if (value is IDictionary dict)
{
return dict[member];
}
if (!(value is IList list))
{
throw new ArgumentException(null, nameof(value));
}
if (TryParseInt(member) is int i
&& i >= 0
&& i < list.Count)
{
return list[i];
}
return null;
}
public IEnumerable<string> GetMembers(object value) =>
((IDictionary) value).Keys.Cast<string>();
public bool IsObject(object value) => value is IDictionary;
public bool IsArray(object value) => value is IList;
public bool IsPrimitive(object value) =>
value == null
? throw new ArgumentNullException(nameof(value))
: Type.GetTypeCode(value.GetType()) != TypeCode.Object;
}
}
}
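// --- Hedged usage sketch (editor's addition, not part of the original source) ---
// Demonstrates the default (reflection-based) value system resolving a simple dotted
// path against plain CLR objects; note that ReflectionValueSystem only considers public
// properties that expose both a getter and a setter. The Order/Customer sample types
// below are illustrative only.
namespace JsonPath.UsageSketches
{
using System;
using System.Linq;
internal class Customer
{
public string Name { get; set; }
}
internal class Order
{
public Customer Customer { get; set; }
}
internal static class JsonPathUsageSketch
{
internal static void Demo()
{
var order = new Order { Customer = new Customer { Name = "Ada" } };
// JsonPathContext.Default has no ValueSystem set, so the Interpreter falls back
// to ReflectionValueSystem.
object name = JsonPathContext.Default.Select(order, "$.Customer.Name").FirstOrDefault();
Console.WriteLine(name); // prints "Ada"
}
}
}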
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System.Net.Http.Headers;
using Xunit;
namespace System.Net.Http.Tests
{
public class RetryConditionHeaderValueTest
{
[Fact]
public void Ctor_DeltaOverload_MatchExpectation()
{
RetryConditionHeaderValue retryCondition = new RetryConditionHeaderValue(new TimeSpan(0, 0, 3));
Assert.Equal(new TimeSpan(0, 0, 3), retryCondition.Delta);
Assert.Null(retryCondition.Date);
Assert.Throws<ArgumentOutOfRangeException>(() => { new RetryConditionHeaderValue(new TimeSpan(1234567, 0, 0)); });
}
[Fact]
public void Ctor_DateOverload_MatchExpectation()
{
RetryConditionHeaderValue retryCondition = new RetryConditionHeaderValue(
new DateTimeOffset(2010, 7, 15, 12, 33, 57, TimeSpan.Zero));
Assert.Null(retryCondition.Delta);
Assert.Equal(new DateTimeOffset(2010, 7, 15, 12, 33, 57, TimeSpan.Zero), retryCondition.Date);
}
[Fact]
public void ToString_UseDifferentRetryConditions_AllSerializedCorrectly()
{
RetryConditionHeaderValue retryCondition = new RetryConditionHeaderValue(new TimeSpan(0, 0, 50000000));
Assert.Equal("50000000", retryCondition.ToString());
retryCondition = new RetryConditionHeaderValue(new DateTimeOffset(2010, 7, 15, 12, 33, 57, TimeSpan.Zero));
Assert.Equal("Thu, 15 Jul 2010 12:33:57 GMT", retryCondition.ToString());
}
[Fact]
public void GetHashCode_UseSameAndDifferentRetryConditions_SameOrDifferentHashCodes()
{
RetryConditionHeaderValue retryCondition1 = new RetryConditionHeaderValue(new TimeSpan(0, 0, 1000000));
RetryConditionHeaderValue retryCondition2 = new RetryConditionHeaderValue(new TimeSpan(0, 0, 1000000));
RetryConditionHeaderValue retryCondition3 = new RetryConditionHeaderValue(
new DateTimeOffset(2010, 7, 15, 12, 33, 57, TimeSpan.Zero));
RetryConditionHeaderValue retryCondition4 = new RetryConditionHeaderValue(
new DateTimeOffset(2008, 8, 16, 13, 44, 10, TimeSpan.Zero));
RetryConditionHeaderValue retryCondition5 = new RetryConditionHeaderValue(
new DateTimeOffset(2010, 7, 15, 12, 33, 57, TimeSpan.Zero));
RetryConditionHeaderValue retryCondition6 = new RetryConditionHeaderValue(new TimeSpan(0, 0, 2000000));
Assert.Equal(retryCondition1.GetHashCode(), retryCondition2.GetHashCode());
Assert.NotEqual(retryCondition1.GetHashCode(), retryCondition3.GetHashCode());
Assert.NotEqual(retryCondition3.GetHashCode(), retryCondition4.GetHashCode());
Assert.Equal(retryCondition3.GetHashCode(), retryCondition5.GetHashCode());
Assert.NotEqual(retryCondition1.GetHashCode(), retryCondition6.GetHashCode());
}
[Fact]
public void Equals_UseSameAndDifferentRetrys_EqualOrNotEqualNoExceptions()
{
RetryConditionHeaderValue retryCondition1 = new RetryConditionHeaderValue(new TimeSpan(0, 0, 1000000));
RetryConditionHeaderValue retryCondition2 = new RetryConditionHeaderValue(new TimeSpan(0, 0, 1000000));
RetryConditionHeaderValue retryCondition3 = new RetryConditionHeaderValue(
new DateTimeOffset(2010, 7, 15, 12, 33, 57, TimeSpan.Zero));
RetryConditionHeaderValue retryCondition4 = new RetryConditionHeaderValue(
new DateTimeOffset(2008, 8, 16, 13, 44, 10, TimeSpan.Zero));
RetryConditionHeaderValue retryCondition5 = new RetryConditionHeaderValue(
new DateTimeOffset(2010, 7, 15, 12, 33, 57, TimeSpan.Zero));
RetryConditionHeaderValue retryCondition6 = new RetryConditionHeaderValue(new TimeSpan(0, 0, 2000000));
Assert.False(retryCondition1.Equals(null), "delta vs. <null>");
Assert.True(retryCondition1.Equals(retryCondition2), "delta vs. delta");
Assert.False(retryCondition1.Equals(retryCondition3), "delta vs. date");
Assert.False(retryCondition3.Equals(retryCondition1), "date vs. delta");
Assert.False(retryCondition3.Equals(retryCondition4), "date vs. different date");
Assert.True(retryCondition3.Equals(retryCondition5), "date vs. date");
Assert.False(retryCondition1.Equals(retryCondition6), "delta vs. different delta");
}
[Fact]
public void Clone_Call_CloneFieldsMatchSourceFields()
{
RetryConditionHeaderValue source = new RetryConditionHeaderValue(new TimeSpan(0, 0, 123456789));
RetryConditionHeaderValue clone = (RetryConditionHeaderValue)((ICloneable)source).Clone();
Assert.Equal(source.Delta, clone.Delta);
Assert.Null(clone.Date);
source = new RetryConditionHeaderValue(new DateTimeOffset(2010, 7, 15, 12, 33, 57, TimeSpan.Zero));
clone = (RetryConditionHeaderValue)((ICloneable)source).Clone();
Assert.Null(clone.Delta);
Assert.Equal(source.Date, clone.Date);
}
[Fact]
public void GetRetryConditionLength_DifferentValidScenarios_AllReturnNonZero()
{
RetryConditionHeaderValue result = null;
CallGetRetryConditionLength(" 1234567890 ", 1, 11, out result);
Assert.Equal(new TimeSpan(0, 0, 1234567890), result.Delta);
Assert.Null(result.Date);
CallGetRetryConditionLength("1", 0, 1, out result);
Assert.Equal(new TimeSpan(0, 0, 1), result.Delta);
Assert.Null(result.Date);
CallGetRetryConditionLength("001", 0, 3, out result);
Assert.Equal(new TimeSpan(0, 0, 1), result.Delta);
Assert.Null(result.Date);
CallGetRetryConditionLength("Wed, 09 Nov 1994 08:49:37 GMT", 0, 29, out result);
Assert.Null(result.Delta);
Assert.Equal(new DateTimeOffset(1994, 11, 9, 8, 49, 37, TimeSpan.Zero), result.Date);
CallGetRetryConditionLength("Sun, 06 Nov 1994 08:49:37 GMT ", 0, 34, out result);
Assert.Null(result.Delta);
Assert.Equal(new DateTimeOffset(1994, 11, 6, 8, 49, 37, TimeSpan.Zero), result.Date);
}
[Fact]
public void GetRetryConditionLength_DifferentInvalidScenarios_AllReturnZero()
{
CheckInvalidGetRetryConditionLength(" 1", 0); // no leading whitespaces allowed
CheckInvalidGetRetryConditionLength(" Wed 09 Nov 1994 08:49:37 GMT", 0);
CheckInvalidGetRetryConditionLength("-5", 0);
// Even though the first char is a valid 'delta', GetRetryConditionLength() expects the whole string to be
// a valid 'delta'.
CheckInvalidGetRetryConditionLength("1.5", 0);
CheckInvalidGetRetryConditionLength("5123,", 0);
CheckInvalidGetRetryConditionLength("123456789012345678901234567890", 0); // >>Int32.MaxValue
CheckInvalidGetRetryConditionLength("9999999999", 0); // >Int32.MaxValue but same amount of digits
CheckInvalidGetRetryConditionLength("Wed, 09 Nov", 0);
CheckInvalidGetRetryConditionLength("W/Wed 09 Nov 1994 08:49:37 GMT", 0);
CheckInvalidGetRetryConditionLength("Wed 09 Nov 1994 08:49:37 GMT,", 0);
CheckInvalidGetRetryConditionLength("", 0);
CheckInvalidGetRetryConditionLength(null, 0);
}
[Fact]
public void Parse_SetOfValidValueStrings_ParsedCorrectly()
{
CheckValidParse(" 123456789 ", new RetryConditionHeaderValue(new TimeSpan(0, 0, 123456789)));
CheckValidParse(" Sun, 06 Nov 1994 08:49:37 GMT ",
new RetryConditionHeaderValue(new DateTimeOffset(1994, 11, 6, 8, 49, 37, TimeSpan.Zero)));
}
[Fact]
public void Parse_SetOfInvalidValueStrings_Throws()
{
CheckInvalidParse("123 ,"); // no delimiter allowed
CheckInvalidParse("Sun, 06 Nov 1994 08:49:37 GMT ,"); // no delimiter allowed
CheckInvalidParse("123 Sun, 06 Nov 1994 08:49:37 GMT");
CheckInvalidParse("Sun, 06 Nov 1994 08:49:37 GMT \"x\"");
CheckInvalidParse(null);
CheckInvalidParse(string.Empty);
}
[Fact]
public void TryParse_SetOfValidValueStrings_ParsedCorrectly()
{
CheckValidTryParse(" 123456789 ", new RetryConditionHeaderValue(new TimeSpan(0, 0, 123456789)));
CheckValidTryParse(" Sun, 06 Nov 1994 08:49:37 GMT ",
new RetryConditionHeaderValue(new DateTimeOffset(1994, 11, 6, 8, 49, 37, TimeSpan.Zero)));
}
[Fact]
public void TryParse_SetOfInvalidValueStrings_ReturnsFalse()
{
CheckInvalidTryParse("123 ,"); // no delimiter allowed
CheckInvalidTryParse("Sun, 06 Nov 1994 08:49:37 GMT ,"); // no delimiter allowed
CheckInvalidTryParse("123 Sun, 06 Nov 1994 08:49:37 GMT");
CheckInvalidTryParse("Sun, 06 Nov 1994 08:49:37 GMT \"x\"");
CheckInvalidTryParse(null);
CheckInvalidTryParse(string.Empty);
}
#region Helper methods
private void CheckValidParse(string input, RetryConditionHeaderValue expectedResult)
{
RetryConditionHeaderValue result = RetryConditionHeaderValue.Parse(input);
Assert.Equal(expectedResult, result);
}
private void CheckInvalidParse(string input)
{
Assert.Throws<FormatException>(() => { RetryConditionHeaderValue.Parse(input); });
}
private void CheckValidTryParse(string input, RetryConditionHeaderValue expectedResult)
{
RetryConditionHeaderValue result = null;
Assert.True(RetryConditionHeaderValue.TryParse(input, out result));
Assert.Equal(expectedResult, result);
}
private void CheckInvalidTryParse(string input)
{
RetryConditionHeaderValue result = null;
Assert.False(RetryConditionHeaderValue.TryParse(input, out result));
Assert.Null(result);
}
private static void CallGetRetryConditionLength(string input, int startIndex, int expectedLength,
out RetryConditionHeaderValue result)
{
object temp = null;
Assert.Equal(expectedLength, RetryConditionHeaderValue.GetRetryConditionLength(input, startIndex,
out temp));
result = temp as RetryConditionHeaderValue;
}
private static void CheckInvalidGetRetryConditionLength(string input, int startIndex)
{
object result = null;
Assert.Equal(0, RetryConditionHeaderValue.GetRetryConditionLength(input, startIndex, out result));
Assert.Null(result);
}
#endregion
}
}
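// --- Hedged usage sketch (editor's addition, not part of the original tests) ---
// RetryConditionHeaderValue models the Retry-After response header, which carries
// either a delta in seconds or an HTTP date; the two constructors exercised by the
// tests above correspond to those two wire forms.
using System;
using System.Net;
using System.Net.Http;
using System.Net.Http.Headers;
namespace System.Net.Http.Tests.UsageSketches
{
internal static class RetryAfterUsageSketch
{
internal static HttpResponseMessage BuildThrottledResponse()
{
var response = new HttpResponseMessage((HttpStatusCode)429);
// Delta form, serialized as "Retry-After: 30".
response.Headers.RetryAfter = new RetryConditionHeaderValue(TimeSpan.FromSeconds(30));
// Date form would instead be:
// response.Headers.RetryAfter =
//     new RetryConditionHeaderValue(DateTimeOffset.UtcNow.AddMinutes(1));
return response;
}
}
}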
| |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
using System.Diagnostics;
using System.Diagnostics.Contracts;
using Microsoft.Research.ClousotRegression;
namespace APIProtocols
{
public enum Case
{
A,
B,
C,
}
class APISpecExample
{
string name;
public bool HasName
{
[ClousotRegressionTest]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as field receiver)", PrimaryILOffset = 1, MethodILOffset = 0)]
get
{
// inferred: Contract.Ensures(Contract.Result<bool>() == (name != null));
return name != null;
}
}
public string Name
{
[ClousotRegressionTest]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 1, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as field receiver)", PrimaryILOffset = 28, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "ensures is valid", PrimaryILOffset = 22, MethodILOffset = 33)]
get
{
Contract.Requires(HasName);
Contract.Ensures(Contract.Result<string>() != null);
return name;
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as field receiver)", PrimaryILOffset = 22, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 1, MethodILOffset = 27)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "ensures is valid", PrimaryILOffset = 15, MethodILOffset = 27)]
set
{
Contract.Ensures(HasName || value == null);
this.name = value;
}
}
Case _case;
public APISpecExample()
{
Contract.Ensures(State == Case.A);
_case = Case.A;
}
public Case State
{
get
{
Contract.Ensures(Contract.Result<Case>() == _case);
return _case;
}
}
public void Start()
{
Contract.Requires(State == Case.A);
Contract.Ensures(State == Case.B);
_case = Case.B;
}
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 1, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as field receiver)", PrimaryILOffset = 58, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 64, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 37, MethodILOffset = 72)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "ensures is valid", PrimaryILOffset = 51, MethodILOffset = 72)]
public bool MayStart()
{
Contract.Requires(State == Case.A);
Contract.Ensures(!Contract.Result<bool>() && (State == Case.A) || Contract.Result<bool>() && (State == Case.B));
_case = Case.B;
return (State == Case.B);
}
public void Run()
{
Contract.Requires(State == Case.B);
Contract.Ensures(State == Case.B);
}
public void Stop()
{
Contract.Requires(State == Case.B);
Contract.Ensures(State == Case.C);
_case = Case.C;
}
public void Reset()
{
Contract.Requires(State == Case.C);
Contract.Ensures(State == Case.A);
_case = Case.A;
}
}
class TestAPI
{
[ClousotRegressionTest]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 7, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 16, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 22, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 28, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 34, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 43, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 49, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 55, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 9, MethodILOffset = 7)]
[RegressionOutcome(Outcome = ProofOutcome.False, Message = @"requires is false: State == Case.A", PrimaryILOffset = 9, MethodILOffset = 16)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 9, MethodILOffset = 22)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 9, MethodILOffset = 28)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 9, MethodILOffset = 34)]
[RegressionOutcome(Outcome = ProofOutcome.False, Message = @"requires is false: State == Case.B", PrimaryILOffset = 9, MethodILOffset = 43)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 9, MethodILOffset = 49)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 9, MethodILOffset = 55)]
public static void Test1(bool bug1, bool bug2)
{
APISpecExample a = new APISpecExample();
a.Start();
if (bug1)
{
a.Start();
}
a.Run();
a.Run();
a.Stop();
if (bug2)
{
a.Run();
}
a.Reset();
a.Start();
}
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 7, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 17, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 32, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 38, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 9, MethodILOffset = 7)]
[RegressionOutcome(Outcome = ProofOutcome.Top, Message = "requires unproven: State == Case.B", PrimaryILOffset = 9, MethodILOffset = 17)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 9, MethodILOffset = 32)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 9, MethodILOffset = 38)]
public static void Test1b(bool bug1, bool bug2)
{
APISpecExample a = new APISpecExample();
bool success = a.MayStart();
if (bug1)
{
a.Run();
Contract.Assume(false);
}
if (!success)
{
a.Start();
}
a.Run();
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 13, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.Top, Message = @"requires unproven: HasName", PrimaryILOffset = 6, MethodILOffset = 13)]
public static string Test2(APISpecExample a)
{
Contract.Requires(a != null);
return a.Name;
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 13, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 21, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 6, MethodILOffset = 21)]
public static string Test3(APISpecExample a)
{
Contract.Requires(a != null);
if (a.HasName) return a.Name;
return "default";
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 13, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 24, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 6, MethodILOffset = 24)]
public static string Test4(APISpecExample a)
{
Contract.Requires(a != null);
Contract.Requires(a.HasName);
return a.Name;
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 18, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 24, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 6, MethodILOffset = 24)]
public static string Test5(APISpecExample a)
{
Contract.Requires(a != null);
a.Name = "foo";
return a.Name;
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 14, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 20, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.Top, Message = @"requires unproven: HasName", PrimaryILOffset = 6, MethodILOffset = 20)]
public static string Test6(APISpecExample a, string s)
{
Contract.Requires(a != null);
a.Name = s;
return a.Name;
}
}
}
namespace Bierhoff
{
enum RSState { unread, read, end, closed }
class ResultSet
{
RSState state;
public RSState State
{
get {
// could be inferred, but this way, we don't make it brittle.
Contract.Ensures(Contract.Result<RSState>() == this.state);
return this.state;
}
}
public bool Valid
{
[ClousotRegressionTest]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 33, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 41, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 6, MethodILOffset = 49)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 14, MethodILOffset = 49)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 6, MethodILOffset = 51)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "ensures is valid", PrimaryILOffset = 27, MethodILOffset = 51)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "ensures is valid", PrimaryILOffset = 27, MethodILOffset = 49)]
get
{
Contract.Ensures(Contract.Result<bool>() == (this.State == RSState.unread || this.State == RSState.read));
return this.State == RSState.unread || this.State == RSState.read;
}
}
public bool IsOpen
{
[ClousotRegressionTest]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 33, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 41, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 6, MethodILOffset = 49)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 6, MethodILOffset = 51)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 14, MethodILOffset = 49)]
// On the path where Valid is true, the second reference to this.State is never evaluated.
// F: Now we avoid emitting warnings for unreached parts of postconditions
//[RegressionOutcome(Outcome = ProofOutcome.Bottom, Message = @"reference use unreached", PrimaryILOffset = 14, MethodILOffset = 51)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "ensures is valid", PrimaryILOffset = 27, MethodILOffset = 51)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "ensures is valid", PrimaryILOffset = 27, MethodILOffset = 49)]
get
{
Contract.Ensures(Contract.Result<bool>() == (this.Valid || this.State == RSState.end));
return Valid || this.State == RSState.end;
}
}
[ClousotRegressionTest("cci1only")]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 1, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as field receiver)", PrimaryILOffset = 55, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 19, MethodILOffset = 61)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "ensures is valid", PrimaryILOffset = 48, MethodILOffset = 61)]
public bool Next()
{
Contract.Requires(IsOpen);
Contract.Ensures(Contract.Result<bool>() && State == RSState.unread ||
!Contract.Result<bool>() && State == RSState.end);
state = RSState.unread;
return true;
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 1, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as field receiver)", PrimaryILOffset = 27, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 12, MethodILOffset = 33)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "ensures is valid", PrimaryILOffset = 20, MethodILOffset = 33)]
public int GetInt(int column)
{
Contract.Requires(Valid);
Contract.Ensures(State == RSState.read);
state = RSState.read;
return column;
}
public bool WasNull
{
[ClousotRegressionTest]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 1, MethodILOffset = 0)]
get
{
Contract.Requires(State == RSState.read);
return false;
}
}
[ClousotRegressionTest]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as field receiver)", PrimaryILOffset = 16, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 1, MethodILOffset = 21)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "ensures is valid", PrimaryILOffset = 9, MethodILOffset = 21)]
public void Close()
{
Contract.Ensures(State == RSState.closed);
state = RSState.closed;
}
}
class TestResultSet
{
[ClousotRegressionTest("cci1only")]
[RegressionOutcome(Outcome = ProofOutcome.Top, Message = "Possibly calling a method on a null reference 'rs'", PrimaryILOffset = 1, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 12, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 23, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 28, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 34, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "valid non-null reference (as receiver)", PrimaryILOffset = 53, MethodILOffset = 0)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 6, MethodILOffset = 12)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 6, MethodILOffset = 23)]
[RegressionOutcome(Outcome = ProofOutcome.True, Message = "requires is valid", PrimaryILOffset = 9, MethodILOffset = 34)]
[RegressionOutcome(Outcome = ProofOutcome.Top, Message = "requires unproven: Valid", PrimaryILOffset = 6, MethodILOffset = 53)]
public static int? GetFirstInt(ResultSet rs)
{
int? result;
Contract.Requires(rs.IsOpen);
if (rs.Next())
{
result = rs.GetInt(1);
if (rs.WasNull)
{
result = null;
}
return result;
}
else
{
return rs.GetInt(1);
}
}
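// Illustrative sketch only (not part of the original regression suite): the
// "requires unproven: Valid" warning on the else branch above is genuine, because
// Next() == false ensures State == RSState.end, which falsifies GetInt's Valid
// precondition. A variant that discharges the warnings could look like this:
public static int? GetFirstIntFixed(ResultSet rs)
{
Contract.Requires(rs != null);
Contract.Requires(rs.IsOpen);
if (rs.Next())
{
int? result = rs.GetInt(1);
if (rs.WasNull)
{
result = null;
}
return result;
}
return null;
}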
}
}
// ------------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All Rights Reserved. Licensed under the MIT License. See License in the project root for license information.
// ------------------------------------------------------------------------------
// **NOTE** This file was generated by a tool and any changes will be overwritten.
// Template Source: Templates\CSharp\Requests\EntityRequest.cs.tt
namespace Microsoft.Graph
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Net.Http;
using System.Threading;
using System.Linq.Expressions;
/// <summary>
/// The type PlannerBucketTaskBoardTaskFormatRequest.
/// </summary>
public partial class PlannerBucketTaskBoardTaskFormatRequest : BaseRequest, IPlannerBucketTaskBoardTaskFormatRequest
{
/// <summary>
/// Constructs a new PlannerBucketTaskBoardTaskFormatRequest.
/// </summary>
/// <param name="requestUrl">The URL for the built request.</param>
/// <param name="client">The <see cref="IBaseClient"/> for handling requests.</param>
/// <param name="options">Query and header option name value pairs for the request.</param>
public PlannerBucketTaskBoardTaskFormatRequest(
string requestUrl,
IBaseClient client,
IEnumerable<Option> options)
: base(requestUrl, client, options)
{
}
/// <summary>
/// Creates the specified PlannerBucketTaskBoardTaskFormat using POST.
/// </summary>
/// <param name="plannerBucketTaskBoardTaskFormatToCreate">The PlannerBucketTaskBoardTaskFormat to create.</param>
/// <returns>The created PlannerBucketTaskBoardTaskFormat.</returns>
public System.Threading.Tasks.Task<PlannerBucketTaskBoardTaskFormat> CreateAsync(PlannerBucketTaskBoardTaskFormat plannerBucketTaskBoardTaskFormatToCreate)
{
return this.CreateAsync(plannerBucketTaskBoardTaskFormatToCreate, CancellationToken.None);
}
/// <summary>
/// Creates the specified PlannerBucketTaskBoardTaskFormat using POST.
/// </summary>
/// <param name="plannerBucketTaskBoardTaskFormatToCreate">The PlannerBucketTaskBoardTaskFormat to create.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The created PlannerBucketTaskBoardTaskFormat.</returns>
public async System.Threading.Tasks.Task<PlannerBucketTaskBoardTaskFormat> CreateAsync(PlannerBucketTaskBoardTaskFormat plannerBucketTaskBoardTaskFormatToCreate, CancellationToken cancellationToken)
{
this.ContentType = "application/json";
this.Method = "POST";
var newEntity = await this.SendAsync<PlannerBucketTaskBoardTaskFormat>(plannerBucketTaskBoardTaskFormatToCreate, cancellationToken).ConfigureAwait(false);
this.InitializeCollectionProperties(newEntity);
return newEntity;
}
/// <summary>
/// Deletes the specified PlannerBucketTaskBoardTaskFormat.
/// </summary>
/// <returns>The task to await.</returns>
public System.Threading.Tasks.Task DeleteAsync()
{
return this.DeleteAsync(CancellationToken.None);
}
/// <summary>
/// Deletes the specified PlannerBucketTaskBoardTaskFormat.
/// </summary>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The task to await.</returns>
public async System.Threading.Tasks.Task DeleteAsync(CancellationToken cancellationToken)
{
this.Method = "DELETE";
await this.SendAsync<PlannerBucketTaskBoardTaskFormat>(null, cancellationToken).ConfigureAwait(false);
}
/// <summary>
/// Gets the specified PlannerBucketTaskBoardTaskFormat.
/// </summary>
/// <returns>The PlannerBucketTaskBoardTaskFormat.</returns>
public System.Threading.Tasks.Task<PlannerBucketTaskBoardTaskFormat> GetAsync()
{
return this.GetAsync(CancellationToken.None);
}
/// <summary>
/// Gets the specified PlannerBucketTaskBoardTaskFormat.
/// </summary>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The PlannerBucketTaskBoardTaskFormat.</returns>
public async System.Threading.Tasks.Task<PlannerBucketTaskBoardTaskFormat> GetAsync(CancellationToken cancellationToken)
{
this.Method = "GET";
var retrievedEntity = await this.SendAsync<PlannerBucketTaskBoardTaskFormat>(null, cancellationToken).ConfigureAwait(false);
this.InitializeCollectionProperties(retrievedEntity);
return retrievedEntity;
}
/// <summary>
/// Updates the specified PlannerBucketTaskBoardTaskFormat using PATCH.
/// </summary>
/// <param name="plannerBucketTaskBoardTaskFormatToUpdate">The PlannerBucketTaskBoardTaskFormat to update.</param>
/// <returns>The updated PlannerBucketTaskBoardTaskFormat.</returns>
public System.Threading.Tasks.Task<PlannerBucketTaskBoardTaskFormat> UpdateAsync(PlannerBucketTaskBoardTaskFormat plannerBucketTaskBoardTaskFormatToUpdate)
{
return this.UpdateAsync(plannerBucketTaskBoardTaskFormatToUpdate, CancellationToken.None);
}
/// <summary>
/// Updates the specified PlannerBucketTaskBoardTaskFormat using PATCH.
/// </summary>
/// <param name="plannerBucketTaskBoardTaskFormatToUpdate">The PlannerBucketTaskBoardTaskFormat to update.</param>
/// <param name="cancellationToken">The <see cref="CancellationToken"/> for the request.</param>
/// <returns>The updated PlannerBucketTaskBoardTaskFormat.</returns>
public async System.Threading.Tasks.Task<PlannerBucketTaskBoardTaskFormat> UpdateAsync(PlannerBucketTaskBoardTaskFormat plannerBucketTaskBoardTaskFormatToUpdate, CancellationToken cancellationToken)
{
this.ContentType = "application/json";
this.Method = "PATCH";
var updatedEntity = await this.SendAsync<PlannerBucketTaskBoardTaskFormat>(plannerBucketTaskBoardTaskFormatToUpdate, cancellationToken).ConfigureAwait(false);
this.InitializeCollectionProperties(updatedEntity);
return updatedEntity;
}
/// <summary>
/// Adds the specified expand value to the request.
/// </summary>
/// <param name="value">The expand value.</param>
/// <returns>The request object to send.</returns>
public IPlannerBucketTaskBoardTaskFormatRequest Expand(string value)
{
this.QueryOptions.Add(new QueryOption("$expand", value));
return this;
}
/// <summary>
/// Adds the specified expand value to the request.
/// </summary>
/// <param name="expandExpression">The expression from which to calculate the expand value.</param>
/// <returns>The request object to send.</returns>
public IPlannerBucketTaskBoardTaskFormatRequest Expand(Expression<Func<PlannerBucketTaskBoardTaskFormat, object>> expandExpression)
{
if (expandExpression == null)
{
throw new ArgumentNullException(nameof(expandExpression));
}
string error;
string value = ExpressionExtractHelper.ExtractMembers(expandExpression, out error);
if (value == null)
{
throw new ArgumentException(error, nameof(expandExpression));
}
else
{
this.QueryOptions.Add(new QueryOption("$expand", value));
}
return this;
}
/// <summary>
/// Adds the specified select value to the request.
/// </summary>
/// <param name="value">The select value.</param>
/// <returns>The request object to send.</returns>
public IPlannerBucketTaskBoardTaskFormatRequest Select(string value)
{
this.QueryOptions.Add(new QueryOption("$select", value));
return this;
}
/// <summary>
/// Adds the specified select value to the request.
/// </summary>
/// <param name="selectExpression">The expression from which to calculate the select value.</param>
/// <returns>The request object to send.</returns>
public IPlannerBucketTaskBoardTaskFormatRequest Select(Expression<Func<PlannerBucketTaskBoardTaskFormat, object>> selectExpression)
{
if (selectExpression == null)
{
throw new ArgumentNullException(nameof(selectExpression));
}
string error;
string value = ExpressionExtractHelper.ExtractMembers(selectExpression, out error);
if (value == null)
{
throw new ArgumentException(error, nameof(selectExpression));
}
else
{
this.QueryOptions.Add(new QueryOption("$select", value));
}
return this;
}
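// Illustrative usage sketch only (not produced by the template): "request" stands for an
// IPlannerBucketTaskBoardTaskFormatRequest obtained from a request builder, and the selected
// property names are assumptions.
//
//   var format = await request
//       .Select("id")
//       .GetAsync();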
/// <summary>
/// Initializes any collection properties after deserialization, like next requests for paging.
/// </summary>
/// <param name="plannerBucketTaskBoardTaskFormatToInitialize">The <see cref="PlannerBucketTaskBoardTaskFormat"/> with the collection properties to initialize.</param>
private void InitializeCollectionProperties(PlannerBucketTaskBoardTaskFormat plannerBucketTaskBoardTaskFormatToInitialize)
{
}
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
/*============================================================
**
**
**
** Purpose: Convenient wrapper for an array, an offset, and
** a count. Ideally used in streams & collections.
** Net Classes will consume an array of these.
**
**
===========================================================*/
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics;
using System.Diagnostics.Contracts;
namespace System
{
// Note: users should make sure they copy the fields out of an ArraySegment onto their stack
// then validate that the fields describe valid bounds within the array. This must be done
// because assignments to value types are not atomic, and also because one thread reading
// three fields from an ArraySegment may not see the same ArraySegment from one call to another
// (i.e., users could assign a new value to the old location).
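// A minimal sketch of that pattern (hypothetical caller code; GetSegment and Process are placeholders):
//
//   ArraySegment<byte> segment = GetSegment();   // copy the struct onto the stack
//   byte[] array = segment.Array;                // read fields only from the local copy
//   int offset = segment.Offset, count = segment.Count;
//   if (array == null || (uint)offset > (uint)array.Length || (uint)count > (uint)(array.Length - offset))
//       throw new InvalidOperationException("Torn or invalid segment");
//   Process(array, offset, count);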
[Serializable]
[System.Runtime.CompilerServices.TypeForwardedFrom("mscorlib, Version=4.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")]
public struct ArraySegment<T> : IList<T>, IReadOnlyList<T>
{
// Do not replace the array allocation with Array.Empty. We don't want to have the overhead of
// instantiating another generic type in addition to ArraySegment<T> for new type parameters.
public static ArraySegment<T> Empty { get; } = new ArraySegment<T>(new T[0]);
private readonly T[] _array;
private readonly int _offset;
private readonly int _count;
public ArraySegment(T[] array)
{
if (array == null)
ThrowHelper.ThrowArgumentNullException(ExceptionArgument.array);
Contract.EndContractBlock();
_array = array;
_offset = 0;
_count = array.Length;
}
public ArraySegment(T[] array, int offset, int count)
{
// Validate arguments; the check uses minimal instructions with reduced branching so the fast path stays inlinable.
// Negative values are caught because they become very large values when converted to unsigned.
// Failure should be rare, so determining the exact location and message is delegated to the failure functions.
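// For example, offset = -1 becomes (uint)offset = 4294967295, which is larger than any possible
// array.Length, so a single unsigned comparison also rejects negative offsets and counts.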
if (array == null || (uint)offset > (uint)array.Length || (uint)count > (uint)(array.Length - offset))
ThrowHelper.ThrowArraySegmentCtorValidationFailedExceptions(array, offset, count);
Contract.EndContractBlock();
_array = array;
_offset = offset;
_count = count;
}
public T[] Array => _array;
public int Offset => _offset;
public int Count => _count;
public T this[int index]
{
get
{
if ((uint)index >= (uint)_count)
{
ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index);
}
return _array[_offset + index];
}
set
{
if ((uint)index >= (uint)_count)
{
ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index);
}
_array[_offset + index] = value;
}
}
public Enumerator GetEnumerator()
{
ThrowInvalidOperationIfDefault();
return new Enumerator(this);
}
public override int GetHashCode()
{
if (_array == null)
{
return 0;
}
int hash = 5381;
hash = System.Numerics.Hashing.HashHelpers.Combine(hash, _offset);
hash = System.Numerics.Hashing.HashHelpers.Combine(hash, _count);
// The array hash is expected to be an evenly-distributed mixture of bits,
// so rather than adding the cost of another rotation we just xor it.
hash ^= _array.GetHashCode();
return hash;
}
public void CopyTo(T[] destination) => CopyTo(destination, 0);
public void CopyTo(T[] destination, int destinationIndex)
{
ThrowInvalidOperationIfDefault();
System.Array.Copy(_array, _offset, destination, destinationIndex, _count);
}
public void CopyTo(ArraySegment<T> destination)
{
ThrowInvalidOperationIfDefault();
destination.ThrowInvalidOperationIfDefault();
if (_count > destination._count)
{
ThrowHelper.ThrowArgumentException_DestinationTooShort();
}
System.Array.Copy(_array, _offset, destination._array, destination._offset, _count);
}
public override bool Equals(Object obj)
{
if (obj is ArraySegment<T>)
return Equals((ArraySegment<T>)obj);
else
return false;
}
public bool Equals(ArraySegment<T> obj)
{
return obj._array == _array && obj._offset == _offset && obj._count == _count;
}
public ArraySegment<T> Slice(int index)
{
ThrowInvalidOperationIfDefault();
if ((uint)index > (uint)_count)
{
ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index);
}
return new ArraySegment<T>(_array, _offset + index, _count - index);
}
public ArraySegment<T> Slice(int index, int count)
{
ThrowInvalidOperationIfDefault();
if ((uint)index > (uint)_count || (uint)count > (uint)(_count - index))
{
ThrowHelper.ThrowArgumentOutOfRangeException(ExceptionArgument.index);
}
return new ArraySegment<T>(_array, _offset + index, count);
}
public T[] ToArray()
{
ThrowInvalidOperationIfDefault();
if (_count == 0)
{
return Empty._array;
}
var array = new T[_count];
System.Array.Copy(_array, _offset, array, 0, _count);
return array;
}
public static bool operator ==(ArraySegment<T> a, ArraySegment<T> b)
{
return a.Equals(b);
}
public static bool operator !=(ArraySegment<T> a, ArraySegment<T> b)
{
return !(a == b);
}
public static implicit operator ArraySegment<T>(T[] array) => new ArraySegment<T>(array);
#region IList<T>
T IList<T>.this[int index]
{
get
{
ThrowInvalidOperationIfDefault();
if (index < 0 || index >= _count)
ThrowHelper.ThrowArgumentOutOfRange_IndexException();
Contract.EndContractBlock();
return _array[_offset + index];
}
set
{
ThrowInvalidOperationIfDefault();
if (index < 0 || index >= _count)
ThrowHelper.ThrowArgumentOutOfRange_IndexException();
Contract.EndContractBlock();
_array[_offset + index] = value;
}
}
int IList<T>.IndexOf(T item)
{
ThrowInvalidOperationIfDefault();
int index = System.Array.IndexOf<T>(_array, item, _offset, _count);
Debug.Assert(index == -1 ||
(index >= _offset && index < _offset + _count));
return index >= 0 ? index - _offset : -1;
}
void IList<T>.Insert(int index, T item)
{
ThrowHelper.ThrowNotSupportedException();
}
void IList<T>.RemoveAt(int index)
{
ThrowHelper.ThrowNotSupportedException();
}
#endregion
#region IReadOnlyList<T>
T IReadOnlyList<T>.this[int index]
{
get
{
ThrowInvalidOperationIfDefault();
if (index < 0 || index >= _count)
ThrowHelper.ThrowArgumentOutOfRange_IndexException();
Contract.EndContractBlock();
return _array[_offset + index];
}
}
#endregion IReadOnlyList<T>
#region ICollection<T>
bool ICollection<T>.IsReadOnly
{
get
{
// the indexer setter does not throw an exception although IsReadOnly is true.
// This is to match the behavior of arrays.
return true;
}
}
void ICollection<T>.Add(T item)
{
ThrowHelper.ThrowNotSupportedException();
}
void ICollection<T>.Clear()
{
ThrowHelper.ThrowNotSupportedException();
}
bool ICollection<T>.Contains(T item)
{
ThrowInvalidOperationIfDefault();
int index = System.Array.IndexOf<T>(_array, item, _offset, _count);
Debug.Assert(index == -1 ||
(index >= _offset && index < _offset + _count));
return index >= 0;
}
bool ICollection<T>.Remove(T item)
{
ThrowHelper.ThrowNotSupportedException();
return default(bool);
}
#endregion
#region IEnumerable<T>
IEnumerator<T> IEnumerable<T>.GetEnumerator() => GetEnumerator();
#endregion
#region IEnumerable
IEnumerator IEnumerable.GetEnumerator() => GetEnumerator();
#endregion
private void ThrowInvalidOperationIfDefault()
{
if (_array == null)
{
ThrowHelper.ThrowInvalidOperationException(ExceptionResource.InvalidOperation_NullArray);
}
}
public struct Enumerator : IEnumerator<T>
{
private readonly T[] _array;
private readonly int _start;
private readonly int _end; // cache Offset + Count, since it's a little slow
private int _current;
internal Enumerator(ArraySegment<T> arraySegment)
{
Contract.Requires(arraySegment.Array != null);
Contract.Requires(arraySegment.Offset >= 0);
Contract.Requires(arraySegment.Count >= 0);
Contract.Requires(arraySegment.Offset + arraySegment.Count <= arraySegment.Array.Length);
_array = arraySegment.Array;
_start = arraySegment.Offset;
_end = arraySegment.Offset + arraySegment.Count;
_current = arraySegment.Offset - 1;
}
public bool MoveNext()
{
if (_current < _end)
{
_current++;
return (_current < _end);
}
return false;
}
public T Current
{
get
{
if (_current < _start)
ThrowHelper.ThrowInvalidOperationException_InvalidOperation_EnumNotStarted();
if (_current >= _end)
ThrowHelper.ThrowInvalidOperationException_InvalidOperation_EnumEnded();
return _array[_current];
}
}
object IEnumerator.Current => Current;
void IEnumerator.Reset()
{
_current = _start - 1;
}
public void Dispose()
{
}
}
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace TernaryTreeGenerator
{
using System;
using System.IO;
using System.Text;
using System.Diagnostics;
using Microsoft.Xml;
//
// HTML Element/Attribute name <-> XSL output property mapping
//
/// <include file='doc\TernaryTreeGenerator.uex' path='docs/doc[@for="HtmlProps"]/*' />
public class HtmlProps
{
/// <include file='doc\TernaryTreeGenerator.uex' path='docs/doc[@for="HtmlProps.nameHTML;"]/*' />
public String nameHTML;
/// <include file='doc\TernaryTreeGenerator.uex' path='docs/doc[@for="HtmlProps.properties;"]/*' />
public byte properties;
/// <include file='doc\TernaryTreeGenerator.uex' path='docs/doc[@for="HtmlProps.visited;"]/*' />
public bool visited;
/// <include file='doc\TernaryTreeGenerator.uex' path='docs/doc[@for="HtmlProps.HtmlProps"]/*' />
public HtmlProps(String nameHTML, byte properties)
{
this.nameHTML = nameHTML;
this.properties = properties;
}
}
//
// TernaryTreeRW // -------------
//
// NOTE: This class is only meant to be used to generate static tables to be used
// later with TernaryTreeRO. It should not be used in production code.
//
/// <include file='doc\TernaryTreeGenerator.uex' path='docs/doc[@for="TernaryTreeRW"]/*' />
public class TernaryTreeRW
{
/// <include file='doc\TernaryTreeGenerator.uex' path='docs/doc[@for="TernaryTreeRW.MAX_NODES"]/*' />
public const int MAX_NODES = 2000;
// Since _lt and _gt are just offsets in the node buffer, the node buffer size is limited
private byte[,] _nodeBuffer;
// position of the next free node in _nodeBuffer
private int _endNodePos;
private int _numHtmlElements;
private int _numHtmlAttributes;
private static HtmlProps[] s_htmlElements = {
new HtmlProps("a", (byte)(ElementProperties.URI_PARENT)),
new HtmlProps("address", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("applet", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("area", (byte)(ElementProperties.BOOL_PARENT | ElementProperties.URI_PARENT | ElementProperties.EMPTY | ElementProperties.BLOCK_WS)),
new HtmlProps("base", (byte)(ElementProperties.URI_PARENT | ElementProperties.EMPTY | ElementProperties.BLOCK_WS)),
new HtmlProps("basefont", (byte)(ElementProperties.EMPTY | ElementProperties.BLOCK_WS)),
new HtmlProps("blockquote", (byte)(ElementProperties.URI_PARENT | ElementProperties.BLOCK_WS)),
new HtmlProps("body", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("br", (byte)(ElementProperties.EMPTY)),
new HtmlProps("button", (byte)(ElementProperties.BOOL_PARENT)),
new HtmlProps("caption", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("center", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("col", (byte)(ElementProperties.EMPTY | ElementProperties.BLOCK_WS)),
new HtmlProps("colgroup", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("dd", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("del", (byte)(ElementProperties.URI_PARENT | ElementProperties.BLOCK_WS)),
new HtmlProps("dir", (byte)(ElementProperties.BOOL_PARENT | ElementProperties.BLOCK_WS)),
new HtmlProps("div", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("dl", (byte)(ElementProperties.BOOL_PARENT | ElementProperties.BLOCK_WS)),
new HtmlProps("dt", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("fieldset", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("font", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("form", (byte)(ElementProperties.URI_PARENT | ElementProperties.BLOCK_WS)),
new HtmlProps("frame", (byte)(ElementProperties.BOOL_PARENT | ElementProperties.EMPTY | ElementProperties.BLOCK_WS)),
new HtmlProps("frameset", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("h1", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("h2", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("h3", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("h4", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("h5", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("h6", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("head", (byte)(ElementProperties.URI_PARENT | ElementProperties.BLOCK_WS | ElementProperties.HEAD)),
new HtmlProps("hr", (byte)(ElementProperties.BOOL_PARENT | ElementProperties.EMPTY | ElementProperties.BLOCK_WS)),
new HtmlProps("html", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("iframe", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("img", (byte)(ElementProperties.BOOL_PARENT | ElementProperties.URI_PARENT | ElementProperties.EMPTY)),
new HtmlProps("input", (byte)(ElementProperties.BOOL_PARENT | ElementProperties.URI_PARENT | ElementProperties.EMPTY)),
new HtmlProps("ins", (byte)(ElementProperties.URI_PARENT | ElementProperties.BLOCK_WS)),
new HtmlProps("isindex", (byte)(ElementProperties.EMPTY | ElementProperties.BLOCK_WS)),
new HtmlProps("legend", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("li", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("link", (byte)(ElementProperties.URI_PARENT | ElementProperties.EMPTY | ElementProperties.BLOCK_WS)),
new HtmlProps("map", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("menu", (byte)(ElementProperties.BOOL_PARENT | ElementProperties.BLOCK_WS)),
new HtmlProps("meta", (byte)(ElementProperties.EMPTY | ElementProperties.BLOCK_WS)),
new HtmlProps("noframes", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("noscript", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("object", (byte)(ElementProperties.BOOL_PARENT | ElementProperties.URI_PARENT)),
new HtmlProps("ol", (byte)(ElementProperties.BOOL_PARENT | ElementProperties.BLOCK_WS)),
new HtmlProps("optgroup", (byte)(ElementProperties.BOOL_PARENT | ElementProperties.BLOCK_WS)),
new HtmlProps("option", (byte)(ElementProperties.BOOL_PARENT | ElementProperties.BLOCK_WS)),
new HtmlProps("p", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("param", (byte)(ElementProperties.EMPTY | ElementProperties.BLOCK_WS)),
new HtmlProps("pre", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("q", (byte)(ElementProperties.URI_PARENT)),
new HtmlProps("s", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("script", (byte)(ElementProperties.NO_ENTITIES | ElementProperties.BOOL_PARENT | ElementProperties.URI_PARENT)),
new HtmlProps("select", (byte)(ElementProperties.BOOL_PARENT)),
new HtmlProps("strike", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("style", (byte)(ElementProperties.NO_ENTITIES | ElementProperties.BLOCK_WS)),
new HtmlProps("table", (byte)(ElementProperties.URI_PARENT | ElementProperties.BLOCK_WS)),
new HtmlProps("tbody", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("td", (byte)(ElementProperties.BOOL_PARENT | ElementProperties.BLOCK_WS)),
new HtmlProps("textarea", (byte)(ElementProperties.BOOL_PARENT)),
new HtmlProps("tfoot", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("th", (byte)(ElementProperties.BOOL_PARENT | ElementProperties.BLOCK_WS)),
new HtmlProps("thead", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("title", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("tr", (byte)(ElementProperties.BLOCK_WS)),
new HtmlProps("ul", (byte)(ElementProperties.BOOL_PARENT | ElementProperties.BLOCK_WS)),
new HtmlProps("xmp", (byte)(ElementProperties.BLOCK_WS)),
};
private static HtmlProps[] s_htmlAttributes = {
new HtmlProps("action", (byte)AttributeProperties.URI),
new HtmlProps("checked", (byte)AttributeProperties.BOOLEAN),
new HtmlProps("cite", (byte)AttributeProperties.URI),
new HtmlProps("classid", (byte)AttributeProperties.URI),
new HtmlProps("codebase", (byte)AttributeProperties.URI),
new HtmlProps("compact", (byte)AttributeProperties.BOOLEAN),
new HtmlProps("data", (byte)AttributeProperties.URI),
new HtmlProps("datasrc", (byte)AttributeProperties.URI),
new HtmlProps("declare", (byte)AttributeProperties.BOOLEAN),
new HtmlProps("defer", (byte)AttributeProperties.BOOLEAN),
new HtmlProps("disabled", (byte)AttributeProperties.BOOLEAN),
new HtmlProps("for", (byte)AttributeProperties.URI),
new HtmlProps("href", (byte)AttributeProperties.URI),
new HtmlProps("ismap", (byte)AttributeProperties.BOOLEAN),
new HtmlProps("longdesc", (byte)AttributeProperties.URI),
new HtmlProps("multiple", (byte)AttributeProperties.BOOLEAN),
new HtmlProps("name", (byte)AttributeProperties.URI),
new HtmlProps("nohref", (byte)AttributeProperties.BOOLEAN),
new HtmlProps("noresize", (byte)AttributeProperties.BOOLEAN),
new HtmlProps("noshade", (byte)AttributeProperties.BOOLEAN),
new HtmlProps("nowrap", (byte)AttributeProperties.BOOLEAN),
new HtmlProps("profile", (byte)AttributeProperties.URI),
new HtmlProps("readonly", (byte)AttributeProperties.BOOLEAN),
new HtmlProps("selected", (byte)AttributeProperties.BOOLEAN),
new HtmlProps("src", (byte)AttributeProperties.URI),
new HtmlProps("usemap", (byte)AttributeProperties.URI),
};
/// <include file='doc\TernaryTreeGenerator.uex' path='docs/doc[@for="TernaryTreeRW.TernaryTreeRW"]/*' />
public TernaryTreeRW()
{
// Create the two-dimensional byte array
_nodeBuffer = new byte[MAX_NODES, 4];
_numHtmlElements = s_htmlElements.Length;
_numHtmlAttributes = s_htmlAttributes.Length;
_endNodePos = 1;
}
/// <include file='doc\TernaryTreeGenerator.uex' path='docs/doc[@for="TernaryTreeRW.lengthNodes"]/*' />
public int lengthNodes()
{
return _endNodePos;
}
/// <include file='doc\TernaryTreeGenerator.uex' path='docs/doc[@for="TernaryTreeRW.lengthBytes"]/*' />
public int lengthBytes()
{
return _endNodePos;
}
// ----------------------------------------------------------------------------
// AddCaseInsensitiveString()
//
// Insert a string into this ternary tree. Assert if it's already been inserted
// or if the path offset variables (_lt, _gt) are overflowed.
//
/// <include file='doc\TernaryTreeGenerator.uex' path='docs/doc[@for="TernaryTreeRW.AddCaseInsensitiveString"]/*' />
public void AddCaseInsensitiveString(String stringToAdd, byte data)
{
int charToAdd;
int charInTheTree;
int stringPos = 0;
int nodePos = 0;
if (stringToAdd.Length == 0)
{
charToAdd = 0;
}
else
{
charToAdd = stringToAdd[0];
// Normalize to upper case
if (charToAdd >= 'a' && charToAdd <= 'z') charToAdd -= ('a' - 'A');
}
do
{
charInTheTree = _nodeBuffer[nodePos, (int)TernaryTreeByte.characterByte];
//Console.Write("charToAdd: {0},charInTheTree: {1}, nodePos: {2}, endNodePos {3}, ", Convert.ToChar(charToAdd), Convert.ToChar(charInTheTree), nodePos, endNodePos);
//Console.WriteLine("Left :{0}, Right:{0}", nodeBuffer[nodePos, (int)TernaryTreeByte.leftTree], nodeBuffer[nodePos, (int)TernaryTreeByte.rightTree]);
if (charToAdd == charInTheTree)
{
// This character already appears in the tree; take the equals path
nodePos++;
stringPos++;
if (stringPos == stringToAdd.Length)
{
charToAdd = 0;
}
else
{
charToAdd = stringToAdd[stringPos];
// Normalize to upper case
if (charToAdd >= 'a' && charToAdd <= 'z') charToAdd -= ('a' - 'A');
}
}
else
{
if (charToAdd < charInTheTree)
{
if (_nodeBuffer[nodePos, (int)TernaryTreeByte.leftTree] == 0x0)
{
// Create a new path less than the current character
if (_endNodePos - nodePos > 255)
{
throw new Exception("Too many characters have been added");
}
else
{
_nodeBuffer[nodePos, (int)TernaryTreeByte.leftTree] = (byte)(_endNodePos - nodePos);
}
}
else
{
// Take the existing less than path
nodePos = nodePos + _nodeBuffer[nodePos, (int)TernaryTreeByte.leftTree];
continue;
}
}
else
{
if (_nodeBuffer[nodePos, (int)TernaryTreeByte.rightTree] == 0x0)
{
// Create a new path greater than the current character
if (_endNodePos - nodePos > 255)
{
throw new Exception("Too many characters have been added");
}
else
{
_nodeBuffer[nodePos, (int)TernaryTreeByte.rightTree] = (byte)(_endNodePos - nodePos);
}
}
else
{
// Take the existing greater than path
nodePos = nodePos + _nodeBuffer[nodePos, (int)TernaryTreeByte.rightTree];
continue;
}
}
// Add new nodes to hold rest of string
for (int i = stringPos; i < stringToAdd.Length; ++i)
{
charToAdd = stringToAdd[i];
// Normalize to upper case
if (charToAdd >= 'a' && charToAdd <= 'z') charToAdd -= ('a' - 'A');
nodePos = _endNodePos++;
_nodeBuffer[nodePos, (int)TernaryTreeByte.characterByte] = (byte)charToAdd;
}
// Store terminating null to indicate that this is the end of the string
nodePos = _endNodePos++;
_nodeBuffer[nodePos, (int)TernaryTreeByte.characterByte] = 0;
_nodeBuffer[nodePos, (int)TernaryTreeByte.data] = data;
break;
}
} while (true);
}
/// <include file='doc\TernaryTreeGenerator.uex' path='docs/doc[@for="TernaryTreeRW.GenerateHtmlElementTable"]/*' />
public void GenerateHtmlElementTable()
{
// Verify that the element and attribute tables are sorted correctly
s_htmlElements[0].visited = false;
for (int i = 1; i < _numHtmlElements; i++)
{
s_htmlElements[i].visited = false;
if (s_htmlElements[i - 1].nameHTML.CompareTo(s_htmlElements[i].nameHTML) > 0)
{
throw new Exception("String table not correctly sorted");
}
}
// Add strings from the tables to the ternary trees
AddMidHtmlElement(0, _numHtmlElements - 1);
}
/// <include file='doc\TernaryTreeGenerator.uex' path='docs/doc[@for="TernaryTreeRW.GenerateHtmlAttributeTable"]/*' />
public void GenerateHtmlAttributeTable()
{
// Verify that the element and attribute tables are sorted correctly
s_htmlAttributes[0].visited = false;
for (int i = 1; i < _numHtmlAttributes; i++)
{
s_htmlAttributes[i].visited = false;
if (s_htmlAttributes[i - 1].nameHTML.CompareTo(s_htmlAttributes[i].nameHTML) > 0)
{
throw new Exception("String table not correctly sorted");
}
}
// Add strings from the tables to the ternary trees
AddMidHtmlAttribute(0, _numHtmlAttributes - 1);
}
// ----------------------------------------------------------------------------
// AddMidHtmlElement()
//
// Given a sorted list of HTML names/properties, adds the middle name to the
// ternary tree and partitions the remaining halves, which are then recursively
// sent to this procedure.
//
private void AddMidHtmlElement(int startPos, int endPos)
{
int midPos;
if (startPos > endPos)
return;
midPos = (startPos + endPos) / 2;
AddCaseInsensitiveString(s_htmlElements[midPos].nameHTML, s_htmlElements[midPos].properties);
AddMidHtmlElement(startPos, midPos - 1);
AddMidHtmlElement(midPos + 1, endPos);
}
// ----------------------------------------------------------------------------
// AddMidHtmlAttribute()
//
// Given a sorted list of HTML names/properties, adds the middle name to the
// ternary tree and partitions the remaining halves, which are then recursively
// sent to this procedure.
//
private void AddMidHtmlAttribute(int startPos, int endPos)
{
int midPos;
if (startPos > endPos)
return;
midPos = (startPos + endPos) / 2;
AddCaseInsensitiveString(s_htmlAttributes[midPos].nameHTML, s_htmlAttributes[midPos].properties);
AddMidHtmlAttribute(startPos, midPos - 1);
AddMidHtmlAttribute(midPos + 1, endPos);
}
//----------------------------------------------------------------------------
// OutputFileHeader()
//
/// <include file='doc\TernaryTreeGenerator.uex' path='docs/doc[@for="TernaryTreeRW.OutputFileHeader"]/*' />
public void OutputFileHeader(StreamWriter streamWriter)
{
string indent1 = " ";
streamWriter.WriteLine("//------------------------------------------------------------------------------");
streamWriter.WriteLine("// <copyright file=\"TernaryTreeGenerator.cs\" company=\"Microsoft\">");
streamWriter.WriteLine("// Copyright (c) Microsoft Corporation. All rights reserved.");
streamWriter.WriteLine("// </copyright>");
streamWriter.WriteLine("// <owner current=\"true\" primary=\"true\">derekdb</owner>");
streamWriter.WriteLine("//------------------------------------------------------------------------------");
streamWriter.WriteLine("//");
streamWriter.WriteLine("// This file is generated by TernaryTreeGenerator.cs,");
streamWriter.WriteLine("// and is used by the TernaryTreeRO class.\n");
streamWriter.WriteLine("//");
streamWriter.WriteLine("// It contains the state for a ternary tree used to map HTML\n");
streamWriter.WriteLine("// keywords to XSL output properties.\n");
streamWriter.WriteLine("//");
streamWriter.WriteLine("// Do not modify this file directly! (as if you could)\n");
streamWriter.WriteLine("//");
streamWriter.WriteLine("namespace Microsoft.Xml");
streamWriter.WriteLine("{0}internal abstract class HtmlTernaryTree {{", indent1);
}
//----------------------------------------------------------------------------
// OutputFileFooter()
//
// Writes the closing braces for the generated class and namespace, then flushes
// the output stream.
//
/// <include file='doc\TernaryTreeGenerator.uex' path='docs/doc[@for="TernaryTreeRW.OutputFileFooter"]/*' />
public void OutputFileFooter(StreamWriter streamWriter)
{
streamWriter.WriteLine(" }");
streamWriter.WriteLine("}");
streamWriter.Flush();
}
//----------------------------------------------------------------------------
// dumpTree()
//
// Dump a C-style BYTE table to the output file. This table will contain the
// persisted state of this ternary tree.
//
/// <include file='doc\TernaryTreeGenerator.uex' path='docs/doc[@for="TernaryTreeRW.dumpTree"]/*' />
public void dumpTree(StreamWriter streamWriter, String wszName)
{
int n = 0;
string indent3 = " ";
string indent2 = " ";
//string indent1 = " ";
streamWriter.WriteLine("{0}internal static byte [] {1} = {{", indent2, wszName);
streamWriter.Write(indent3);
for (int pos = 1; pos < _endNodePos; pos++)
{
streamWriter.Write("{0},{1},{2},{3},", _nodeBuffer[pos, (int)TernaryTreeByte.characterByte],
_nodeBuffer[pos, (int)TernaryTreeByte.leftTree],
_nodeBuffer[pos, (int)TernaryTreeByte.rightTree],
_nodeBuffer[pos, (int)TernaryTreeByte.data]);
n++;
if (n % 7 == 0)
{
streamWriter.WriteLine();
streamWriter.Write(indent3);
}
else
{
streamWriter.Write(" ");
}
}
streamWriter.WriteLine();
streamWriter.WriteLine("{0}}};", indent2);
streamWriter.Flush();
}
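// ----------------------------------------------------------------------------
// Example usage (illustrative sketch only; the output file name is an assumption):
//
//   var elements = new TernaryTreeRW();
//   elements.GenerateHtmlElementTable();
//   var attributes = new TernaryTreeRW();
//   attributes.GenerateHtmlAttributeTable();
//   using (var writer = new StreamWriter("HtmlTernaryTree.cs"))
//   {
//       elements.OutputFileHeader(writer);
//       elements.dumpTree(writer, "htmlElements");
//       attributes.dumpTree(writer, "htmlAttributes");
//       elements.OutputFileFooter(writer);
//   }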
}
}
using System;
using System.Collections;
using System.Xml;
using System.IO;
using System.Text;
using System.Reflection;
using System.Text.RegularExpressions;
using Mono.CompilerServices.SymbolWriter;
using Mono.Cecil;
using Mono.Cecil.Metadata;
namespace MonoCov
{
public delegate void CoverageProgress (string item, double percent);
public class CoverageModel : CoverageItem {
private string dataFileName;
private Hashtable namespaces;
private Hashtable classes;
private Hashtable sources;
private Hashtable loadedAssemblies;
private Hashtable symbolFiles;
public event CoverageProgress Progress;
/**
* List of filters, which are strings
*/
private ArrayList filters;
public CoverageModel ()
{
dataFileName = string.Empty;
namespaces = new Hashtable ();
classes = new Hashtable ();
sources = new Hashtable ();
filters = new ArrayList ();
Progress += delegate {}; // better than having to check every time...
}
public Hashtable Classes {
get {
return classes;
}
}
public Hashtable Namespaces {
get {
return namespaces;
}
}
public void AddFilter (String pattern) {
filters.Add (pattern);
}
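// Filter patterns are plain substrings prefixed with '+' (keep only matching names) or
// '-' (exclude matching names). For example (hypothetical patterns):
//   model.AddFilter ("+[MyAssembly]");     // cover only items from MyAssembly
//   model.AddFilter ("-MyAssembly.Tests"); // ...but skip anything under the test namespace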
private bool IsFiltered (string name)
{
// Check positive filters first
bool hasPositive = false;
bool found = false;
foreach (String pattern in filters) {
if (pattern [0] == '+') {
string p = pattern.Substring (1);
if (name.IndexOf (p) != -1) {
//Console.WriteLine ("FILTERED: " + pattern + " -> " + name);
found = true;
}
hasPositive = true;
}
}
if (hasPositive && !found)
return true;
foreach (String pattern in filters) {
if (pattern [0] == '-') {
string p = pattern.Substring (1);
if (name.IndexOf (p) != -1) {
//Console.WriteLine ("FILTERED: " + pattern + " -> " + name);
return true;
}
}
}
return false;
}
private void LoadAssemblies (XmlDocument dom)
{
foreach (XmlNode n in dom.GetElementsByTagName ("assembly")) {
string assemblyName = n.Attributes ["name"].Value;
string guid = n.Attributes ["guid"].Value;
string filename = n.Attributes ["filename"].Value;
MonoSymbolFile symbolFile;
if (!File.Exists (filename)) {
string newFilename = Path.Combine(Path.GetDirectoryName (dataFileName), Path.GetFileName (filename));
if (File.Exists (newFilename))
filename = newFilename;
}
#if USE_REFLECTION
Assembly assembly = Assembly.Load (assemblyName);
MethodInfo getguid = typeof (Module).GetMethod (
"Mono_GetGuid", BindingFlags.Instance|BindingFlags.Public|BindingFlags.NonPublic,
null, CallingConventions.Any, new Type [0], null);
if (getguid != null) {
Guid assembly_guid = (Guid)getguid.Invoke (assembly.GetLoadedModules ()[0], new object [0]);
Console.WriteLine (assembly_guid);
if (assembly_guid != new Guid (guid)) {
Console.WriteLine ("WARNING: Loaded version of assembly " + assembly + " is different from the version used to collect coverage data.");
}
} else {
Console.WriteLine ("WARNING: Can't verify the guid of " + assembly);
}
loadedAssemblies [assemblyName] = assembly;
Console.Write ("Reading symbols for " + assembly + " ...");
symbolFile = MonoSymbolFile.ReadSymbolFile (assembly);
if (symbolFile == null)
Console.WriteLine (" (No symbols found)");
else {
symbolFiles [assembly] = symbolFile;
Console.WriteLine (" (" + symbolFile.SourceCount + " files, " + symbolFile.MethodCount + " methods)");
}
#else
AssemblyDefinition assembly = AssemblyFactory.GetAssembly (filename);
ModuleDefinition module = assembly.MainModule;
if (module.Mvid != new Guid (guid)) {
Console.WriteLine ("WARNING: Loaded version of assembly " + assembly + " is different from the version used to collect coverage data.");
}
loadedAssemblies [assemblyName] = assembly;
Console.Write ("Reading symbols for " + assemblyName + " ...");
symbolFile = MonoSymbolFile.ReadSymbolFile (filename + ".mdb");
if (symbolFile == null)
Console.WriteLine (" (No symbols found)");
else {
symbolFiles [assembly] = symbolFile;
Console.WriteLine (" (" + symbolFile.SourceCount + " files, " + symbolFile.MethodCount + " methods)");
}
#endif
}
}
private void LoadFilters (XmlDocument dom)
{
foreach (XmlNode n in dom.GetElementsByTagName ("filter")) {
AddFilter (n.Attributes ["pattern"].Value);
}
}
#if USE_REFLECTION
static Type LoadType (Assembly assembly, string name) {
Type type = assembly.GetType (name);
if (type != null)
return type;
int last_dot = name.LastIndexOf ('.');
// convert names from IL to reflection naming
// needed to deal with nested types
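// e.g. "Foo.Bar.Nested" is retried as "Foo.Bar/Nested" and then as "Foo/Bar/Nested"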
while (last_dot >= 0) {
StringBuilder sb = new StringBuilder (name);
sb [last_dot] = '/';
name = sb.ToString ();
type = assembly.GetType (name);
if (type != null)
return type;
last_dot = name.LastIndexOf ('.');
}
return null;
}
#else
static TypeDefinition LoadType (AssemblyDefinition assembly, string name) {
TypeDefinition type = assembly.MainModule.Types [name];
if (type != null)
return type;
int last_dot = name.LastIndexOf ('.');
// convert names from IL to reflection naming
// needed to deal with nested types
while (last_dot >= 0) {
StringBuilder sb = new StringBuilder (name);
sb [last_dot] = '/';
name = sb.ToString ();
type = assembly.MainModule.Types [name];
if (type != null)
return type;
last_dot = name.LastIndexOf ('.');
}
return null;
}
#endif
public void ReadFromFile (string fileName)
{
dataFileName = fileName;
namespaces = new Hashtable ();
classes = new Hashtable ();
long begin = DateTime.Now.Ticks / 10000;
long msec = DateTime.Now.Ticks / 10000;
long msec2;
loadedAssemblies = new Hashtable ();
symbolFiles = new Hashtable ();
XmlDocument dom = new XmlDocument ();
Progress ("XML reading", 0);
Console.Write ("Loading " + fileName + "...");
dom.Load (new XmlTextReader (new FileStream (fileName, FileMode.Open)));
Console.WriteLine (" Done.");
msec2 = DateTime.Now.Ticks / 10000;
Console.WriteLine ("XML Reading: " + (msec2 - msec) + " msec");
msec = msec2;
Progress ("Load assemblies", 0.2);
LoadAssemblies (dom);
LoadFilters (dom);
msec2 = DateTime.Now.Ticks / 10000;
Console.WriteLine ("Load assemblies: " + (msec2 - msec) + " msec");
msec = msec2;
Progress ("Load methods", 0.4);
foreach (XmlNode n in dom.GetElementsByTagName ("method")) {
string assemblyName = n.Attributes ["assembly"].Value;
string className = n.Attributes ["class"].Value;
string methodName = n.Attributes ["name"].Value;
string token = n.Attributes ["token"].Value;
string cov_info = n.FirstChild.Value;
int itok = int.Parse (token);
#if USE_REFLECTION
Assembly assembly = (Assembly)loadedAssemblies [assemblyName];
MonoSymbolFile symbolFile = (MonoSymbolFile)symbolFiles [assembly];
if (symbolFile == null)
continue;
Type t = LoadType (assembly, className);
if (t == null) {
Console.WriteLine ("ERROR: Unable to resolve type " + className + " in " + assembly);
continue;
}
ClassCoverageItem klass = ProcessClass (t);
MethodEntry entry = symbolFile.GetMethodByToken (Int32.Parse (token));
Module[] modules = assembly.GetModules();
if (modules.Length > 1)
Console.WriteLine("WARNING: Assembly had more than one module. Using the first.");
Module module = modules[0];
MethodBase monoMethod = module.ResolveMethod(Int32.Parse(token));
ProcessMethod (monoMethod, entry, klass, methodName, cov_info);
#else
if ((TokenType)(itok & 0xff000000) != TokenType.Method)
continue;
AssemblyDefinition assembly = (AssemblyDefinition)loadedAssemblies [assemblyName];
MonoSymbolFile symbolFile = (MonoSymbolFile)symbolFiles [assembly];
if (symbolFile == null)
continue;
TypeDefinition t = LoadType (assembly, className);
if (t == null) {
Console.WriteLine ("ERROR: Unable to resolve type " + className + " in " + assembly);
continue;
}
ClassCoverageItem klass = ProcessClass (t);
MethodEntry entry = symbolFile.GetMethodByToken (itok);
MethodDefinition monoMethod = assembly.MainModule.LookupByToken (
new MetadataToken ((TokenType)(itok & 0xff000000), (uint)(itok & 0xffffff)))
as MethodDefinition;
//Console.WriteLine (monoMethod);
ProcessMethod (monoMethod, entry, klass, methodName, cov_info);
#endif
}
msec2 = DateTime.Now.Ticks / 10000;
Console.WriteLine ("Process methods: " + (msec2 - msec) + " msec");
msec = msec2;
// Add info for klasses for which we have no coverage
#if USE_REFLECTION
foreach (Assembly assembly in loadedAssemblies.Values) {
foreach (Type t in assembly.GetTypes ()) {
ProcessClass (t);
}
}
// Add info for methods for which we have no coverage
foreach (ClassCoverageItem klass in classes.Values) {
foreach (MethodInfo mb in klass.type.GetMethods (BindingFlags.NonPublic|BindingFlags.Public|BindingFlags.Static|BindingFlags.Instance | BindingFlags.DeclaredOnly)) {
MonoSymbolFile symbolFile = (MonoSymbolFile)symbolFiles [klass.type.Assembly];
if (symbolFile == null)
continue;
if (! klass.methodsByMethod.ContainsKey (mb)) {
MethodEntry entry = symbolFile.GetMethod (mb);
ProcessMethod (mb, entry, klass, mb.Name, null);
}
}
}
#else
Progress ("Not covered classes", 0.6);
foreach (AssemblyDefinition assembly in loadedAssemblies.Values) {
foreach (TypeDefinition t in assembly.MainModule.Types) {
ProcessClass (t);
}
}
Progress ("Not covered methods", 0.7);
// Add info for methods for which we have no coverage
foreach (ClassCoverageItem klass in classes.Values) {
foreach (MethodDefinition mb in klass.type.Methods) {
MonoSymbolFile symbolFile = (MonoSymbolFile)symbolFiles [klass.type.Module.Assembly];
if (symbolFile == null)
continue;
if (! klass.methodsByMethod.ContainsKey (mb)) {
MethodEntry entry = symbolFile.GetMethodByToken ((int)mb.MetadataToken.ToUInt());
ProcessMethod (mb, entry, klass, mb.Name, null);
}
}
}
#endif
msec2 = DateTime.Now.Ticks / 10000;
Console.WriteLine ("Additional classes: " + (msec2 - msec) + " msec");
msec = msec2;
Progress ("Compute coverage", 0.9);
// Compute coverage for all items
computeCoverage (true);
msec2 = DateTime.Now.Ticks / 10000;
Console.WriteLine ("Compute coverage: " + (msec2 - msec) + " msec");
msec = msec2;
Console.WriteLine ("All: " + (msec2 - begin) + " msec");
Progress ("Done loading", 0.9);
// Free memory
symbolFiles = null;
}
//
// Computes the coverage of METHOD
//
private char[] digits = "0123456789".ToCharArray ();
private char[] ws = "\t\n ".ToCharArray ();
// Guards against reading past the end of the string when the last number is not
// followed by whitespace.
private int parsePositiveInteger (string s, int pos) {
int n = 0;
while (pos < s.Length && s [pos] >= '0' && s [pos] <= '9'){
n = n * 10 + (s [pos] - '0');
pos ++;
}
return n;
}
private void computeMethodCoverage (MethodCoverageItem method, LineNumberEntry[] lines, string cov_info)
{
ClassCoverageItem klass = method.Class;
SourceFileCoverageData source = klass.sourceFile;
source.AddMethod (method);
int nlines = method.endLine - method.startLine + 1;
int[] coverage = new int [nlines];
if (cov_info == null) {
for (int i = 0; i < nlines; ++i)
coverage [i] = 0;
}
else {
for (int i = 0; i < nlines; ++i)
coverage [i] = -1;
// Hand crafted parsing code since this is performance critical
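// cov_info is a whitespace-separated sequence of "<offset> <count>" pairs, where each
// offset is delta-encoded relative to the previous one (hence prev_offset below).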
int pos = 0;
int prev_offset = 0;
while (pos < cov_info.Length) {
int pos2 = cov_info.IndexOfAny (digits, pos);
if (pos2 == -1)
break;
pos = cov_info.IndexOfAny (ws, pos2);
if (pos == -1)
break;
int offset = parsePositiveInteger (cov_info, pos2);
pos2 = cov_info.IndexOfAny (digits, pos);
if (pos2 == -1)
break;
pos = cov_info.IndexOfAny (ws, pos2);
int count = parsePositiveInteger (cov_info, pos2);
offset += prev_offset;
prev_offset = offset;
int line1 = 0;
int line2 = 0;
bool found = GetSourceRangeFor (offset, method, lines, ref line1, ref line2);
/*
if (found && (entry.Name.IndexOf ("Find") != -1)) {
Console.WriteLine ("OFFSET: " + offset + " " + line1 + ":" + line2);
}
*/
if (found) {
for (int i = line1; i < line2 + 1; ++i)
if ((i >= method.startLine) && (i <= method.endLine))
if (coverage [i - method.startLine] < count)
coverage [i - method.startLine] = count;
}
}
}
int hit = 0;
int missed = 0;
for (int i = 0; i < nlines; ++i) {
int count = coverage [i];
if (count > 0)
hit ++;
else if (count == 0)
missed ++;
}
method.setCoverage (hit, missed);
method.lineCoverage = coverage;
}
//
// Return a range of source lines which have something to do with OFFSET.
//
private bool GetSourceRangeFor (int offset, MethodCoverageItem method,
LineNumberEntry[] lines,
ref int startLine, ref int endLine)
{
for (int i = 0; i < lines.Length; ++i) {
if (offset >= lines [i].Offset)
if (i == lines.Length - 1) {
startLine = lines [i].Row;
endLine = lines [i].Row;
return true;
}
else if (offset < lines [i + 1].Offset) {
startLine = lines [i].Row;
endLine = lines [i + 1].Row - 1;
return true;
}
}
if (offset <= lines [0].Offset) {
return false;
}
else {
for (int i = 0; i < lines.Length; ++i)
Console.WriteLine (lines [i]);
throw new Exception ("Unable to determine source range for offset " + offset + " in " + method.name);
}
}
#if USE_REFLECTION
private ClassCoverageItem ProcessClass (Type t)
#else
private ClassCoverageItem ProcessClass (TypeDefinition t)
#endif
{
string className = t.FullName;
int nsindex = className.LastIndexOf (".");
string namespace2;
string scopedName;
if (nsindex == -1) {
namespace2 = "<GLOBAL>";
scopedName = className;
} else if (nsindex == 0) {
namespace2 = "<GLOBAL>";
scopedName = className.Substring (1);
}
else {
namespace2 = className.Substring (0, nsindex);
scopedName = className.Substring (nsindex + 1);
}
// Create namespaces
NamespaceCoverageItem ns = (NamespaceCoverageItem)namespaces [namespace2];
if (ns == null) {
string nsPrefix = "";
foreach (String nsPart in namespace2.Split ('.')) {
if (nsPrefix == "")
nsPrefix = nsPart;
else
nsPrefix = nsPrefix + "." + nsPart;
NamespaceCoverageItem ns2 = (NamespaceCoverageItem)namespaces [nsPrefix];
if (ns2 == null) {
if (ns == null)
ns2 = new NamespaceCoverageItem (this, nsPrefix);
else
ns2 = new NamespaceCoverageItem (ns, nsPrefix);
namespaces [nsPrefix] = ns2;
}
ns = ns2;
}
}
ClassCoverageItem klass = (ClassCoverageItem)classes [className];
if (klass == null) {
klass = new ClassCoverageItem (ns);
klass.name_space = namespace2;
klass.name = scopedName;
klass.type = t;
klass.parent = ns;
#if USE_REFLECTION
klass.filtered = IsFiltered ("[" + t.Assembly + "]" + className);
#else
klass.filtered = IsFiltered ("[" + t.Module.Name + "]" + className);
#endif
classes [className] = klass;
}
return klass;
}
#if USE_REFLECTION
private void ProcessMethod (MethodBase monoMethod, MethodEntry entry, ClassCoverageItem klass, string methodName, string cov_info)
#else
private void ProcessMethod (MethodDefinition monoMethod, MethodEntry entry, ClassCoverageItem klass, string methodName, string cov_info)
#endif
{
if (entry == null)
// Compiler generated, abstract method etc.
return;
LineNumberEntry[] lines = entry.GetLineNumberTable ().LineNumbers;
if (lines.Length == 0)
return;
int start_line = lines [0].Row;
int end_line = lines [lines.Length - 1].Row;
MethodCoverageItem method
= new MethodCoverageItem (klass, methodName);
method.startLine = start_line;
method.endLine = end_line;
#if USE_REFLECTION
method.filtered = IsFiltered ("[" + monoMethod.DeclaringType.Assembly + "]" + monoMethod.DeclaringType + "::" + monoMethod.Name);
#else
method.filtered = IsFiltered ("[" + monoMethod.DeclaringType.Module.Name + "]" + monoMethod.DeclaringType + "::" + monoMethod.Name);
#endif
klass.methodsByMethod [monoMethod] = method;
if (klass.sourceFile == null) {
string sourceFile = entry.CompileUnit.SourceFile.FileName;
SourceFileCoverageData source = (SourceFileCoverageData)sources [sourceFile];
if (source == null) {
source = new SourceFileCoverageData (sourceFile);
sources [sourceFile] = source;
}
klass.sourceFile = source;
}
computeMethodCoverage (method, lines, cov_info);
}
}
}
| |
#region license
// Copyright (c) 2009 Rodrigo B. de Oliveira ([email protected])
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
// * Neither the name of Rodrigo B. de Oliveira nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE
// FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
// DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
// SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
// CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF
// THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#endregion
//
// DO NOT EDIT THIS FILE!
//
// This file was generated automatically by astgen.boo.
//
namespace Boo.Lang.Compiler.Ast
{
using System.Collections;
using System.Runtime.Serialization;
[System.Serializable]
public partial class BlockExpression : Expression, INodeWithParameters, INodeWithBody
{
protected ParameterDeclarationCollection _parameters;
protected TypeReference _returnType;
protected Block _body;
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
new public BlockExpression CloneNode()
{
return (BlockExpression)Clone();
}
/// <summary>
/// <see cref="Node.CleanClone"/>
/// </summary>
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
new public BlockExpression CleanClone()
{
return (BlockExpression)base.CleanClone();
}
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
override public NodeType NodeType
{
get { return NodeType.BlockExpression; }
}
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
override public void Accept(IAstVisitor visitor)
{
visitor.OnBlockExpression(this);
}
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
override public bool Matches(Node node)
{
if (node == null) return false;
if (NodeType != node.NodeType) return false;
var other = (BlockExpression)node;
if (!Node.AllMatch(_parameters, other._parameters)) return NoMatch("BlockExpression._parameters");
if (!Node.Matches(_returnType, other._returnType)) return NoMatch("BlockExpression._returnType");
if (!Node.Matches(_body, other._body)) return NoMatch("BlockExpression._body");
return true;
}
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
override public bool Replace(Node existing, Node newNode)
{
if (base.Replace(existing, newNode))
{
return true;
}
if (_parameters != null)
{
ParameterDeclaration item = existing as ParameterDeclaration;
if (null != item)
{
ParameterDeclaration newItem = (ParameterDeclaration)newNode;
if (_parameters.Replace(item, newItem))
{
return true;
}
}
}
if (_returnType == existing)
{
this.ReturnType = (TypeReference)newNode;
return true;
}
if (_body == existing)
{
this.Body = (Block)newNode;
return true;
}
return false;
}
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
override public object Clone()
{
BlockExpression clone = new BlockExpression();
clone._lexicalInfo = _lexicalInfo;
clone._endSourceLocation = _endSourceLocation;
clone._documentation = _documentation;
clone._isSynthetic = _isSynthetic;
clone._entity = _entity;
if (_annotations != null) clone._annotations = (Hashtable)_annotations.Clone();
clone._expressionType = _expressionType;
if (null != _parameters)
{
clone._parameters = _parameters.Clone() as ParameterDeclarationCollection;
clone._parameters.InitializeParent(clone);
}
if (null != _returnType)
{
clone._returnType = _returnType.Clone() as TypeReference;
clone._returnType.InitializeParent(clone);
}
if (null != _body)
{
clone._body = _body.Clone() as Block;
clone._body.InitializeParent(clone);
}
return clone;
}
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
override internal void ClearTypeSystemBindings()
{
_annotations = null;
_entity = null;
_expressionType = null;
if (null != _parameters)
{
_parameters.ClearTypeSystemBindings();
}
if (null != _returnType)
{
_returnType.ClearTypeSystemBindings();
}
if (null != _body)
{
_body.ClearTypeSystemBindings();
}
}
[System.Xml.Serialization.XmlArray]
[System.Xml.Serialization.XmlArrayItem(typeof(ParameterDeclaration))]
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
public ParameterDeclarationCollection Parameters
{
get { return _parameters ?? (_parameters = new ParameterDeclarationCollection(this)); }
set
{
if (_parameters != value)
{
_parameters = value;
if (null != _parameters)
{
_parameters.InitializeParent(this);
}
}
}
}
[System.Xml.Serialization.XmlElement]
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
public TypeReference ReturnType
{
get { return _returnType; }
set
{
if (_returnType != value)
{
_returnType = value;
if (null != _returnType)
{
_returnType.InitializeParent(this);
}
}
}
}
[System.Xml.Serialization.XmlElement]
[System.CodeDom.Compiler.GeneratedCodeAttribute("astgen.boo", "1")]
public Block Body
{
get
{
if (_body == null)
{
_body = new Block();
_body.InitializeParent(this);
}
return _body;
}
set
{
if (_body != value)
{
_body = value;
if (null != _body)
{
_body.InitializeParent(this);
}
}
}
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.IO;
using System.Diagnostics;
using System.Xml.XPath;
using System.Xml.Schema;
using System.Collections;
namespace System.Xml
{
/// <summary>
/// Implementations of XmlRawWriter are intended to be wrapped by the XmlWellFormedWriter. The
/// well-formed writer performs many checks on behalf of the raw writer, and keeps state that the
/// raw writer otherwise would have to keep. Therefore, the well-formed writer will call the
/// XmlRawWriter using the following rules, in order to make raw writers easier to implement:
///
/// 1. The well-formed writer keeps a stack of element names, and always calls
/// WriteEndElement(string, string, string) instead of WriteEndElement().
/// 2. The well-formed writer tracks namespaces, and will pass itself in via the
/// WellformedWriter property. It is used in the XmlRawWriter's implementation of IXmlNamespaceResolver.
/// Thus, LookupPrefix does not have to be implemented.
/// 3. The well-formed writer tracks write states, so the raw writer doesn't need to.
/// 4. The well-formed writer will always call StartElementContent.
/// 5. The well-formed writer will always call WriteNamespaceDeclaration for namespace nodes,
/// rather than calling WriteStartAttribute(). If the writer is supporting namespace declarations in chunks
/// (SupportsNamespaceDeclarationInChunks is true), the XmlWellFormedWriter will call WriteStartNamespaceDeclaration,
/// then any method that can be used to write out a value of an attribute (WriteString, WriteChars, WriteRaw, WriteCharEntity...)
/// and then WriteEndNamespaceDeclaration - instead of just a single WriteNamespaceDeclaration call. This feature will be
/// supported by raw writers serializing to text that wish to preserve the attribute value escaping etc.
/// 6. The well-formed writer guarantees a well-formed document, including correct call sequences,
/// correct namespaces, and correct document rule enforcement.
/// 7. All element and attribute names will be fully resolved and validated. Null will never be
/// passed for any of the name parts.
/// 8. The well-formed writer keeps track of xml:space and xml:lang.
/// 9. The well-formed writer verifies NmToken, Name, and QName values and calls WriteString().
/// </summary>
internal abstract partial class XmlRawWriter : XmlWriter
{
//
// Fields
//
// base64 converter
protected XmlRawWriterBase64Encoder base64Encoder;
// namespace resolver
protected IXmlNamespaceResolver resolver;
//
// XmlWriter implementation
//
// Raw writers do not have to track whether this is a well-formed document.
public override void WriteStartDocument()
{
throw new InvalidOperationException(SR.Xml_InvalidOperation);
}
public override void WriteStartDocument(bool standalone)
{
throw new InvalidOperationException(SR.Xml_InvalidOperation);
}
public override void WriteEndDocument()
{
throw new InvalidOperationException(SR.Xml_InvalidOperation);
}
public override void WriteDocType(string name, string pubid, string sysid, string subset)
{
}
// Raw writers do not have to keep a stack of element names.
public override void WriteEndElement()
{
throw new InvalidOperationException(SR.Xml_InvalidOperation);
}
// Raw writers do not have to keep a stack of element names.
public override void WriteFullEndElement()
{
throw new InvalidOperationException(SR.Xml_InvalidOperation);
}
// By default, base64-encode the bytes and emit the resulting characters through WriteRaw.
public override void WriteBase64(byte[] buffer, int index, int count)
{
if (base64Encoder == null)
{
base64Encoder = new XmlRawWriterBase64Encoder(this);
}
// Encode will call WriteRaw to write out the encoded characters
base64Encoder.Encode(buffer, index, count);
}
// Raw writers do not have to keep track of namespaces.
public override string LookupPrefix(string ns)
{
throw new InvalidOperationException(SR.Xml_InvalidOperation);
}
// Raw writers do not have to keep track of write states.
public override WriteState WriteState
{
get
{
throw new InvalidOperationException(SR.Xml_InvalidOperation);
}
}
// Raw writers do not have to keep track of xml:space.
public override XmlSpace XmlSpace
{
get { throw new InvalidOperationException(SR.Xml_InvalidOperation); }
}
// Raw writers do not have to keep track of xml:lang.
public override string XmlLang
{
get { throw new InvalidOperationException(SR.Xml_InvalidOperation); }
}
// Raw writers do not have to verify NmToken values.
public override void WriteNmToken(string name)
{
throw new InvalidOperationException(SR.Xml_InvalidOperation);
}
// Raw writers do not have to verify Name values.
public override void WriteName(string name)
{
throw new InvalidOperationException(SR.Xml_InvalidOperation);
}
// Raw writers do not have to verify QName values.
public override void WriteQualifiedName(string localName, string ns)
{
throw new InvalidOperationException(SR.Xml_InvalidOperation);
}
// Forward call to WriteString(string).
public override void WriteCData(string text)
{
WriteString(text);
}
// Forward call to WriteString(string).
public override void WriteCharEntity(char ch)
{
WriteString(new string(ch, 1));
}
// Forward call to WriteString(string).
public override void WriteSurrogateCharEntity(char lowChar, char highChar)
{
WriteString(new string(new char[] { lowChar, highChar }));
}
// Forward call to WriteString(string).
public override void WriteWhitespace(string ws)
{
WriteString(ws);
}
// Forward call to WriteString(string).
public override void WriteChars(char[] buffer, int index, int count)
{
WriteString(new string(buffer, index, count));
}
// Forward call to WriteString(string).
public override void WriteRaw(char[] buffer, int index, int count)
{
WriteString(new string(buffer, index, count));
}
// Forward call to WriteString(string).
public override void WriteRaw(string data)
{
WriteString(data);
}
// Override in order to handle Xml simple typed values and to pass resolver for QName values
public override void WriteValue(object value)
{
if (value == null)
{
throw new ArgumentNullException("value");
}
WriteString(XmlUntypedConverter.ToString(value, resolver));
}
// Override in order to handle Xml simple typed values and to pass resolver for QName values
public override void WriteValue(string value)
{
WriteString(value);
}
public override void WriteValue(DateTimeOffset value)
{
// For compatibility with custom writers, XmlWriter writes DateTimeOffset as DateTime.
// Our internal writers should use the DateTimeOffset-String conversion from XmlConvert.
WriteString(XmlConvert.ToString(value));
}
// Copying to XmlRawWriter is not currently supported.
public override void WriteAttributes(XmlReader reader, bool defattr)
{
throw new InvalidOperationException(SR.Xml_InvalidOperation);
}
public override void WriteNode(XmlReader reader, bool defattr)
{
throw new InvalidOperationException(SR.Xml_InvalidOperation);
}
//
// XmlRawWriter methods and properties
//
// Get and set the namespace resolver that's used by this RawWriter to resolve prefixes.
internal virtual IXmlNamespaceResolver NamespaceResolver
{
get
{
return resolver;
}
set
{
resolver = value;
}
}
// Write the xml declaration. This must be the first call.
internal virtual void WriteXmlDeclaration(XmlStandalone standalone)
{
}
internal virtual void WriteXmlDeclaration(string xmldecl)
{
}
// Called after an element's attributes have been enumerated, but before any children have been
// enumerated. This method must always be called, even for empty elements.
internal abstract void StartElementContent();
// Called before a root element is written (before the WriteStartElement call)
// the conformanceLevel specifies the current conformance level the writer is operating with.
internal virtual void OnRootElement(ConformanceLevel conformanceLevel) { }
// WriteEndElement() and WriteFullEndElement() overloads, in which caller gives the full name of the
// element, so that raw writers do not need to keep a stack of element names. This method should
// always be called instead of WriteEndElement() or WriteFullEndElement() without parameters.
internal abstract void WriteEndElement(string prefix, string localName, string ns);
internal virtual void WriteFullEndElement(string prefix, string localName, string ns)
{
WriteEndElement(prefix, localName, ns);
}
internal virtual void WriteQualifiedName(string prefix, string localName, string ns)
{
if (prefix.Length != 0)
{
WriteString(prefix);
WriteString(":");
}
WriteString(localName);
}
// This method must be called instead of WriteStartAttribute() for namespaces.
internal abstract void WriteNamespaceDeclaration(string prefix, string ns);
// When true, the XmlWellFormedWriter will call:
// 1) WriteStartNamespaceDeclaration
// 2) any method that can be used to write out a value of an attribute: WriteString, WriteChars, WriteRaw, WriteCharEntity...
// 3) WriteEndNamespaceDeclaration
// instead of just a single WriteNamespaceDeclaration call.
//
// This feature will be supported by raw writers serializing to text that wish to preserve the attribute value escaping and entities.
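// Illustrative call sequence (an assumption for clarity, not code in this file): when a raw
// writer returns true from SupportsNamespaceDeclarationInChunks and the well-formed writer
// needs to emit xmlns:x="urn:a&amp;b", it issues roughly
//     WriteStartNamespaceDeclaration("x");
//     WriteString("urn:a");
//     WriteCharEntity('&');
//     WriteString("b");
//     WriteEndNamespaceDeclaration();
// whereas a writer that returns false receives a single
// WriteNamespaceDeclaration("x", "urn:a&b") call with the already-assembled value.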
internal virtual bool SupportsNamespaceDeclarationInChunks
{
get
{
return false;
}
}
internal virtual void WriteStartNamespaceDeclaration(string prefix)
{
throw new NotSupportedException();
}
internal virtual void WriteEndNamespaceDeclaration()
{
throw new NotSupportedException();
}
// This is called when the remainder of a base64 value should be output.
internal virtual void WriteEndBase64()
{
// The Flush will call WriteRaw to write out the rest of the encoded characters
base64Encoder.Flush();
}
internal virtual void Close(WriteState currentState)
{
Dispose();
}
}
}
| |
//
// Copyright (c) 2004-2017 Jaroslaw Kowalski <[email protected]>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
using System.Diagnostics;
namespace NLog.UnitTests.Targets.Wrappers
{
using System;
using System.Collections.Generic;
using System.Threading;
using NLog.Common;
using NLog.Targets;
using NLog.Targets.Wrappers;
using Xunit;
public class BufferingTargetWrapperTests : NLogTestBase
{
[Fact]
public void BufferingTargetWrapperSyncTest1()
{
var myTarget = new MyTarget();
var targetWrapper = new BufferingTargetWrapper
{
WrappedTarget = myTarget,
BufferSize = 10,
};
InitializeTargets(myTarget, targetWrapper);
const int totalEvents = 100;
var continuationHit = new bool[totalEvents];
var lastException = new Exception[totalEvents];
var continuationThread = new Thread[totalEvents];
var hitCount = 0;
CreateContinuationFunc createAsyncContinuation =
eventNumber =>
ex =>
{
lastException[eventNumber] = ex;
continuationThread[eventNumber] = Thread.CurrentThread;
continuationHit[eventNumber] = true;
Interlocked.Increment(ref hitCount);
};
// write 9 events - they will all be buffered and no final continuation will be reached
var eventCounter = 0;
for (var i = 0; i < 9; ++i)
{
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
}
Assert.Equal(0, hitCount);
Assert.Equal(0, myTarget.WriteCount);
// write one more event - everything will be flushed
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
Assert.Equal(10, hitCount);
Assert.Equal(1, myTarget.BufferedWriteCount);
Assert.Equal(10, myTarget.BufferedTotalEvents);
Assert.Equal(10, myTarget.WriteCount);
for (var i = 0; i < hitCount; ++i)
{
Assert.Same(Thread.CurrentThread, continuationThread[i]);
Assert.Null(lastException[i]);
}
// write 9 more events - they will all be buffered and no final continuation will be reached
for (var i = 0; i < 9; ++i)
{
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
}
// no change
Assert.Equal(10, hitCount);
Assert.Equal(1, myTarget.BufferedWriteCount);
Assert.Equal(10, myTarget.BufferedTotalEvents);
Assert.Equal(10, myTarget.WriteCount);
Exception flushException = null;
var flushHit = new ManualResetEvent(false);
targetWrapper.Flush(
ex =>
{
flushException = ex;
flushHit.Set();
});
flushHit.WaitOne();
Assert.Null(flushException);
// make sure remaining events were written
Assert.Equal(19, hitCount);
Assert.Equal(2, myTarget.BufferedWriteCount);
Assert.Equal(19, myTarget.BufferedTotalEvents);
Assert.Equal(19, myTarget.WriteCount);
Assert.Equal(1, myTarget.FlushCount);
// flushes happen on the same thread
for (var i = 10; i < hitCount; ++i)
{
Assert.NotNull(continuationThread[i]);
Assert.Same(Thread.CurrentThread, continuationThread[i]);
Assert.Null(lastException[i]);
}
// flush again - should just invoke Flush() on the wrapped target
flushHit.Reset();
targetWrapper.Flush(
ex =>
{
flushException = ex;
flushHit.Set();
});
flushHit.WaitOne();
Assert.Equal(19, hitCount);
Assert.Equal(2, myTarget.BufferedWriteCount);
Assert.Equal(19, myTarget.BufferedTotalEvents);
Assert.Equal(19, myTarget.WriteCount);
Assert.Equal(2, myTarget.FlushCount);
targetWrapper.Close();
myTarget.Close();
}
[Fact]
public void BufferingTargetWithFallbackGroupAndFirstTargetFails_Write_SecondTargetWritesEvents()
{
var myTarget = new MyTarget { FailCounter = 1 };
var myTarget2 = new MyTarget();
var fallbackGroup = new FallbackGroupTarget(myTarget, myTarget2);
var targetWrapper = new BufferingTargetWrapper
{
WrappedTarget = fallbackGroup,
BufferSize = 10,
};
InitializeTargets(myTarget, targetWrapper, myTarget2, fallbackGroup);
const int totalEvents = 100;
var continuationHit = new bool[totalEvents];
var lastException = new Exception[totalEvents];
var continuationThread = new Thread[totalEvents];
CreateContinuationFunc createAsyncContinuation =
eventNumber =>
ex =>
{
lastException[eventNumber] = ex;
continuationThread[eventNumber] = Thread.CurrentThread;
continuationHit[eventNumber] = true;
};
// write 9 events - they will all be buffered and no final continuation will be reached
var eventCounter = 0;
for (var i = 0; i < 9; ++i)
{
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
}
Assert.Equal(0, myTarget.WriteCount);
// write one more event - everything will be flushed
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
Assert.Equal(1, myTarget.WriteCount);
Assert.Equal(10, myTarget2.WriteCount);
targetWrapper.Close();
myTarget.Close();
}
[Fact]
public void BufferingTargetWrapperSyncWithTimedFlushTest()
{
var myTarget = new MyTarget();
var targetWrapper = new BufferingTargetWrapper
{
WrappedTarget = myTarget,
BufferSize = 10,
FlushTimeout = 50,
};
InitializeTargets(myTarget, targetWrapper);
const int totalEvents = 100;
var continuationHit = new bool[totalEvents];
var lastException = new Exception[totalEvents];
var continuationThread = new Thread[totalEvents];
var hitCount = 0;
CreateContinuationFunc createAsyncContinuation =
eventNumber =>
ex =>
{
lastException[eventNumber] = ex;
continuationThread[eventNumber] = Thread.CurrentThread;
continuationHit[eventNumber] = true;
Interlocked.Increment(ref hitCount);
};
// write 9 events - they will all be buffered and no final continuation will be reached
var eventCounter = 0;
for (var i = 0; i < 9; ++i)
{
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
}
Assert.Equal(0, hitCount);
Assert.Equal(0, myTarget.WriteCount);
// sleep 100 ms, this will trigger the timer and flush all events
Thread.Sleep(100);
Assert.Equal(9, hitCount);
Assert.Equal(1, myTarget.BufferedWriteCount);
Assert.Equal(9, myTarget.BufferedTotalEvents);
Assert.Equal(9, myTarget.WriteCount);
for (var i = 0; i < hitCount; ++i)
{
Assert.NotSame(Thread.CurrentThread, continuationThread[i]);
Assert.Null(lastException[i]);
}
// write 11 more events, 10 will be hit immediately because the buffer will fill up
// 1 will be pending
for (var i = 0; i < 11; ++i)
{
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
}
Assert.Equal(19, hitCount);
Assert.Equal(2, myTarget.BufferedWriteCount);
Assert.Equal(19, myTarget.BufferedTotalEvents);
Assert.Equal(19, myTarget.WriteCount);
// sleep 100ms and the last remaining one will be flushed
Thread.Sleep(100);
Assert.Equal(20, hitCount);
Assert.Equal(3, myTarget.BufferedWriteCount);
Assert.Equal(20, myTarget.BufferedTotalEvents);
Assert.Equal(20, myTarget.WriteCount);
}
[Fact]
public void BufferingTargetWrapperAsyncTest1()
{
var myTarget = new MyAsyncTarget();
var targetWrapper = new BufferingTargetWrapper
{
WrappedTarget = myTarget,
BufferSize = 10,
};
InitializeTargets(myTarget, targetWrapper);
const int totalEvents = 100;
var continuationHit = new bool[totalEvents];
var lastException = new Exception[totalEvents];
var continuationThread = new Thread[totalEvents];
var hitCount = 0;
CreateContinuationFunc createAsyncContinuation =
eventNumber =>
ex =>
{
lastException[eventNumber] = ex;
continuationThread[eventNumber] = Thread.CurrentThread;
continuationHit[eventNumber] = true;
Interlocked.Increment(ref hitCount);
};
// write 9 events - they will all be buffered and no final continuation will be reached
var eventCounter = 0;
for (var i = 0; i < 9; ++i)
{
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
}
Assert.Equal(0, hitCount);
// write one more event - everything will be flushed
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
while (hitCount < 10)
{
Thread.Sleep(10);
}
Assert.Equal(10, hitCount);
Assert.Equal(1, myTarget.BufferedWriteCount);
Assert.Equal(10, myTarget.BufferedTotalEvents);
for (var i = 0; i < hitCount; ++i)
{
Assert.NotSame(Thread.CurrentThread, continuationThread[i]);
Assert.Null(lastException[i]);
}
// write 9 more events - they will all be buffered and no final continuation will be reached
for (var i = 0; i < 9; ++i)
{
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
}
// no change
Assert.Equal(10, hitCount);
Assert.Equal(1, myTarget.BufferedWriteCount);
Assert.Equal(10, myTarget.BufferedTotalEvents);
Exception flushException = null;
var flushHit = new ManualResetEvent(false);
targetWrapper.Flush(
ex =>
{
flushException = ex;
flushHit.Set();
Thread.Sleep(10);
});
flushHit.WaitOne();
Assert.Null(flushException);
// make sure remaining events were written
Assert.Equal(19, hitCount);
Assert.Equal(2, myTarget.BufferedWriteCount);
Assert.Equal(19, myTarget.BufferedTotalEvents);
// flushes happen on another thread
for (var i = 10; i < hitCount; ++i)
{
Assert.NotNull(continuationThread[i]);
Assert.NotSame(Thread.CurrentThread, continuationThread[i]);
Assert.Null(lastException[i]);
}
// flush again - should not do anything
flushHit.Reset();
targetWrapper.Flush(
ex =>
{
flushException = ex;
flushHit.Set();
Thread.Sleep(10);
});
flushHit.WaitOne();
Assert.Equal(19, hitCount);
Assert.Equal(2, myTarget.BufferedWriteCount);
Assert.Equal(19, myTarget.BufferedTotalEvents);
targetWrapper.Close();
myTarget.Close();
}
[Fact]
public void BufferingTargetWrapperSyncWithTimedFlushNonSlidingTest()
{
var myTarget = new MyTarget();
var targetWrapper = new BufferingTargetWrapper
{
WrappedTarget = myTarget,
BufferSize = 10,
FlushTimeout = 400,
SlidingTimeout = false,
};
InitializeTargets(myTarget, targetWrapper);
const int totalEvents = 100;
var continuationHit = new bool[totalEvents];
var lastException = new Exception[totalEvents];
var continuationThread = new Thread[totalEvents];
var hitCount = 0;
var resetEvent = new ManualResetEvent(false);
CreateContinuationFunc createAsyncContinuation =
eventNumber =>
ex =>
{
lastException[eventNumber] = ex;
continuationThread[eventNumber] = Thread.CurrentThread;
continuationHit[eventNumber] = true;
Interlocked.Increment(ref hitCount);
if (eventNumber > 0)
{
resetEvent.Set();
}
};
var eventCounter = 0;
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
Assert.Equal(0, hitCount);
Assert.Equal(0, myTarget.WriteCount);
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
Assert.True(resetEvent.WaitOne(5000));
Assert.Equal(2, hitCount);
Assert.Equal(2, myTarget.WriteCount);
}
[Fact]
public void BufferingTargetWrapperSyncWithTimedFlushSlidingTest()
{
var myTarget = new MyTarget();
var targetWrapper = new BufferingTargetWrapper
{
WrappedTarget = myTarget,
BufferSize = 10,
FlushTimeout = 400,
};
InitializeTargets(myTarget, targetWrapper);
const int totalEvents = 100;
var continuationHit = new bool[totalEvents];
var lastException = new Exception[totalEvents];
var continuationThread = new Thread[totalEvents];
var hitCount = 0;
CreateContinuationFunc createAsyncContinuation =
eventNumber =>
ex =>
{
lastException[eventNumber] = ex;
continuationThread[eventNumber] = Thread.CurrentThread;
continuationHit[eventNumber] = true;
Interlocked.Increment(ref hitCount);
};
var eventCounter = 0;
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
Thread.Sleep(100);
Assert.Equal(0, hitCount);
Assert.Equal(0, myTarget.WriteCount);
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(eventCounter++)));
Thread.Sleep(100);
Assert.Equal(0, hitCount);
Assert.Equal(0, myTarget.WriteCount);
Thread.Sleep(600);
Assert.Equal(2, hitCount);
Assert.Equal(2, myTarget.WriteCount);
}
[Fact]
public void WhenWrappedTargetThrowsExceptionThisIsHandled()
{
var myTarget = new MyTarget { ThrowException = true };
var bufferingTargetWrapper = new BufferingTargetWrapper
{
WrappedTarget = myTarget,
FlushTimeout = -1
};
InitializeTargets(myTarget, bufferingTargetWrapper);
bufferingTargetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(_ => { }));
var flushHit = new ManualResetEvent(false);
bufferingTargetWrapper.Flush(ex => flushHit.Set());
flushHit.WaitOne();
Assert.Equal(1, myTarget.FlushCount);
}
[Fact]
public void BufferingTargetWrapperSyncWithOverflowDiscardTest()
{
const int totalEvents = 15;
const int bufferSize = 10;
var myTarget = new MyTarget();
var targetWrapper = new BufferingTargetWrapper
{
WrappedTarget = myTarget,
BufferSize = bufferSize,
OverflowAction = BufferingTargetWrapperOverflowAction.Discard
};
InitializeTargets(myTarget, targetWrapper);
var continuationHit = new bool[totalEvents];
var hitCount = 0;
CreateContinuationFunc createAsyncContinuation =
eventNumber =>
ex =>
{
continuationHit[eventNumber] = true;
Interlocked.Increment(ref hitCount);
};
Assert.Equal(0, myTarget.WriteCount);
for (int i = 0; i < totalEvents; i++) {
targetWrapper.WriteAsyncLogEvent(new LogEventInfo().WithContinuation(createAsyncContinuation(i)));
}
// No events should be written to the wrapped target unless flushing manually.
Assert.Equal(0, myTarget.WriteCount);
Assert.Equal(0, myTarget.BufferedWriteCount);
Assert.Equal(0, myTarget.BufferedTotalEvents);
targetWrapper.Flush(e => { });
Assert.Equal(bufferSize, hitCount);
Assert.Equal(bufferSize, myTarget.WriteCount);
Assert.Equal(1, myTarget.BufferedWriteCount);
Assert.Equal(bufferSize, myTarget.BufferedTotalEvents);
// Validate that we dropped the oldest events.
Assert.False(continuationHit[totalEvents-bufferSize-1]);
Assert.True(continuationHit[totalEvents - bufferSize]);
// Make sure the events do not stay in the buffer.
targetWrapper.Flush(e => { });
Assert.Equal(bufferSize, hitCount);
Assert.Equal(bufferSize, myTarget.WriteCount);
Assert.Equal(1, myTarget.BufferedWriteCount);
Assert.Equal(bufferSize, myTarget.BufferedTotalEvents);
}
private static void InitializeTargets(params Target[] targets)
{
foreach (var target in targets)
{
target.Initialize(null);
}
}
private class MyAsyncTarget : Target
{
public int BufferedWriteCount { get; private set; }
public int BufferedTotalEvents { get; private set; }
protected override void Write(LogEventInfo logEvent)
{
throw new NotSupportedException();
}
protected override void Write(IList<AsyncLogEventInfo> logEvents)
{
this.BufferedWriteCount++;
this.BufferedTotalEvents += logEvents.Count;
for (int i = 0; i < logEvents.Count; ++i)
{
var @event = logEvents[i];
ThreadPool.QueueUserWorkItem(
s =>
{
if (this.ThrowExceptions)
{
@event.Continuation(new InvalidOperationException("Some problem!"));
}
else
{
@event.Continuation(null);
}
});
}
}
protected override void FlushAsync(AsyncContinuation asyncContinuation)
{
ThreadPool.QueueUserWorkItem(
s => asyncContinuation(null));
}
public bool ThrowExceptions { get; set; }
}
private class MyTarget : Target
{
public int FlushCount { get; private set; }
public int WriteCount { get; private set; }
public int BufferedWriteCount { get; private set; }
public int BufferedTotalEvents { get; private set; }
public bool ThrowException { get; set; }
public int FailCounter { get; set; }
protected override void Write(IList<AsyncLogEventInfo> logEvents)
{
this.BufferedWriteCount++;
this.BufferedTotalEvents += logEvents.Count;
base.Write(logEvents);
}
protected override void Write(LogEventInfo logEvent)
{
Assert.True(this.FlushCount <= this.WriteCount);
this.WriteCount++;
if (ThrowException)
{
throw new Exception("Target exception");
}
if (this.FailCounter > 0)
{
this.FailCounter--;
throw new InvalidOperationException("Some failure.");
}
}
protected override void FlushAsync(AsyncContinuation asyncContinuation)
{
this.FlushCount++;
asyncContinuation(null);
}
}
private delegate AsyncContinuation CreateContinuationFunc(int eventNumber);
}
}
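// Minimal configuration sketch (not part of the test suite above; assumes NLog's standard
// programmatic configuration API). It shows how a BufferingTargetWrapper like the ones the
// tests exercise is typically wired around a concrete target.
namespace NLog.UnitTests.Targets.Wrappers.Examples
{
    using NLog;
    using NLog.Config;
    using NLog.Targets;
    using NLog.Targets.Wrappers;
    internal static class BufferingWrapperUsageSketch
    {
        public static void Configure()
        {
            // Concrete target whose writes will be batched by the wrapper.
            var fileTarget = new FileTarget { Name = "file", FileName = "app.log" };
            var buffered = new BufferingTargetWrapper
            {
                Name = "buffered",
                WrappedTarget = fileTarget,
                BufferSize = 10,    // flush once 10 events are buffered...
                FlushTimeout = 400, // ...or after 400 ms of inactivity, whichever comes first
            };
            var config = new LoggingConfiguration();
            config.AddTarget("buffered", buffered);
            config.LoggingRules.Add(new LoggingRule("*", LogLevel.Debug, buffered));
            LogManager.Configuration = config;
            LogManager.GetCurrentClassLogger().Info("buffered until the size or timeout threshold is hit");
            LogManager.Flush(); // pushes any remaining buffered events through the wrapped target
        }
    }
}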
| |
//////////////////////////////////////////////////////////////////////////
// Code Named: VG-Ripper
// Function : Extracts Images posted on RiP forums and attempts to fetch
// them to disk.
//
// This software is licensed under the MIT license. See license.txt for
// details.
//
// Copyright (c) The Watcher
// Partial Rights Reserved.
//
//////////////////////////////////////////////////////////////////////////
// This file is part of the RiP Ripper project base.
using System;
using System.Collections;
using System.IO;
using System.Net;
using System.Threading;
namespace Ripper
{
using Ripper.Core.Components;
using Ripper.Core.Objects;
/// <summary>
/// Worker class to get images from ImageThrust.com
/// </summary>
public class ImageThrust : ServiceTemplate
{
public ImageThrust(ref string sSavePath, ref string strURL, ref string thumbURL, ref string imageName, ref int imageNumber, ref Hashtable hashtable)
: base(sSavePath, strURL, thumbURL, imageName, imageNumber, ref hashtable)
{
// Add constructor logic here
}
protected override bool DoDownload()
{
var strImgURL = this.ImageLinkURL;
if (this.EventTable.ContainsKey(strImgURL))
{
return true;
}
var strFilePath = string.Empty;
strFilePath = strImgURL.Substring(strImgURL.IndexOf("view-image/") + 11);
strFilePath = strFilePath.Remove(strFilePath.Length - 5, 5);
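// Illustrative example (the exact ImageThrust URL shape is assumed): for a link such as
// http://www.imagethrust.com/view-image/abc123/photo.jpg.html the substring after
// "view-image/" is "abc123/photo.jpg.html", and removing the trailing five characters
// strips the ".html" suffix, leaving "abc123/photo.jpg" as the relative file name.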
try
{
if (!Directory.Exists(this.SavePath))
Directory.CreateDirectory(this.SavePath);
}
catch (IOException ex)
{
// MainForm.DeleteMessage = ex.Message;
// MainForm.Delete = true;
return false;
}
strFilePath = Path.Combine(this.SavePath, Utility.RemoveIllegalCharecters(strFilePath));
var CCObj = new CacheObject();
CCObj.IsDownloaded = false;
CCObj.FilePath = strFilePath;
CCObj.Url = strImgURL;
try
{
this.EventTable.Add(strImgURL, CCObj);
}
catch (ThreadAbortException)
{
return true;
}
catch (Exception)
{
if (this.EventTable.ContainsKey(strImgURL))
{
return false;
}
else
{
this.EventTable.Add(strImgURL, CCObj);
}
}
var strIVPage = this.GetImageHostPage(ref strImgURL);
if (strIVPage.Length < 10)
{
return false;
}
var strNewURL = string.Empty;
var iStartSRC = 0;
var iEndSRC = 0;
iStartSRC = strIVPage.IndexOf("src=\"/i/");
if (iStartSRC < 0)
{
return false;
}
iStartSRC += 8;
iEndSRC = strIVPage.IndexOf(".jpg\" id=\"picture\"", iStartSRC);
if (iEndSRC < 0)
{
return false;
}
strNewURL =
$"{strImgURL.Substring(0, strImgURL.IndexOf("/", 8) + 1)}i/{strIVPage.Substring(iStartSRC, iEndSRC - iStartSRC) + ".jpg"}";
//////////////////////////////////////////////////////////////////////////
try
{
var lHttpWebRequest = (HttpWebRequest)WebRequest.Create(strNewURL);
lHttpWebRequest.UserAgent =
"Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.7.10) Gecko/20050716 Firefox/1.0.6";
lHttpWebRequest.Headers.Add("Accept-Language: en-us,en;q=0.5");
lHttpWebRequest.Headers.Add("Accept-Encoding: gzip,deflate");
lHttpWebRequest.Headers.Add("Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7");
lHttpWebRequest.Referer = strImgURL;
lHttpWebRequest.Accept = "image/png,*/*;q=0.5";
lHttpWebRequest.KeepAlive = true;
var lHttpWebResponse = (HttpWebResponse)lHttpWebRequest.GetResponse();
var lHttpWebResponseStream = lHttpWebResponse.GetResponseStream(); // reuse the response already obtained instead of issuing a second request
if (lHttpWebResponse.ContentType.IndexOf("image") < 0)
{
return false;
}
switch (lHttpWebResponse.ContentType.ToLower())
{
case "image/jpeg":
strFilePath += ".jpg";
break;
case "image/gif":
strFilePath += ".gif";
break;
case "image/png":
strFilePath += ".png";
break;
}
var NewAlteredPath = Utility.GetSuitableName(strFilePath);
if (strFilePath != NewAlteredPath)
{
strFilePath = NewAlteredPath;
((CacheObject)this.EventTable[this.ImageLinkURL]).FilePath = strFilePath;
}
lHttpWebResponseStream.Close();
var client = new WebClient();
client.Headers.Add("Accept-Language: en-us,en;q=0.5");
client.Headers.Add("Accept-Encoding: gzip,deflate");
client.Headers.Add("Accept-Charset: ISO-8859-1,utf-8;q=0.7,*;q=0.7");
client.Headers.Add("Referer: " + strImgURL);
client.Headers.Add(
"User-Agent: Mozilla/5.0 (Windows; U; Windows NT 5.2; en-US; rv:1.7.10) Gecko/20050716 Firefox/1.0.6");
client.DownloadFile(strNewURL, strFilePath);
client.Dispose();
}
catch (ThreadAbortException)
{
((CacheObject)this.EventTable[strImgURL]).IsDownloaded = false;
ThreadManager.GetInstance().RemoveThreadbyId(this.ImageLinkURL);
return true;
}
catch (IOException ex)
{
// MainForm.DeleteMessage = ex.Message;
// MainForm.Delete = true;
((CacheObject)this.EventTable[strImgURL]).IsDownloaded = false;
ThreadManager.GetInstance().RemoveThreadbyId(this.ImageLinkURL);
return true;
}
catch (WebException)
{
((CacheObject)this.EventTable[strImgURL]).IsDownloaded = false;
ThreadManager.GetInstance().RemoveThreadbyId(this.ImageLinkURL);
return false;
}
((CacheObject)this.EventTable[this.ImageLinkURL]).IsDownloaded = true;
// CacheController.GetInstance().u_s_LastPic = ((CacheObject)eventTable[mstrURL]).FilePath;
CacheController.Instance().LastPic =
((CacheObject)this.EventTable[this.ImageLinkURL]).FilePath = strFilePath;
return true;
}
//////////////////////////////////////////////////////////////////////////
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading.Tasks;
using Orleans;
using Orleans.Runtime;
using Orleans.Concurrency;
using Orleans.Samples.Chirper.GrainInterfaces;
using Orleans.Providers;
namespace Orleans.Samples.Chirper.Grains
{
public class ChirperAccountState
{
/// <summary>The list of publishers who this user is following</summary>
public Dictionary<ChirperUserInfo, IChirperPublisher> Subscriptions { get; set; }
/// <summary>The list of subscribers who are following this user</summary>
public Dictionary<ChirperUserInfo, IChirperSubscriber> Followers { get; set; }
/// <summary>Chirp messages recently received by this user</summary>
public Queue<ChirperMessage> RecentReceivedMessages { get; set; }
/// <summary>Chirp messages recently published by this user</summary>
public Queue<ChirperMessage> MyPublishedMessages { get; set; }
public long UserId { get; set; }
/// <summary>Alias / username for this actor / user</summary>
public string UserAlias { get; set; }
}
[Reentrant]
[StorageProvider(ProviderName = "MemoryStore")]
public class ChirperAccount : Grain<ChirperAccountState>, IChirperAccount
{
/// <summary>Size for the recently received message cache</summary>
private int ReceivedMessagesCacheSize;
/// <summary>Size for the published message cache</summary>
private int PublishedMessagesCacheSize;
private ObserverSubscriptionManager<IChirperViewer> viewers;
private Logger logger;
private const int MAX_MESSAGE_LENGTH = 280;
private string Me
{
get
{
return String.Format("I am: [{0}.{1}]", State.UserAlias, State.UserId);
}
}
#region Grain overrides
public override Task OnActivateAsync()
{
ReceivedMessagesCacheSize = 100;
PublishedMessagesCacheSize = 100;
if (State.RecentReceivedMessages == null)
{
State.RecentReceivedMessages = new Queue<ChirperMessage>(ReceivedMessagesCacheSize);
}
if (State.MyPublishedMessages == null)
{
State.MyPublishedMessages = new Queue<ChirperMessage>(PublishedMessagesCacheSize);
}
if (State.Followers == null)
{
State.Followers = new Dictionary<ChirperUserInfo, IChirperSubscriber>();
}
if (State.Subscriptions == null)
{
State.Subscriptions = new Dictionary<ChirperUserInfo, IChirperPublisher>();
}
State.UserId = this.GetPrimaryKeyLong();
logger = GetLogger("ChirperAccountGrain");
if (logger.IsVerbose) logger.Verbose("{0}: Created activation of ChirperAccount grain.", Me);
viewers = new ObserverSubscriptionManager<IChirperViewer>();
// Viewers are transient connections -- they will need to reconnect themselves
return TaskDone.Done;
}
#endregion
#region IChirperAccountGrain interface methods
public async Task SetUserDetails(ChirperUserInfo userInfo)
{
string alias = userInfo.UserAlias;
if (alias != null)
{
if (logger.IsVerbose)
logger.Verbose("{0} Setting UserAlias = {1}.", Me, alias);
State.UserAlias = alias;
await WriteStateAsync();
}
}
public async Task PublishMessage(string message)
{
ChirperMessage chirp = CreateNewChirpMessage(message);
if (logger.IsVerbose)
logger.Verbose("{0} Publishing new chirp message = {1}.", Me, chirp);
State.MyPublishedMessages.Enqueue(chirp);
// only relevant when not using fixed queue
while (State.MyPublishedMessages.Count > PublishedMessagesCacheSize) // keep no more than the configured number of cached published messages
{
State.MyPublishedMessages.Dequeue();
}
await WriteStateAsync();
List<Task> promises = new List<Task>();
if (State.Followers.Count > 0)
{
// Notify any subscribers that a new chirp has published
if (logger.IsVerbose)
logger.Verbose("{0} Sending new chirp message to {1} subscribers.", Me, State.Followers.Count);
foreach (IChirperSubscriber subscriber in State.Followers.Values)
{
promises.Add(subscriber.NewChirp(chirp));
}
}
if (viewers.Count > 0)
{
// Notify any viewers that a new chirp has published
if (logger.IsVerbose) logger.Verbose("{0} Sending new chirp message to {1} viewers.", Me, viewers.Count);
viewers.Notify(
v => v.NewChirpArrived(chirp)
);
}
await Task.WhenAll(promises.ToArray());
}
public Task<List<ChirperMessage>> GetReceivedMessages(int n, int start)
{
if (start < 0) start = 0;
if ((start + n) > State.RecentReceivedMessages.Count)
{
n = State.RecentReceivedMessages.Count - start;
}
return Task.FromResult(
State.RecentReceivedMessages.Skip(start).Take(n).ToList());
}
public async Task FollowUserId(long userId)
{
if (logger.IsVerbose) logger.Verbose("{0} FollowUserId({1}).", Me, userId);
IChirperPublisher userToFollow = GrainFactory.GetGrain<IChirperPublisher>(userId);
string alias = await userToFollow.GetUserAlias();
await FollowUser(userId, alias, userToFollow);
}
public async Task UnfollowUserId(long userId)
{
if (logger.IsVerbose) logger.Verbose("{0} UnfollowUserId({1}).", Me, userId);
IChirperPublisher userToUnfollow = GrainFactory.GetGrain<IChirperPublisher>(userId);
string alias = await userToUnfollow.GetUserAlias();
await UnfollowUser(userId, alias, userToUnfollow);
}
public Task<List<ChirperUserInfo>> GetFollowingList()
{
return Task.FromResult(State.Subscriptions.Keys.ToList());
}
public Task<List<ChirperUserInfo>> GetFollowersList()
{
return Task.FromResult(State.Followers.Keys.ToList());
}
public Task ViewerConnect(IChirperViewer viewer)
{
viewers.Subscribe(viewer);
return TaskDone.Done;
}
public Task ViewerDisconnect(IChirperViewer viewer)
{
viewers.Unsubscribe(viewer);
return TaskDone.Done;
}
#endregion
#region IChirperPublisher interface methods
public Task<long> GetUserId()
{
return Task.FromResult(State.UserId);
}
public Task<string> GetUserAlias()
{
return Task.FromResult(State.UserAlias);
}
public Task<List<ChirperMessage>> GetPublishedMessages(int n, int start)
{
if (start < 0) start = 0;
if ((start + n) > State.MyPublishedMessages.Count) n = State.MyPublishedMessages.Count - start;
return Task.FromResult(
State.MyPublishedMessages.Skip(start).Take(n).ToList());
}
public Task AddFollower(string alias, long userId, IChirperSubscriber follower)
{
ChirperUserInfo userInfo = ChirperUserInfo.GetUserInfo(userId, alias);
if (State.Followers.ContainsKey(userInfo))
{
State.Followers.Remove(userInfo);
}
State.Followers[userInfo] = follower;
return WriteStateAsync();
}
public async Task RemoveFollower(string alias, IChirperSubscriber follower)
{
IEnumerable<KeyValuePair<ChirperUserInfo, IChirperSubscriber>> found = State.Followers.Where(f => f.Key.UserAlias == alias).ToList();
if (found.Any())
{
ChirperUserInfo userInfo = found.FirstOrDefault().Key;
State.Followers.Remove(userInfo);
await WriteStateAsync();
}
}
#endregion
#region IChirperSubscriber notification callback interface
public async Task NewChirp(ChirperMessage chirp)
{
if (logger.IsVerbose) logger.Verbose("{0} Received chirp message = {1}", Me, chirp);
State.RecentReceivedMessages.Enqueue(chirp);
// only relevant when not using fixed queue
while (State.RecentReceivedMessages.Count > ReceivedMessagesCacheSize) // keep no more than the configured number of cached received messages
{
State.RecentReceivedMessages.Dequeue();
}
await WriteStateAsync();
if (viewers.Count > 0)
{
// Notify any viewers that a new chirp has published
if (logger.IsVerbose)
logger.Verbose("{0} Sending received chirp message to {1} viewers", Me, viewers.Count);
viewers.Notify(
v => v.NewChirpArrived(chirp)
);
}
#if DEBUG
const string busywait = "#busywait";
var i = chirp.Message.IndexOf(busywait, StringComparison.Ordinal);
int n;
if (i >= 0 && Int32.TryParse(chirp.Message.Substring(i + busywait.Length + 1), out n))
{
var watch = new Stopwatch();
watch.Start();
while (watch.ElapsedMilliseconds < n)
{
// spin
}
watch.Stop();
}
#endif
}
#endregion
private async Task FollowUser(long userId, string userAlias, IChirperPublisher userToFollow)
{
if (logger.IsVerbose) logger.Verbose("{0} FollowUser({1}).", Me, userAlias);
await userToFollow.AddFollower(State.UserAlias, State.UserId, this);
ChirperUserInfo userInfo = ChirperUserInfo.GetUserInfo(userId, userAlias);
State.Subscriptions[userInfo] = userToFollow;
await WriteStateAsync();
// Notify any viewers that a subscription has been added for this user
viewers.Notify(
v => v.SubscriptionAdded(userInfo)
);
}
private async Task UnfollowUser(long userId, string userAlias, IChirperPublisher userToUnfollow)
{
await userToUnfollow.RemoveFollower(State.UserAlias, this);
ChirperUserInfo userInfo = ChirperUserInfo.GetUserInfo(userId, userAlias);
State.Subscriptions.Remove(userInfo);
await WriteStateAsync();
// Notify any viewers that a subscription has been removed for this user
viewers.Notify(
v => v.SubscriptionRemoved(userInfo)
);
}
private ChirperMessage CreateNewChirpMessage(string message)
{
var chirp = new ChirperMessage();
chirp.PublisherId = State.UserId;
chirp.PublisherAlias = State.UserAlias;
chirp.Timestamp = DateTime.Now;
chirp.Message = message;
if (chirp.Message.Length > MAX_MESSAGE_LENGTH) chirp.Message = message.Substring(0, MAX_MESSAGE_LENGTH);
return chirp;
}
}
}
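// Illustrative client-side flow (an assumption for clarity; the exact client bootstrap API
// depends on the Orleans version, but the grain calls mirror the interfaces used above):
//   var alice = grainFactory.GetGrain<IChirperAccount>(1);
//   var bob   = grainFactory.GetGrain<IChirperAccount>(2);
//   await alice.SetUserDetails(ChirperUserInfo.GetUserInfo(1, "alice"));
//   await bob.FollowUserId(1);                            // bob subscribes to alice
//   await alice.PublishMessage("hello, chirpers");        // fanned out to followers/viewers
//   var timeline = await bob.GetReceivedMessages(10, 0);  // newest cached chirps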
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using Microsoft.CodeAnalysis.Diagnostics;
using Test.Utilities;
using Xunit;
namespace System.Runtime.Analyzers.UnitTests
{
public class SpecifyCultureInfoTests : DiagnosticAnalyzerTestBase
{
protected override DiagnosticAnalyzer GetBasicDiagnosticAnalyzer()
{
return new SpecifyCultureInfoAnalyzer();
}
protected override DiagnosticAnalyzer GetCSharpDiagnosticAnalyzer()
{
return new SpecifyCultureInfoAnalyzer();
}
[Fact]
public void CA1304_PlainString_CSharp()
{
VerifyCSharp(@"
using System;
using System.Globalization;
public class CultureInfoTestClass0
{
public string SpecifyCultureInfo01()
{
return ""foo"".ToLower();
}
}",
GetCSharpResultAt(9, 16, SpecifyCultureInfoAnalyzer.Rule, "string.ToLower()", "CultureInfoTestClass0.SpecifyCultureInfo01()", "string.ToLower(CultureInfo)"));
}
[Fact]
public void CA1304_VariableStringInsideDifferentContainingSymbols_CSharp()
{
VerifyCSharp(@"
using System;
using System.Globalization;
public class CultureInfoTestClass1
{
public string LowercaseAString(string name)
{
return name.ToLower();
}
public string InsideALambda(string insideLambda)
{
Func<string> ddd = () =>
{
return insideLambda.ToLower();
};
return null;
}
public string PropertyWithALambda
{
get
{
Func<string> ddd = () =>
{
return ""InsideGetter"".ToLower();
};
return null;
}
}
}
",
GetCSharpResultAt(9, 16, SpecifyCultureInfoAnalyzer.Rule, "string.ToLower()", "CultureInfoTestClass1.LowercaseAString(string)", "string.ToLower(CultureInfo)"),
GetCSharpResultAt(16, 20, SpecifyCultureInfoAnalyzer.Rule, "string.ToLower()", "CultureInfoTestClass1.InsideALambda(string)", "string.ToLower(CultureInfo)"),
GetCSharpResultAt(28, 24, SpecifyCultureInfoAnalyzer.Rule, "string.ToLower()", "CultureInfoTestClass1.PropertyWithALambda.get", "string.ToLower(CultureInfo)"));
}
[Fact]
public void CA1304_MethodOverloadHasCultureInfoAsFirstArgument_CSharp()
{
VerifyCSharp(@"
using System;
using System.Globalization;
public class CultureInfoTestClass2
{
public static void Method()
{
MethodOverloadHasCultureInfoAsFirstArgument(""Foo"");
}
public static void MethodOverloadHasCultureInfoAsFirstArgument(string format)
{
MethodOverloadHasCultureInfoAsFirstArgument(CultureInfo.CurrentCulture, format);
}
public static void MethodOverloadHasCultureInfoAsFirstArgument(CultureInfo provider, string format)
{
Console.WriteLine(string.Format(provider, format));
}
}",
GetCSharpResultAt(9, 9, SpecifyCultureInfoAnalyzer.Rule, "CultureInfoTestClass2.MethodOverloadHasCultureInfoAsFirstArgument(string)", "CultureInfoTestClass2.Method()", "CultureInfoTestClass2.MethodOverloadHasCultureInfoAsFirstArgument(CultureInfo, string)"));
}
[Fact]
public void CA1304_MethodOverloadHasCultureInfoAsLastArgument_CSharp()
{
VerifyCSharp(@"
using System;
using System.Globalization;
public class CultureInfoTestClass2
{
public static void Method()
{
MethodOverloadHasCultureInfoAsLastArgument(""Foo"");
}
public static void MethodOverloadHasCultureInfoAsLastArgument(string format)
{
MethodOverloadHasCultureInfoAsLastArgument(format, CultureInfo.CurrentCulture);
}
public static void MethodOverloadHasCultureInfoAsLastArgument(string format, CultureInfo provider)
{
Console.WriteLine(string.Format(provider, format));
}
public static void MethodOverloadHasCultureInfoAsLastArgument(CultureInfo provider, string format)
{
Console.WriteLine(string.Format(provider, format));
}
}",
GetCSharpResultAt(9, 9, SpecifyCultureInfoAnalyzer.Rule, "CultureInfoTestClass2.MethodOverloadHasCultureInfoAsLastArgument(string)", "CultureInfoTestClass2.Method()", "CultureInfoTestClass2.MethodOverloadHasCultureInfoAsLastArgument(string, CultureInfo)"));
}
[Fact]
public void CA1304_MethodOverloadHasJustCultureInfo_CSharp()
{
VerifyCSharp(@"
using System;
using System.Globalization;
public class CultureInfoTestClass2
{
public static void Method()
{
MethodOverloadHasJustCultureInfo();
}
public static void MethodOverloadHasJustCultureInfo()
{
MethodOverloadHasJustCultureInfo(CultureInfo.CurrentCulture);
}
public static void MethodOverloadHasJustCultureInfo(CultureInfo provider)
{
Console.WriteLine(string.Format(provider, """"));
}
}",
GetCSharpResultAt(9, 9, SpecifyCultureInfoAnalyzer.Rule, "CultureInfoTestClass2.MethodOverloadHasJustCultureInfo()", "CultureInfoTestClass2.Method()", "CultureInfoTestClass2.MethodOverloadHasJustCultureInfo(CultureInfo)"));
}
[Fact]
public void CA1304_TargetMethodIsGenericsAndNonGenerics_CSharp()
{
VerifyCSharp(@"
using System;
using System.Globalization;
public class CultureInfoTestClass2
{
public static void Method()
{
TargetMethodIsNonGenerics();
TargetMethodIsGenerics<int>(); // No Diagnostics
}
public static void TargetMethodIsNonGenerics()
{
}
public static void TargetMethodIsNonGenerics<T>(CultureInfo provider)
{
}
public static void TargetMethodIsGenerics<V>()
{
}
public static void TargetMethodIsGenerics(CultureInfo provider)
{
}
}",
GetCSharpResultAt(9, 9, SpecifyCultureInfoAnalyzer.Rule, "CultureInfoTestClass2.TargetMethodIsNonGenerics()", "CultureInfoTestClass2.Method()", "CultureInfoTestClass2.TargetMethodIsNonGenerics<T>(CultureInfo)"));
}
[Fact]
public void CA1304_MethodOverloadIncludeNonCandidates_CSharp()
{
VerifyCSharp(@"
using System;
using System.Globalization;
public class CultureInfoTestClass2
{
public static void Method()
{
MethodOverloadCount3();
}
public static void MethodOverloadCount3()
{
MethodOverloadCount3(CultureInfo.CurrentCulture);
}
public static void MethodOverloadCount3(CultureInfo provider)
{
Console.WriteLine(string.Format(provider, """"));
}
public static void MethodOverloadCount3(string b)
{
}
}",
GetCSharpResultAt(9, 9, SpecifyCultureInfoAnalyzer.Rule, "CultureInfoTestClass2.MethodOverloadCount3()", "CultureInfoTestClass2.Method()", "CultureInfoTestClass2.MethodOverloadCount3(CultureInfo)"));
}
[Fact]
public void CA1304_MethodOverloadWithJustCultureInfoAsExtraParameter_CSharp()
{
VerifyCSharp(@"
using System;
using System.Globalization;
public class CultureInfoTestClass2
{
public static void Method()
{
MethodOverloadWithJustCultureInfoAsExtraParameter(2, 3);
}
public static void MethodOverloadWithJustCultureInfoAsExtraParameter(int a, int b)
{
MethodOverloadWithJustCultureInfoAsExtraParameter(a, b, CultureInfo.CurrentCulture);
}
public static void MethodOverloadWithJustCultureInfoAsExtraParameter(int a, int b, CultureInfo provider)
{
Console.WriteLine(string.Format(provider, """"));
}
}",
GetCSharpResultAt(9, 9, SpecifyCultureInfoAnalyzer.Rule, "CultureInfoTestClass2.MethodOverloadWithJustCultureInfoAsExtraParameter(int, int)", "CultureInfoTestClass2.Method()", "CultureInfoTestClass2.MethodOverloadWithJustCultureInfoAsExtraParameter(int, int, CultureInfo)"));
}
[Fact]
public void CA1304_NoDiagnostics_CSharp()
{
VerifyCSharp(@"
using System;
using System.Globalization;
public class CultureInfoTestClass2
{
public static void Method()
{
// No Diag - Inherited CultureInfo
MethodOverloadHasInheritedCultureInfo(""Foo"");
// No Diag - Since the overload has more parameters apart from CultureInfo
MethodOverloadHasMoreThanCultureInfo(""Foo"");
// No Diag - Since the CultureInfo parameter is neither as the first parameter nor as the last parameter
MethodOverloadWithJustCultureInfoAsInbetweenParameter("""", """");
}
public static void MethodOverloadHasInheritedCultureInfo(string format)
{
MethodOverloadHasInheritedCultureInfo(new DerivedCultureInfo(""""), format);
}
public static void MethodOverloadHasInheritedCultureInfo(DerivedCultureInfo provider, string format)
{
Console.WriteLine(string.Format(provider, format));
}
public static void MethodOverloadHasMoreThanCultureInfo(string format)
{
MethodOverloadHasMoreThanCultureInfo(format, null, CultureInfo.CurrentCulture);
}
public static void MethodOverloadHasMoreThanCultureInfo(string format, string what, CultureInfo provider)
{
Console.WriteLine(string.Format(provider, format));
}
public static void MethodOverloadWithJustCultureInfoAsInbetweenParameter(string a, string b)
{
MethodOverloadWithJustCultureInfoAsInbetweenParameter(a, CultureInfo.CurrentCulture, b);
}
public static void MethodOverloadWithJustCultureInfoAsInbetweenParameter(string a, CultureInfo provider, string b)
{
Console.WriteLine(string.Format(provider, """"));
}
}
public class DerivedCultureInfo : CultureInfo
{
public DerivedCultureInfo(string name):
base(name)
{
}
}");
}
[Fact]
public void CA1304_PlainString_VisualBasic()
{
VerifyBasic(@"
Imports System
Imports System.Globalization
Public Class CultureInfoTestClass0
Public Function SpecifyCultureInfo01() As String
Return ""foo"".ToLower()
End Function
End Class",
GetBasicResultAt(7, 16, SpecifyCultureInfoAnalyzer.Rule, "String.ToLower()", "CultureInfoTestClass0.SpecifyCultureInfo01()", "String.ToLower(CultureInfo)"));
}
[Fact]
public void CA1304_VariableStringInsideDifferentContainingSymbols_VisualBasic()
{
VerifyBasic(@"
Imports System
Imports System.Globalization
Public Class CultureInfoTestClass1
Public Function LowercaseAString(name As String) As String
Return name.ToLower()
End Function
Public Function InsideALambda(insideLambda As String) As String
Dim ddd As Func(Of String) = Function()
Return insideLambda.ToLower()
End Function
Return Nothing
End Function
Public ReadOnly Property PropertyWithALambda() As String
Get
Dim ddd As Func(Of String) = Function()
Return ""InsideGetter"".ToLower()
End Function
Return Nothing
End Get
End Property
End Class",
GetBasicResultAt(7, 16, SpecifyCultureInfoAnalyzer.Rule, "String.ToLower()", "CultureInfoTestClass1.LowercaseAString(String)", "String.ToLower(CultureInfo)"),
GetBasicResultAt(12, 48, SpecifyCultureInfoAnalyzer.Rule, "String.ToLower()", "CultureInfoTestClass1.InsideALambda(String)", "String.ToLower(CultureInfo)"),
GetBasicResultAt(21, 52, SpecifyCultureInfoAnalyzer.Rule, "String.ToLower()", "CultureInfoTestClass1.PropertyWithALambda()", "String.ToLower(CultureInfo)"));
}
[Fact]
public void CA1304_MethodOverloadHasCultureInfoAsFirstArgument_VisualBasic()
{
VerifyBasic(@"
Imports System
Imports System.Globalization
Public Class CultureInfoTestClass2
Public Shared Sub Method()
MethodOverloadHasCultureInfoAsFirstArgument(""Foo"")
End Sub
Public Shared Sub MethodOverloadHasCultureInfoAsFirstArgument(format As String)
MethodOverloadHasCultureInfoAsFirstArgument(CultureInfo.CurrentCulture, format)
End Sub
Public Shared Sub MethodOverloadHasCultureInfoAsFirstArgument(provider As CultureInfo, format As String)
Console.WriteLine(String.Format(provider, format))
End Sub
End Class",
GetBasicResultAt(7, 9, SpecifyCultureInfoAnalyzer.Rule, "CultureInfoTestClass2.MethodOverloadHasCultureInfoAsFirstArgument(String)", "CultureInfoTestClass2.Method()", "CultureInfoTestClass2.MethodOverloadHasCultureInfoAsFirstArgument(CultureInfo, String)"));
}
[Fact]
public void CA1304_MethodOverloadHasCultureInfoAsLastArgument_VisualBasic()
{
VerifyBasic(@"
Imports System
Imports System.Globalization
Public Class CultureInfoTestClass2
Public Shared Sub Method()
MethodOverloadHasCultureInfoAsLastArgument(""Foo"")
End Sub
Public Shared Sub MethodOverloadHasCultureInfoAsLastArgument(format As String)
MethodOverloadHasCultureInfoAsLastArgument(format, CultureInfo.CurrentCulture)
End Sub
Public Shared Sub MethodOverloadHasCultureInfoAsLastArgument(format As String, provider As CultureInfo)
Console.WriteLine(String.Format(provider, format))
End Sub
Public Shared Sub MethodOverloadHasCultureInfoAsLastArgument(provider As CultureInfo, format As String)
Console.WriteLine(String.Format(provider, format))
End Sub
End Class",
GetBasicResultAt(7, 9, SpecifyCultureInfoAnalyzer.Rule, "CultureInfoTestClass2.MethodOverloadHasCultureInfoAsLastArgument(String)", "CultureInfoTestClass2.Method()", "CultureInfoTestClass2.MethodOverloadHasCultureInfoAsLastArgument(String, CultureInfo)"));
}
[Fact]
public void CA1304_MethodOverloadHasJustCultureInfo_VisualBasic()
{
VerifyBasic(@"
Imports System
Imports System.Globalization
Public Class CultureInfoTestClass2
Public Shared Sub Method()
MethodOverloadHasJustCultureInfo()
End Sub
Public Shared Sub MethodOverloadHasJustCultureInfo()
MethodOverloadHasJustCultureInfo(CultureInfo.CurrentCulture)
End Sub
Public Shared Sub MethodOverloadHasJustCultureInfo(provider As CultureInfo)
Console.WriteLine(String.Format(provider, """"))
End Sub
End Class",
GetBasicResultAt(7, 9, SpecifyCultureInfoAnalyzer.Rule, "CultureInfoTestClass2.MethodOverloadHasJustCultureInfo()", "CultureInfoTestClass2.Method()", "CultureInfoTestClass2.MethodOverloadHasJustCultureInfo(CultureInfo)"));
}
[Fact]
public void CA1304_MethodOverloadIncludeNonCandidates_VisualBasic()
{
VerifyBasic(@"
Imports System
Imports System.Globalization
Public Class CultureInfoTestClass2
Public Shared Sub Method()
MethodOverloadCount3()
End Sub
Public Shared Sub MethodOverloadCount3()
MethodOverloadCount3(CultureInfo.CurrentCulture)
End Sub
Public Shared Sub MethodOverloadCount3(provider As CultureInfo)
Console.WriteLine(String.Format(provider, """"))
End Sub
Public Shared Sub MethodOverloadCount3(b As String)
End Sub
End Class",
GetBasicResultAt(7, 9, SpecifyCultureInfoAnalyzer.Rule, "CultureInfoTestClass2.MethodOverloadCount3()", "CultureInfoTestClass2.Method()", "CultureInfoTestClass2.MethodOverloadCount3(CultureInfo)"));
}
[Fact]
public void CA1304_MethodOverloadWithJustCultureInfoAsExtraParameter_VisualBasic()
{
VerifyBasic(@"
Imports System
Imports System.Globalization
Public Class CultureInfoTestClass2
Public Shared Sub Method()
MethodOverloadWithJustCultureInfoAsExtraParameter(2, 3)
End Sub
Public Shared Sub MethodOverloadWithJustCultureInfoAsExtraParameter(a As Integer, b As Integer)
MethodOverloadWithJustCultureInfoAsExtraParameter(a, b, CultureInfo.CurrentCulture)
End Sub
Public Shared Sub MethodOverloadWithJustCultureInfoAsExtraParameter(a As Integer, b As Integer, provider As CultureInfo)
Console.WriteLine(String.Format(provider, """"))
End Sub
End Class",
GetBasicResultAt(7, 9, SpecifyCultureInfoAnalyzer.Rule, "CultureInfoTestClass2.MethodOverloadWithJustCultureInfoAsExtraParameter(Integer, Integer)", "CultureInfoTestClass2.Method()", "CultureInfoTestClass2.MethodOverloadWithJustCultureInfoAsExtraParameter(Integer, Integer, CultureInfo)"));
}
[Fact]
public void CA1304_NoDiagnostics_VisualBasic()
{
VerifyBasic(@"
Imports System
Imports System.Globalization
Public Class CultureInfoTestClass2
Public Shared Sub Method()
' No Diag - Inherited CultureInfo
MethodOverloadHasInheritedCultureInfo(""Foo"")
' No Diag - There are more parameters apart from CultureInfo
MethodOverloadHasMoreThanCultureInfo(""Foo"")
' No Diag - The CultureInfo parameter is neither the first parameter nor the last parameter
MethodOverloadWithJustCultureInfoAsInbetweenParameter("""", """")
End Sub
Public Shared Sub MethodOverloadHasInheritedCultureInfo(format As String)
MethodOverloadHasInheritedCultureInfo(New DerivedCultureInfo(""""), format)
End Sub
Public Shared Sub MethodOverloadHasInheritedCultureInfo(provider As DerivedCultureInfo, format As String)
Console.WriteLine(String.Format(provider, format))
End Sub
Public Shared Sub MethodOverloadHasMoreThanCultureInfo(format As String)
MethodOverloadHasMoreThanCultureInfo(format, Nothing, CultureInfo.CurrentCulture)
End Sub
Public Shared Sub MethodOverloadHasMoreThanCultureInfo(format As String, what As String, provider As CultureInfo)
Console.WriteLine(String.Format(provider, format))
End Sub
Public Shared Sub MethodOverloadWithJustCultureInfoAsInbetweenParameter(a As String, b As String)
MethodOverloadWithJustCultureInfoAsInbetweenParameter(a, CultureInfo.CurrentCulture, b)
End Sub
Public Shared Sub MethodOverloadWithJustCultureInfoAsInbetweenParameter(a As String, provider As CultureInfo, b As String)
Console.WriteLine(String.Format(provider, """"))
End Sub
End Class
Public Class DerivedCultureInfo
Inherits CultureInfo
Public Sub New(name As String)
MyBase.New(name)
End Sub
End Class");
}
}
}
| |
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.ComponentModel.Composition;
using System.ComponentModel.Composition.Hosting;
using System.Diagnostics;
using System.Drawing;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Windows.Forms;
using System.Xml;
using Bloom.Book;
using Bloom.Collection;
using Bloom.Api;
using Bloom.Publish.PDF;
using BloomTemp;
using DesktopAnalytics;
using SIL.IO;
using SIL.Xml;
using Bloom.ToPalaso.Experimental;
namespace Bloom.Publish
{
/// <summary>
/// Contains the logic behind the PublishView control, which involves creating a pdf from the html book and letting you print it,
/// making epubs, and various other publication paths.
/// </summary>
public class PublishModel : IDisposable
{
public BookSelection BookSelection { get; private set; }
public BookServer BookServer { get { return _bookServer; } }
public string PdfFilePath { get; private set; }
public enum DisplayModes
{
WaitForUserToChooseSomething,
Working,
ShowPdf,
Upload,
Printing,
ResumeAfterPrint,
Android,
EPUB,
NotPublishable
}
public enum BookletPortions
{
None,
AllPagesNoBooklet,
BookletCover,
BookletPages, // include front and back matter that isn't coverstock
InnerContent // excludes all front and back matter
}
public enum BookletLayoutMethod
{
NoBooklet,
SideFold,
CutAndStack,
Calendar
}
private Book.Book _currentlyLoadedBook;
private PdfMaker _pdfMaker;
private readonly CurrentEditableCollectionSelection _currentBookCollectionSelection;
private readonly CollectionSettings _collectionSettings;
private readonly BookServer _bookServer;
private readonly BookThumbNailer _thumbNailer;
private string _lastDirectory;
public PublishModel(BookSelection bookSelection, PdfMaker pdfMaker, CurrentEditableCollectionSelection currentBookCollectionSelection, CollectionSettings collectionSettings,
BookServer bookServer, BookThumbNailer thumbNailer)
{
BookSelection = bookSelection;
_pdfMaker = pdfMaker;
_pdfMaker.CompressPdf = true; // See http://issues.bloomlibrary.org/youtrack/issue/BL-3721.
//_pdfMaker.EngineChoice = collectionSettings.PdfEngineChoice;
_currentBookCollectionSelection = currentBookCollectionSelection;
ShowCropMarks=false;
_collectionSettings = collectionSettings;
_bookServer = bookServer;
_thumbNailer = thumbNailer;
bookSelection.SelectionChanged += OnBookSelectionChanged;
//we don't want to default anymore: BookletPortion = BookletPortions.BookletPages;
CanPublish = DeterminePublishability();
}
public bool CanPublish { get; set; }
public PublishView View { get; set; }
// True when we are showing the controls for uploading. (Review: does this belong in the model or view?)
public bool UploadMode { get; set; }
// True when showing an ePUB preview.
public bool EpubMode;
public bool PdfGenerationSucceeded { get; set; }
private void OnBookSelectionChanged(object sender, BookSelectionChangedEventArgs bookSelectionChangedEventArgs)
{
//some of this checking is about bl-272, which was replicated by having one book, going to publish, then deleting that last book.
if (BookSelection != null && View != null && BookSelection.CurrentSelection!=null && _currentlyLoadedBook != BookSelection.CurrentSelection && View.Visible)
{
PageLayout = BookSelection.CurrentSelection.GetLayout();
CanPublish = DeterminePublishability();
}
}
private bool DeterminePublishability()
{
// At this point (5.1), this should be false only if all of the following are true:
// - User is not in Enterprise mode AND
// - Book contains overlay elements AND
// - Book is not a translated shell
var overlayElementNodes = BookSelection?.CurrentSelection?.RawDom.SelectNodes("//div[contains(@class, 'bloom-textOverPicture')]");
var bookContainsOverlayElements = (overlayElementNodes?.Count ?? 0) > 0;
var bookIsTranslatedFromShell = BookSelection?.CurrentSelection?.RecordedAsLockedDown??false;
return _collectionSettings.HaveEnterpriseFeatures || !bookContainsOverlayElements || bookIsTranslatedFromShell;
}
internal static string GetPreparingImageFilter()
{
var msgFmt = L10NSharp.LocalizationManager.GetString("ImageUtils.PreparingImage", "Preparing image: {0}", "{0} is a placeholder for the image file name");
var idx = msgFmt.IndexOf("{0}");
return idx >= 0 ? msgFmt.Substring(0,idx) : msgFmt; // translated string is missing the filename placeholder?
}
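/// <summary>
/// Builds the final HTML for the current book and asks the PdfMaker to create the PDF on the
/// background worker; any exception is handed back through doWorkEventArgs.Result rather than shown from this thread.
/// </summary>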
public void LoadBook(BackgroundWorker worker, DoWorkEventArgs doWorkEventArgs)
{
try
{
using (var tempHtml = MakeFinalHtmlForPdfMaker())
{
if (doWorkEventArgs.Cancel)
return;
BookletLayoutMethod layoutMethod = GetBookletLayoutMethod();
// Check memory for the benefit of developers. The user won't see anything.
Bloom.Utils.MemoryManagement.CheckMemory(true, "about to create PDF file", false);
_pdfMaker.MakePdf(new PdfMakingSpecs() {InputHtmlPath = tempHtml.Key,
OutputPdfPath=PdfFilePath,
PaperSizeName=PageLayout.SizeAndOrientation.PageSizeName,
Landscape=PageLayout.SizeAndOrientation.IsLandScape,
SaveMemoryMode=_currentlyLoadedBook.UserPrefs.ReducePdfMemoryUse,
LayoutPagesForRightToLeft=LayoutPagesForRightToLeft,
BooketLayoutMethod=layoutMethod,
BookletPortion=BookletPortion,
BookIsFullBleed = _currentlyLoadedBook.FullBleed,
PrintWithFullBleed = GetPrintingWithFullBleed(),
Cmyk = _currentlyLoadedBook.UserPrefs.CmykPdf},
worker, doWorkEventArgs, View );
// Warn the user if we're starting to use too much memory.
Bloom.Utils.MemoryManagement.CheckMemory(false, "finished creating PDF file", true);
}
}
catch (Exception e)
{
//we can't safely do any ui-related work from this thread, like putting up a dialog
doWorkEventArgs.Result = e;
// SIL.Reporting.ErrorReport.NotifyUserOfProblem(e, "There was a problem creating a PDF from this book.");
// SetDisplayMode(DisplayModes.WaitForUserToChooseSomething);
// return;
}
}
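/// <summary>
/// Chooses NoBooklet when printing all pages without a booklet; otherwise asks the current book
/// for its booklet layout method given the page layout being published.
/// </summary>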
private BookletLayoutMethod GetBookletLayoutMethod()
{
BookletLayoutMethod layoutMethod;
if (this.BookletPortion == BookletPortions.AllPagesNoBooklet)
layoutMethod = BookletLayoutMethod.NoBooklet;
else
layoutMethod = BookSelection.CurrentSelection.GetBookletLayoutMethod(PageLayout);
return layoutMethod;
}
public bool IsCurrentBookFullBleed => _currentlyLoadedBook != null && _currentlyLoadedBook.FullBleed;
private bool GetPrintingWithFullBleed()
{
return _currentlyLoadedBook.FullBleed && GetBookletLayoutMethod() == BookletLayoutMethod.NoBooklet && _currentlyLoadedBook.UserPrefs.FullBleed;
}
private bool LayoutPagesForRightToLeft
{
get { return _currentlyLoadedBook.BookData.Language1.IsRightToLeft; }
}
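/// <summary>
/// Prepares the print-ready DOM for the selected booklet portion (stylesheet classes, page split mode,
/// and full-bleed clipping when needed) and serves it as a simulated page file in the book folder for the PDF maker.
/// </summary>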
private SimulatedPageFile MakeFinalHtmlForPdfMaker()
{
if (_currentlyLoadedBook == null)
_currentlyLoadedBook = BookSelection.CurrentSelection;
PdfFilePath = GetPdfPath(Path.GetFileName(_currentlyLoadedBook.FolderPath));
var orientationChanging = BookSelection.CurrentSelection.GetLayout().SizeAndOrientation.IsLandScape !=
PageLayout.SizeAndOrientation.IsLandScape;
var dom = BookSelection.CurrentSelection.GetDomForPrinting(BookletPortion, _currentBookCollectionSelection.CurrentSelection,
_bookServer, orientationChanging, PageLayout);
AddStylesheetClasses(dom.RawDom);
PageLayout.UpdatePageSplitMode(dom.RawDom);
if (_currentlyLoadedBook.FullBleed && !GetPrintingWithFullBleed())
{
ClipBookToRemoveFullBleed(dom);
}
XmlHtmlConverter.MakeXmlishTagsSafeForInterpretationAsHtml(dom.RawDom);
dom.UseOriginalImages = true; // don't want low-res images or transparency in PDF.
return BloomServer.MakeSimulatedPageFileInBookFolder(dom, source:BloomServer.SimulatedPageFileSource.Pub);
}
private void ClipBookToRemoveFullBleed(HtmlDom dom)
{
// example: A5 book is full bleed. What the user saw and configured in Edit mode is RA5 paper, 3mm larger on each side.
// But we're not printing for full bleed. We will create an A5 page with no inset trim box.
// We want it to hold the trim box part of the RA5 page.
// to do this, we simply need to move the bloom-page element up and left by 3mm. Clipping to the page will do the rest.
// It would be more elegant to do this by introducing a CSS rule involving .bloom-page, but to introduce a new stylesheet
// we have to make it findable in the book folder, which is messy. Or, we could add a stylesheet element to the DOM;
// but that's messy, too, we need stuff like /*<![CDATA[*/ to make the content survive the trip from XML to HTML.
// So it's easiest just to stick it in the style attribute of each page.
foreach (var page in dom.SafeSelectNodes("//div[contains(@class, 'bloom-page')]").Cast<XmlElement>())
{
page.SetAttribute("style", "margin-left: -3mm; margin-top: -3mm;");
}
}
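/// <summary>
/// Adds the publishing-related classes to the body element: full-bleed or not, right-to-left,
/// hidden placeholders, and calendar fold when the book's default booklet layout is Calendar.
/// </summary>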
private void AddStylesheetClasses(XmlDocument dom)
{
if (this.GetPrintingWithFullBleed())
{
HtmlDom.AddClassToBody(dom, "publishingWithFullBleed");
}
else
{
HtmlDom.AddClassToBody(dom, "publishingWithoutFullBleed");
}
HtmlDom.AddPublishClassToBody(dom);
if (LayoutPagesForRightToLeft)
HtmlDom.AddRightToLeftClassToBody(dom);
HtmlDom.AddHidePlaceHoldersClassToBody(dom);
if (BookSelection.CurrentSelection.GetDefaultBookletLayoutMethod() == PublishModel.BookletLayoutMethod.Calendar)
{
HtmlDom.AddCalendarFoldClassToBody(dom);
}
}
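/// <summary>
/// Returns a path in the temp folder for the PDF, trying numeric suffixes (up to 100) until it finds
/// a file name that either doesn't exist or can be deleted.
/// </summary>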
private string GetPdfPath(string fname)
{
string path = null;
// Sanitize fileName first
string fileName = BookStorage.SanitizeNameForFileSystem(fname);
for (int i = 0; i < 100; i++)
{
path = Path.Combine(Path.GetTempPath(), string.Format("{0}-{1}.pdf", fileName, i));
if (!RobustFile.Exists(path))
break;
try
{
RobustFile.Delete(path);
break;
}
catch (Exception)
{
//couldn't delete it? then increment the suffix and try again
}
}
return path;
}
DisplayModes _currentDisplayMode = DisplayModes.WaitForUserToChooseSomething;
internal DisplayModes DisplayMode
{
get
{
return _currentDisplayMode;
}
set
{
_currentDisplayMode = value;
if (View != null)
View.Invoke((Action) (() => View.SetDisplayMode(value)));
}
}
public void Dispose()
{
if (RobustFile.Exists(PdfFilePath))
{
try
{
RobustFile.Delete(PdfFilePath);
}
catch (Exception)
{
}
}
GC.SuppressFinalize(this);
}
public BookletPortions BookletPortion { get; set; }
/// <summary>
/// The book itself has a layout, but we can override it here during publishing
/// </summary>
public Layout PageLayout { get; set; }
public bool ShowCropMarks
{
get { return _pdfMaker.ShowCropMarks; }
set { _pdfMaker.ShowCropMarks = value; }
}
public bool AllowEPUB => CanPublish;
public bool AllowAndroid => CanPublish;
public bool AllowUpload => BookSelection.CurrentSelection.BookInfo.AllowUploading && CanPublish;
public bool AllowPdf => CanPublish;
public bool AllowPdfBooklet
{
get
{
// Large page sizes can't make booklets. See http://issues.bloomlibrary.org/youtrack/issue/BL-4155.
var size = PageLayout.SizeAndOrientation.PageSizeName;
return AllowPdf && BookSelection.CurrentSelection.BookInfo.BookletMakingIsAppropriate &&
(size != "A4" && size != "A3" && size != "B5" && size != "Letter" && size != "Device16x9");
}
}
// currently the only cover option we have is a booklet one
public bool AllowPdfCover => AllowPdfBooklet;
public void Save()
{
try
{
// Give a slight preference to USB keys, though if they used a different directory last time, we favor that.
if (string.IsNullOrEmpty(_lastDirectory) || !Directory.Exists(_lastDirectory))
{
var drives = SIL.UsbDrive.UsbDriveInfo.GetDrives();
if (drives != null && drives.Count > 0)
{
_lastDirectory = drives[0].RootDirectory.FullName;
}
}
using (var dlg = new DialogAdapters.SaveFileDialogAdapter())
{
if (!string.IsNullOrEmpty(_lastDirectory) && Directory.Exists(_lastDirectory))
dlg.InitialDirectory = _lastDirectory;
var portion = "";
switch (BookletPortion)
{
case BookletPortions.None:
Debug.Fail("Save should not be enabled");
return;
case BookletPortions.AllPagesNoBooklet:
portion = "Pages";
break;
case BookletPortions.BookletCover:
portion = "Cover";
break;
case BookletPortions.BookletPages:
portion = "Inside";
break;
default:
throw new ArgumentOutOfRangeException();
}
string forPrintShop =
_currentlyLoadedBook.UserPrefs.CmykPdf || _currentlyLoadedBook.UserPrefs.FullBleed
? "-printshop"
: "";
string suggestedName = $"{Path.GetFileName(_currentlyLoadedBook.FolderPath)}-{_currentlyLoadedBook.GetFilesafeLanguage1Name("en")}-{portion}{forPrintShop}.pdf";
dlg.FileName = suggestedName;
var pdfFileLabel = L10NSharp.LocalizationManager.GetString(@"PublishTab.PdfMaker.PdfFile",
"PDF File",
@"displayed as file type for Save File dialog.");
pdfFileLabel = pdfFileLabel.Replace("|", "");
dlg.Filter = String.Format("{0}|*.pdf", pdfFileLabel);
dlg.OverwritePrompt = true;
if (DialogResult.OK == dlg.ShowDialog())
{
_lastDirectory = Path.GetDirectoryName(dlg.FileName);
if (_currentlyLoadedBook.UserPrefs.CmykPdf)
{
// PDF for Printshop (CMYK US Web Coated V2)
ProcessPdfFurtherAndSave(ProcessPdfWithGhostscript.OutputType.Printshop, dlg.FileName);
} else {
// we want the simple PDF we already made.
RobustFile.Copy(PdfFilePath, dlg.FileName, true);
}
Analytics.Track("Save PDF", new Dictionary<string, string>()
{
{"Portion", Enum.GetName(typeof(BookletPortions), BookletPortion)},
{"Layout", PageLayout.ToString()},
{"BookId", BookSelection.CurrentSelection.ID },
{"Country", _collectionSettings.Country}
});
}
}
}
catch (Exception err)
{
SIL.Reporting.ErrorReport.NotifyUserOfProblem("Bloom was not able to save the PDF. {0}", err.Message);
}
}
private void ProcessPdfFurtherAndSave(ProcessPdfWithGhostscript.OutputType type, string outputPath)
{
if (type == ProcessPdfWithGhostscript.OutputType.Printshop &&
!Bloom.Properties.Settings.Default.AdobeColorProfileEula2003Accepted)
{
var prolog = L10NSharp.LocalizationManager.GetString(@"PublishTab.PrologToAdobeEula",
"Bloom uses Adobe color profiles to convert PDF files from using RGB color to using CMYK color. This is part of preparing a \"PDF for a print shop\". You must agree to the following license in order to perform this task in Bloom.",
@"Brief explanation of what this license is and why the user needs to agree to it");
using (var dlg = new Bloom.Registration.LicenseDialog("AdobeColorProfileEULA.htm", prolog))
{
dlg.Text = L10NSharp.LocalizationManager.GetString(@"PublishTab.AdobeEulaTitle",
"Adobe Color Profile License Agreement", @"dialog title for license agreement");
if (dlg.ShowDialog() != DialogResult.OK)
{
var msg = L10NSharp.LocalizationManager.GetString(@"PublishTab.PdfNotSavedWhy",
"The PDF file has not been saved because you chose not to allow producing a \"PDF for print shop\".",
@"explanation that file was not saved displayed in a message box");
var heading = L10NSharp.LocalizationManager.GetString(@"PublishTab.PdfNotSaved",
"PDF Not Saved", @"title for the message box");
MessageBox.Show(msg, heading, MessageBoxButtons.OK, MessageBoxIcon.Information);
return;
}
}
Bloom.Properties.Settings.Default.AdobeColorProfileEula2003Accepted = true;
Bloom.Properties.Settings.Default.Save();
}
using (var progress = new SIL.Windows.Forms.Progress.ProgressDialog())
{
progress.ProgressRangeMinimum = 0;
progress.ProgressRangeMaximum = 100;
progress.Overview = L10NSharp.LocalizationManager.GetString(@"PublishTab.PdfMaker.Saving",
"Saving PDF...",
@"Message displayed in a progress report dialog box");
progress.BackgroundWorker = new BackgroundWorker();
progress.BackgroundWorker.DoWork += (object sender, DoWorkEventArgs e) => {
var pdfProcess = new ProcessPdfWithGhostscript(type, sender as BackgroundWorker);
pdfProcess.ProcessPdfFile(PdfFilePath, outputPath, false);
};
progress.BackgroundWorker.ProgressChanged += (object sender, ProgressChangedEventArgs e) => {
progress.Progress = e.ProgressPercentage;
var status = e.UserState as string;
if (!String.IsNullOrWhiteSpace(status))
progress.StatusText = status;
};
progress.ShowDialog(); // will start the background process when loaded/showing
if (progress.ProgressStateResult != null && progress.ProgressStateResult.ExceptionThatWasEncountered != null)
{
string shortMsg = L10NSharp.LocalizationManager.GetString(@"PublishTab.PdfMaker.ErrorSaving",
"Error compressing or recoloring the PDF file",
@"Message briefly displayed to the user in a toast");
var longMsg = String.Format("Exception encountered processing the PDF file: {0}", progress.ProgressStateResult.ExceptionThatWasEncountered);
NonFatalProblem.Report(ModalIf.None, PassiveIf.All, shortMsg, longMsg, progress.ProgressStateResult.ExceptionThatWasEncountered);
}
}
}
public void DebugCurrentPDFLayout()
{
// var dom = BookSelection.CurrentSelection.GetDomForPrinting(BookletPortion, _currentBookCollectionSelection.CurrentSelection, _bookServer);
//
// SizeAndOrientation.UpdatePageSizeAndOrientationClasses(dom, PageLayout);
// PageLayout.UpdatePageSplitMode(dom);
//
// XmlHtmlConverter.MakeXmlishTagsSafeForInterpretationAsHtml(dom);
// var tempHtml = BloomTemp.TempFile.CreateHtm5FromXml(dom); //nb: we intentionally don't ever delete this, to aid in debugging
// //var tempHtml = TempFile.WithExtension(".htm");
//
// var settings = new XmlWriterSettings {Indent = true, CheckCharacters = true};
// using (var writer = XmlWriter.Create(tempHtml.Path, settings))
// {
// dom.WriteContentTo(writer);
// writer.Close();
// }
// System.Diagnostics.Process.Start(tempHtml.Path);
var htmlFilePath = MakeFinalHtmlForPdfMaker().Key;
if (SIL.PlatformUtilities.Platform.IsWindows)
Process.Start("Firefox.exe", '"' + htmlFilePath + '"');
else
SIL.Program.Process.SafeStart("xdg-open", '"' + htmlFilePath + '"');
}
public void UpdateModelUponActivation()
{
if (BookSelection.CurrentSelection == null)
return;
_currentlyLoadedBook = BookSelection.CurrentSelection;
PageLayout = _currentlyLoadedBook.GetLayout();
// BL-8648: In case we have an older version of a book (downloaded, e.g.) and the user went
// straight to the Publish tab avoiding the Edit tab, we could arrive here needing to update
// things. We choose to do the original book update here (when the user clicks on the Publish tab),
// instead of the various places that we publish the book.
// Note that the BringBookUpToDate() called by PublishHelper.MakeDeviceXmatterTempBook() and
// called by BloomReaderFileMaker.PrepareBookForBloomReader() applies to a copy of the book
// and is done in a way that explicitly avoids updating images. This call updates the images,
// if needed, as a permanent fix.
using (var dlg = new ProgressDialogForeground())
{
dlg.ShowAndDoWork(progress => _currentlyLoadedBook.BringBookUpToDate(progress));
}
}
[Import("GetPublishingMenuCommands")]//, AllowDefault = true)]
private Func<IEnumerable<ToolStripItem>> _getExtensionMenuItems;
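/// <summary>
/// Yields a preview DOM for every page, covering each book in the collection when the current selection
/// is a folio; pages are laid out as B5Portrait with production notes hidden and publishing classes added.
/// </summary>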
public IEnumerable<HtmlDom> GetPageDoms()
{
if (BookSelection.CurrentSelection.IsFolio)
{
foreach (var bi in _currentBookCollectionSelection.CurrentSelection.GetBookInfos())
{
var book = _bookServer.GetBookFromBookInfo(bi);
//need to hide the "notes for illustrators" on SHRP, which is controlled by the layout
book.SetLayout(new Layout()
{
SizeAndOrientation = SizeAndOrientation.FromString("B5Portrait"),
Style = "HideProductionNotes"
});
foreach (var page in book.GetPages())
{
//yield return book.GetPreviewXmlDocumentForPage(page);
var previewXmlDocumentForPage = book.GetPreviewXmlDocumentForPage(page);
BookStorage.SetBaseForRelativePaths(previewXmlDocumentForPage, book.FolderPath);
AddStylesheetClasses(previewXmlDocumentForPage.RawDom);
yield return previewXmlDocumentForPage;
}
}
}
else //this one is just for testing, it's not especially fruitful to export for a single book
{
//need to hide the "notes for illustrators" on SHRP, which is controlled by the layout
BookSelection.CurrentSelection.SetLayout(new Layout()
{
SizeAndOrientation = SizeAndOrientation.FromString("B5Portrait"),
Style = "HideProductionNotes"
});
foreach (var page in BookSelection.CurrentSelection.GetPages())
{
var previewXmlDocumentForPage = BookSelection.CurrentSelection.GetPreviewXmlDocumentForPage(page);
//get the original images, not compressed ones (just in case the thumbnails are, like, full-size & they want quality)
BookStorage.SetBaseForRelativePaths(previewXmlDocumentForPage, BookSelection.CurrentSelection.FolderPath);
AddStylesheetClasses(previewXmlDocumentForPage.RawDom);
yield return previewXmlDocumentForPage;
}
}
}
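/// <summary>
/// Asynchronously renders a thumbnail of the given DOM at the requested size, using original
/// (uncompressed) images so the result is suitable for printing.
/// </summary>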
public void GetThumbnailAsync(int width, int height, HtmlDom dom, Action<Image> onReady, Action<Exception> onError)
{
var thumbnailOptions = new HtmlThumbNailer.ThumbnailOptions()
{
BackgroundColor = Color.White,
BorderStyle = HtmlThumbNailer.ThumbnailOptions.BorderStyles.None,
CenterImageUsingTransparentPadding = false,
Height = height,
Width = width
};
dom.UseOriginalImages = true; // apparently these thumbnails can be big...anyway we want printable images.
_thumbNailer.HtmlThumbNailer.GetThumbnailAsync(String.Empty, string.Empty, dom, thumbnailOptions,onReady, onError);
}
public IEnumerable<ToolStripItem> GetExtensionMenuItems()
{
//for now we're not doing real extension dlls, just kind of faking it. So we will limit this load
//to books we know go with this currently "built-in" "extension" for SIL LEAD's SHRP Project.
if (SHRP_PupilBookExtension.ExtensionIsApplicable(BookSelection.CurrentSelection))
{
//load any extension assembly found in the template's root directory
//var catalog = new DirectoryCatalog(this.BookSelection.CurrentSelection.FindTemplateBook().FolderPath, "*.dll");
var catalog = new AssemblyCatalog(Assembly.GetExecutingAssembly());
var container = new CompositionContainer(catalog);
//inject what we have to offer for the extension to consume
container.ComposeExportedValue<string>("PathToBookFolder",BookSelection.CurrentSelection.FolderPath);
container.ComposeExportedValue<string>("Language1Iso639Code", _currentlyLoadedBook.BookData.Language1.Iso639Code);
container.ComposeExportedValue<Func<IEnumerable<HtmlDom>>>(GetPageDoms);
// container.ComposeExportedValue<Func<string>>("pathToPublishedHtmlFile",GetFileForPrinting);
//get the original images, not compressed ones (just in case the thumbnails are, like, full-size & they want quality)
container.ComposeExportedValue<Action<int, int, HtmlDom, Action<Image>, Action<Exception>>>(GetThumbnailAsync);
container.SatisfyImportsOnce(this);
return _getExtensionMenuItems == null ? new List<ToolStripItem>() : _getExtensionMenuItems();
}
else
{
return new List<ToolStripMenuItem>();
}
}
public void ReportAnalytics(string eventName)
{
Analytics.Track(eventName, new Dictionary<string, string>()
{
{"BookId", BookSelection.CurrentSelection.ID},
{"Country", _collectionSettings.Country}
});
}
/// <summary>
/// Remove all text data that is not in a desired language.
/// </summary>
/// <remarks>
/// See https://issues.bloomlibrary.org/youtrack/issue/BL-7124.
/// See https://issues.bloomlibrary.org/youtrack/issue/BL-7998 for when we need to prune xmatter pages.
/// </remarks>
public static void RemoveUnwantedLanguageData(HtmlDom dom, IEnumerable<string> languagesToInclude, string nationalLang=null)
{
//Debug.Write("PublishModel.RemoveUnwantedLanguageData(): languagesToInclude =");
//foreach (var lang in languagesToInclude)
// Debug.Write($" {lang}");
//Debug.WriteLine();
// Place the desired language tags plus the two standard pseudolanguage tags in a HashSet
// for fast access.
var contentLanguages = new HashSet<string>();
foreach (var lang in languagesToInclude)
contentLanguages.Add(lang);
contentLanguages.Add("*");
contentLanguages.Add("z");
// Don't change the div#bloomDataDiv: thus we have an outer loop that
// selects only xmatter and user content pages.
// While we could probably safely remove elements from div#bloomDataDiv,
// we decided to play it very safe for now and leave it all intact.
// The default behavior is also to not touch xmatter pages. But if the code for the national language (aka L2) is
// provided, then we prune xmatter pages as well but add the national language to the list of languages whose data
// we keep in the xmatter.
// We can always come back to this if we realize we should be removing more.
// If that happens, removing the outer loop and checking the data-book attribute (and
// maybe the data-derived attribute) may become necessary.
foreach (var page in dom.RawDom.SafeSelectNodes("//div[contains(@class,'bloom-page')]").Cast<XmlElement>().ToList())
{
var isXMatter = !String.IsNullOrWhiteSpace(page.GetAttribute("data-xmatter-page"));
if (isXMatter && nationalLang == null)
continue; // default behavior is to skip pruning data from xmatter
foreach (var div in page.SafeSelectNodes(".//div[@lang]").Cast<XmlElement>().ToList())
{
var lang = div.GetAttribute("lang");
if (String.IsNullOrEmpty(lang) || contentLanguages.Contains(lang) || (isXMatter && lang == nationalLang))
continue;
var classAttr = div.GetAttribute("class");
// retain the .pageLabel and .pageDescription divs (which are always lang='en')
// Also retain any .Instructions-style divs, which may have the original with lang='en', and
// which are usually translated to the national language.
// REVIEW: are there any other classes that should be checked here?
if (classAttr.Contains("pageLabel") || classAttr.Contains("pageDescription") || classAttr.Contains("Instructions-style"))
continue;
// check whether any descendant divs are desired before deleting this div.
bool deleteDiv = true;
foreach (var subdiv in div.SafeSelectNodes(".//div[@lang]").Cast<XmlElement>().ToList())
{
var sublang = subdiv.GetAttribute("lang");
if (String.IsNullOrEmpty(sublang))
continue;
if (contentLanguages.Contains(sublang) || (isXMatter && sublang == nationalLang))
{
deleteDiv = false;
break;
}
}
// Remove this div
if (deleteDiv)
div.ParentNode.RemoveChild(div);
}
}
}
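// A minimal usage sketch (hypothetical caller; the language tags "xyz", "en", and "fr" are placeholders,
// not values taken from Bloom): keep only the vernacular and English text, and also prune xmatter pages
// against the national language.
//
//   PublishModel.RemoveUnwantedLanguageData(book.OurHtmlDom, new[] { "xyz", "en" }, nationalLang: "fr");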
// This is a highly experimental export which may evolve as we work on this with Age of Learning.
public void ExportAudioFiles1PerPage()
{
var container = System.IO.Path.Combine(System.IO.Path.GetTempPath(), "bloom audio export");
Directory.CreateDirectory(container);
var parentFolderForAllOfTheseExports = TemporaryFolder.TrackExisting(container);
var folderForThisBook = new TemporaryFolder(parentFolderForAllOfTheseExports, Path.GetFileName(this.BookSelection.CurrentSelection.FolderPath));
var pageIndex = 0;
foreach (XmlElement pageElement in this.BookSelection.CurrentSelection.GetPageElements())
{
++pageIndex;
//var durations = new StringBuilder();
//var accumulatedDuration = 0;
try
{
// These elements are marked as audio-sentence, but we're not sure whether the user has actually recorded them yet
var audioSentenceElements = HtmlDom.SelectAudioSentenceElements(pageElement)
.Cast<XmlElement>();
var mergeFiles =
audioSentenceElements
.Select(s =>
AudioProcessor.GetOrCreateCompressedAudio(
this.BookSelection.CurrentSelection.FolderPath, s.Attributes["id"]?.Value))
.Where(s => !string.IsNullOrEmpty(s));
if (mergeFiles.Any())
{
// enhance: it would be nice if we could somehow provide info on what should be highlighted and when,
// though I don't know how that would work with Age of Learning's PDF viewer.
// The following was a start on that before I realized that I don't know how that would be accomplished,
// but I'm leaving it here in case I pick it up again.
// foreach (var audioSentenceElement in audioSentenceElements)
//{
// var id = HtmlDom.GetAttributeValue(audioSentenceElement, "id");
// var element = this.BookSelection.CurrentSelection.OurHtmlDom.SelectSingleNode($"//div[@id='{id}']");
// var duration = HtmlDom.GetAttributeValue(audioSentenceElement, "data-duration");
// Here we would need to determine the duration if data-duration is empty.
// accumulatedDuration += int.Parse(duration);
// durations.AppendLine(accumulatedDuration.ToString() + "\t" + duration);
//}
var bookName = Path.GetFileName(this.BookSelection.CurrentSelection.FolderPath);// not title, that isn't sanitized to safe characters
var filename =
$"{bookName}_{this._currentlyLoadedBook.BookData.Language1.Name}_{pageIndex:0000}.mp3".Replace(' ','_');
var combinedAudioPath = Path.Combine(folderForThisBook.FolderPath, filename);
var errorMessage = AudioProcessor.MergeAudioFiles(mergeFiles, combinedAudioPath);
if (errorMessage != null)
{
File.WriteAllText(Path.Combine(folderForThisBook.FolderPath, $"error page{pageIndex}.txt"),
errorMessage);
}
//File.WriteAllText(Path.Combine(folderForThisBook.FolderPath, $"page{pageIndex} timings.txt"),
// durations.ToString());
}
}
catch (Exception e)
{
File.WriteAllText(Path.Combine(folderForThisBook.FolderPath, $"error page{pageIndex}.txt"),
e.Message);
}
}
Process.Start(folderForThisBook.FolderPath);
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;
namespace TestFlask.API.Areas.HelpPage
{
/// <summary>
/// This class will generate the samples for the help page.
/// </summary>
public class HelpPageSampleGenerator
{
/// <summary>
/// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
/// </summary>
public HelpPageSampleGenerator()
{
ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
ActionSamples = new Dictionary<HelpPageSampleKey, object>();
SampleObjects = new Dictionary<Type, object>();
SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
{
DefaultSampleObjectFactory,
};
}
/// <summary>
/// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
/// </summary>
public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }
/// <summary>
/// Gets the objects that are used directly as samples for certain actions.
/// </summary>
public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }
/// <summary>
/// Gets the objects that are serialized as samples by the supported formatters.
/// </summary>
public IDictionary<Type, object> SampleObjects { get; internal set; }
/// <summary>
/// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
/// stopping when the factory successfully returns a non-<see langword="null"/> object.
/// </summary>
/// <remarks>
/// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
/// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
/// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
[SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
Justification = "This is an appropriate nesting of generic types")]
public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }
/// <summary>
/// Gets the request body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
{
return GetSample(api, SampleDirection.Request);
}
/// <summary>
/// Gets the response body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
{
return GetSample(api, SampleDirection.Response);
}
/// <summary>
/// Gets the request or response body samples.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The samples keyed by media type.</returns>
public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
{
if (api == null)
{
throw new ArgumentNullException("api");
}
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
var samples = new Dictionary<MediaTypeHeaderValue, object>();
// Use the samples provided directly for actions
var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
foreach (var actionSample in actionSamples)
{
samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
}
// Do the sample generation based on formatters only if an action doesn't return an HttpResponseMessage.
// Here we cannot rely on formatters because we don't know what's in the HttpResponseMessage, it might not even use formatters.
if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
{
object sampleObject = GetSampleObject(type);
foreach (var formatter in formatters)
{
foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
{
if (!samples.ContainsKey(mediaType))
{
object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);
// If no sample found, try generate sample using formatter and sample object
if (sample == null && sampleObject != null)
{
sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
}
samples.Add(mediaType, WrapSampleIfString(sample));
}
}
}
}
return samples;
}
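// A minimal usage sketch (hypothetical wiring; assumes 'config' is the application's HttpConfiguration
// and that the standard Web API ApiExplorer is registered):
//
//   var generator = new HelpPageSampleGenerator();
//   foreach (var api in config.Services.GetApiExplorer().ApiDescriptions)
//   {
//       IDictionary<MediaTypeHeaderValue, object> requests = generator.GetSampleRequests(api);
//       IDictionary<MediaTypeHeaderValue, object> responses = generator.GetSampleResponses(api);
//   }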
/// <summary>
/// Search for samples that are provided directly through <see cref="ActionSamples"/>.
/// </summary>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="type">The CLR type.</param>
/// <param name="formatter">The formatter.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The sample that matches the parameters.</returns>
public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
{
object sample;
// First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
// If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
// If still not found, try to get the sample provided for the specified mediaType and type.
// Finally, try to get the sample provided for the specified mediaType.
if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
{
return sample;
}
return null;
}
/// <summary>
/// Gets the sample object that will be serialized by the formatters.
/// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
/// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
/// factories in <see cref="SampleObjectFactories"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>The sample object.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
public virtual object GetSampleObject(Type type)
{
object sampleObject;
if (!SampleObjects.TryGetValue(type, out sampleObject))
{
// No specific object available, try our factories.
foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
{
if (factory == null)
{
continue;
}
try
{
sampleObject = factory(this, type);
if (sampleObject != null)
{
break;
}
}
catch
{
// Ignore any problems encountered in the factory; go on to the next one (if any).
}
}
}
return sampleObject;
}
/// <summary>
/// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The type.</returns>
public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
{
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
}
/// <summary>
/// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
/// <param name="formatters">The formatters.</param>
[SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
{
if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
{
throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
}
if (api == null)
{
throw new ArgumentNullException("api");
}
Type type;
if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
{
// Re-compute the supported formatters based on type
Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
{
if (IsFormatSupported(sampleDirection, formatter, type))
{
newFormatters.Add(formatter);
}
}
formatters = newFormatters;
}
else
{
switch (sampleDirection)
{
case SampleDirection.Request:
ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
formatters = api.SupportedRequestBodyFormatters;
break;
case SampleDirection.Response:
default:
type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
formatters = api.SupportedResponseFormatters;
break;
}
}
return type;
}
/// <summary>
/// Writes the sample object using formatter.
/// </summary>
/// <param name="formatter">The formatter.</param>
/// <param name="value">The value.</param>
/// <param name="type">The type.</param>
/// <param name="mediaType">Type of the media.</param>
/// <returns>A <see cref="TextSample"/> containing the serialized content, or an <see cref="InvalidSample"/> describing why the sample could not be produced.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
{
if (formatter == null)
{
throw new ArgumentNullException("formatter");
}
if (mediaType == null)
{
throw new ArgumentNullException("mediaType");
}
object sample = String.Empty;
MemoryStream ms = null;
HttpContent content = null;
try
{
if (formatter.CanWriteType(type))
{
ms = new MemoryStream();
content = new ObjectContent(type, value, formatter, mediaType);
formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
ms.Position = 0;
StreamReader reader = new StreamReader(ms);
string serializedSampleString = reader.ReadToEnd();
if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
{
serializedSampleString = TryFormatXml(serializedSampleString);
}
else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
{
serializedSampleString = TryFormatJson(serializedSampleString);
}
sample = new TextSample(serializedSampleString);
}
else
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
mediaType,
formatter.GetType().Name,
type.Name));
}
}
catch (Exception e)
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
formatter.GetType().Name,
mediaType.MediaType,
UnwrapException(e).Message));
}
finally
{
if (ms != null)
{
ms.Dispose();
}
if (content != null)
{
content.Dispose();
}
}
return sample;
}
internal static Exception UnwrapException(Exception exception)
{
AggregateException aggregateException = exception as AggregateException;
if (aggregateException != null)
{
return aggregateException.Flatten().InnerException;
}
return exception;
}
// Default factory for sample objects
private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
{
// Try to create a default sample object
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type);
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatJson(string str)
{
try
{
object parsedJson = JsonConvert.DeserializeObject(str);
return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
}
catch
{
// can't parse JSON, return the original string
return str;
}
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatXml(string str)
{
try
{
XDocument xml = XDocument.Parse(str);
return xml.ToString();
}
catch
{
// can't parse XML, return the original string
return str;
}
}
private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
{
switch (sampleDirection)
{
case SampleDirection.Request:
return formatter.CanReadType(type);
case SampleDirection.Response:
return formatter.CanWriteType(type);
}
return false;
}
private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
{
HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
foreach (var sample in ActionSamples)
{
HelpPageSampleKey sampleKey = sample.Key;
if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
(sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
sampleDirection == sampleKey.SampleDirection)
{
yield return sample;
}
}
}
private static object WrapSampleIfString(object sample)
{
string stringSample = sample as string;
if (stringSample != null)
{
return new TextSample(stringSample);
}
return sample;
}
}
}
| |
// (c) Copyright 2012 Hewlett-Packard Development Company, L.P.
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
using System;
using System.Diagnostics;
using System.IO;
using System.Reflection;
using HpToolsLauncher.Properties;
namespace HpToolsLauncher
{
public class ApiTestRunner : IFileSysTestRunner
{
public const string STRunnerName = "ServiceTestExecuter.exe";
public const string STRunnerTestArg = @"-test";
public const string STRunnerReportArg = @"-report";
public const string STRunnerInputParamsArg = @"-inParams";
private const int PollingTimeMs = 500;
private bool _stCanRun;
private string _stExecuterPath = Directory.GetCurrentDirectory();
private readonly IAssetRunner _runner;
private TimeSpan _timeout = TimeSpan.MaxValue;
private Stopwatch _stopwatch = null;
private RunCancelledDelegate _runCancelled;
/// <summary>
/// constructor
/// </summary>
/// <param name="runner">parent runner</param>
/// <param name="timeout">the global timeout</param>
public ApiTestRunner(IAssetRunner runner, TimeSpan timeout)
{
_stopwatch = Stopwatch.StartNew();
_timeout = timeout;
_stCanRun = TrySetSTRunner();
_runner = runner;
}
/// <summary>
/// Search ServiceTestExecuter.exe in the current running process directory,
/// and if not found, in the installation folder (taken from registry)
/// </summary>
/// <returns></returns>
public bool TrySetSTRunner()
{
if (File.Exists(STRunnerName))
return true;
_stExecuterPath = Helper.GetSTInstallPath();
if (!String.IsNullOrEmpty(_stExecuterPath))
{
_stExecuterPath += "bin";
return true;
}
_stCanRun = false;
return false;
}
/// <summary>
/// runs the given test
/// </summary>
/// <param name="testinf"></param>
/// <param name="errorReason"></param>
/// <param name="runCancelled">cancellation delegate, holds the function that checks cancellation</param>
/// <returns></returns>
public TestRunResults RunTest(TestInfo testinf, ref string errorReason, RunCancelledDelegate runCancelled)
{
TestRunResults runDesc = new TestRunResults();
ConsoleWriter.ActiveTestRun = runDesc;
ConsoleWriter.WriteLine(DateTime.Now.ToString(Launcher.DateFormat) + " Running: " + testinf.TestPath);
runDesc.ReportLocation = testinf.TestPath;
runDesc.ErrorDesc = errorReason;
runDesc.TestPath = testinf.TestPath;
runDesc.TestState = TestState.Unknown;
if (!Helper.IsServiceTestInstalled())
{
runDesc.TestState = TestState.Error;
runDesc.ErrorDesc = string.Format(Resources.LauncherStNotInstalled, System.Environment.MachineName);
ConsoleWriter.WriteErrLine(runDesc.ErrorDesc);
Environment.ExitCode = (int)Launcher.ExitCodeEnum.Failed;
return runDesc;
}
_runCancelled = runCancelled;
if (!_stCanRun)
{
runDesc.TestState = TestState.Error;
runDesc.ErrorDesc = Resources.STExecuterNotFound;
return runDesc;
}
string fileName = Path.Combine(_stExecuterPath, STRunnerName);
if (!File.Exists(fileName))
{
runDesc.TestState = TestState.Error;
runDesc.ErrorDesc = Resources.STExecuterNotFound;
ConsoleWriter.WriteErrLine(Resources.STExecuterNotFound);
return runDesc;
}
//write the input parameter xml file for the API test
string paramFileName = Guid.NewGuid().ToString().Replace("-", string.Empty).Substring(0, 10);
string tempPath = Path.Combine(Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location), "TestParams");
Directory.CreateDirectory(tempPath);
string paramsFilePath = Path.Combine(tempPath, "params" + paramFileName + ".xml");
string paramFileContent = testinf.GenerateAPITestXmlForTest();
string argumentString = "";
if (!string.IsNullOrWhiteSpace(paramFileContent))
{
File.WriteAllText(paramsFilePath, paramFileContent);
argumentString = String.Format("{0} \"{1}\" {2} \"{3}\" {4} \"{5}\"", STRunnerTestArg, testinf.TestPath, STRunnerReportArg, runDesc.ReportLocation, STRunnerInputParamsArg, paramsFilePath);
}
else
{
argumentString = String.Format("{0} \"{1}\" {2} \"{3}\"", STRunnerTestArg, testinf.TestPath, STRunnerReportArg, runDesc.ReportLocation);
}
Stopwatch s = Stopwatch.StartNew();
runDesc.TestState = TestState.Running;
if (!ExecuteProcess(fileName,
argumentString,
ref errorReason))
{
runDesc.TestState = TestState.Error;
runDesc.ErrorDesc = errorReason;
}
else
{
runDesc.ReportLocation = Path.Combine(runDesc.ReportLocation, "Report");
if (!File.Exists(Path.Combine(runDesc.ReportLocation, "Results.xml")) && !File.Exists(Path.Combine(runDesc.ReportLocation, "run_results.html")))
{
runDesc.TestState = TestState.Error;
runDesc.ErrorDesc = "No Results.xml or run_results.html file found";
}
}
//File.Delete(paramsFilePath);
runDesc.Runtime = s.Elapsed;
return runDesc;
}
/// <summary>
/// performs global cleanup code for this type of runner
/// </summary>
public void CleanUp()
{
}
#region Process
/// <summary>
/// executes the test run by using the InitProcess and RunProcess routines
/// </summary>
/// <param name="fileName"></param>
/// <param name="arguments"></param>
/// <param name="failureReason"></param>
/// <returns>true if the process ran to completion with exit code 0; false if it was cancelled, failed, or threw</returns>
private bool ExecuteProcess(string fileName, string arguments, ref string failureReason)
{
Process proc = null;
try
{
using (proc = new Process())
{
InitProcess(proc, fileName, arguments, true);
RunProcess(proc, true);
//it could be that the process has already exited
//before we could handle the cancel request
if (_runCancelled())
{
failureReason = "Process was stopped since job has timed out!";
ConsoleWriter.WriteLine(failureReason);
if (!proc.HasExited)
{
proc.OutputDataReceived -= OnOutputDataReceived;
proc.ErrorDataReceived -= OnErrorDataReceived;
proc.Kill();
return false;
}
}
if (proc.ExitCode != 0)
{
failureReason = "The Api test runner's exit code was: " + proc.ExitCode;
ConsoleWriter.WriteLine(failureReason);
return false;
}
}
}
catch (Exception e)
{
failureReason = e.Message;
return false;
}
finally
{
if (proc != null)
{
proc.Close();
}
}
return true;
}
/// <summary>
/// initializes the ServiceTestExecuter process
/// </summary>
/// <param name="proc"></param>
/// <param name="fileName"></param>
/// <param name="arguments"></param>
/// <param name="enableRedirection"></param>
private void InitProcess(Process proc, string fileName, string arguments, bool enableRedirection)
{
var processStartInfo = new ProcessStartInfo
{
FileName = fileName,
Arguments = arguments,
WorkingDirectory = Directory.GetCurrentDirectory()
};
//assign the start info before the early return so the process is runnable even without redirection
proc.StartInfo = processStartInfo;
if (!enableRedirection) return;
processStartInfo.ErrorDialog = false;
processStartInfo.UseShellExecute = false;
processStartInfo.RedirectStandardOutput = true;
processStartInfo.RedirectStandardError = true;
proc.EnableRaisingEvents = true;
proc.StartInfo.CreateNoWindow = true;
proc.OutputDataReceived += OnOutputDataReceived;
proc.ErrorDataReceived += OnErrorDataReceived;
}
/// <summary>
/// runs the ServiceTestExecuter process after initialization
/// </summary>
/// <param name="proc"></param>
/// <param name="enableRedirection"></param>
private void RunProcess(Process proc, bool enableRedirection)
{
proc.Start();
if (enableRedirection)
{
proc.BeginOutputReadLine();
proc.BeginErrorReadLine();
}
proc.WaitForExit(PollingTimeMs);
while (!_runCancelled() && !proc.HasExited)
{
proc.WaitForExit(PollingTimeMs);
}
}
/// <summary>
/// callback function for spawned process errors
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void OnErrorDataReceived(object sender, DataReceivedEventArgs e)
{
var p = sender as Process;
if (p == null) return;
try
{
if (!p.HasExited || p.ExitCode == 0) return;
}
catch { return; }
string format = String.Format("{0} {1}: ", DateTime.Now.ToShortDateString(),
DateTime.Now.ToLongTimeString());
string errorData = e.Data;
if (String.IsNullOrEmpty(errorData))
{
errorData = String.Format("External process has exited with code {0}", p.ExitCode);
}
ConsoleWriter.WriteErrLine(format + errorData);
}
/// <summary>
/// callback function for spawned process output
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
private void OnOutputDataReceived(object sender, DataReceivedEventArgs e)
{
if (!String.IsNullOrEmpty(e.Data))
{
string data = e.Data;
ConsoleWriter.WriteLine(data);
}
}
#endregion
}
}
| |
// Code generated by Microsoft (R) AutoRest Code Generator 1.0.1.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace Microsoft.Azure.Graph.RBAC
{
using Microsoft.Azure;
using Microsoft.Azure.Graph;
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Microsoft.Rest.Azure.OData;
using Models;
using Newtonsoft.Json;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// UsersOperations operations.
/// </summary>
internal partial class UsersOperations : IServiceOperations<GraphRbacManagementClient>, IUsersOperations
{
/// <summary>
/// Initializes a new instance of the UsersOperations class.
/// </summary>
/// <param name='client'>
/// Reference to the service client.
/// </param>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
internal UsersOperations(GraphRbacManagementClient client)
{
if (client == null)
{
throw new System.ArgumentNullException("client");
}
Client = client;
}
/// <summary>
/// Gets a reference to the GraphRbacManagementClient
/// </summary>
public GraphRbacManagementClient Client { get; private set; }
/// <summary>
/// Create a new user.
/// </summary>
/// <param name='parameters'>
/// Parameters to create a user.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="GraphErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<User>> CreateWithHttpMessagesAsync(UserCreateParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (parameters == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "parameters");
}
if (parameters != null)
{
parameters.Validate();
}
if (Client.ApiVersion == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
}
if (Client.TenantID == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.TenantID");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("parameters", parameters);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "Create", tracingParameters);
}
// Construct URL
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "{tenantID}/users").ToString();
_url = _url.Replace("{tenantID}", System.Uri.EscapeDataString(Client.TenantID));
List<string> _queryParameters = new List<string>();
if (Client.ApiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("POST");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
if(parameters != null)
{
_requestContent = Rest.Serialization.SafeJsonConvert.SerializeObject(parameters, Client.SerializationSettings);
_httpRequest.Content = new StringContent(_requestContent, System.Text.Encoding.UTF8);
_httpRequest.Content.Headers.ContentType = System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
}
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 201)
{
var ex = new GraphErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
GraphError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<GraphError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse<User>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 201)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<User>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
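// Illustrative only: a minimal sketch of calling this operation, assuming an already
// authenticated GraphRbacManagementClient named 'client' (credential setup omitted) and
// assuming UserCreateParameters exposes the properties shown; property names may differ
// between SDK versions.
//
//   var createParams = new UserCreateParameters
//   {
//       AccountEnabled = true,
//       DisplayName = "Sample User",
//       MailNickname = "sampleuser",
//       UserPrincipalName = "sampleuser@contoso.onmicrosoft.com",
//       PasswordProfile = new PasswordProfile { Password = "<placeholder>" }
//   };
//   AzureOperationResponse<User> created =
//       await client.Users.CreateWithHttpMessagesAsync(createParams);
//   Console.WriteLine(created.Body.ObjectId);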
/// <summary>
/// Gets a list of users for the current tenant.
/// </summary>
/// <param name='odataQuery'>
/// OData parameters to apply to the operation.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="GraphErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<IPage<User>>> ListWithHttpMessagesAsync(ODataQuery<User> odataQuery = default(ODataQuery<User>), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (Client.ApiVersion == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
}
if (Client.TenantID == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.TenantID");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("odataQuery", odataQuery);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "List", tracingParameters);
}
// Construct URL
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "{tenantID}/users").ToString();
_url = _url.Replace("{tenantID}", System.Uri.EscapeDataString(Client.TenantID));
List<string> _queryParameters = new List<string>();
if (odataQuery != null)
{
var _odataFilter = odataQuery.ToString();
if (!string.IsNullOrEmpty(_odataFilter))
{
_queryParameters.Add(_odataFilter);
}
}
if (Client.ApiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new GraphErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
GraphError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<GraphError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse<IPage<User>>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<User>>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Gets user information from the directory.
/// </summary>
/// <param name='upnOrObjectId'>
/// The object ID or principal name of the user for which to get information.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="GraphErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<User>> GetWithHttpMessagesAsync(string upnOrObjectId, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (upnOrObjectId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "upnOrObjectId");
}
if (Client.ApiVersion == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
}
if (Client.TenantID == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.TenantID");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("upnOrObjectId", upnOrObjectId);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "Get", tracingParameters);
}
// Construct URL
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "{tenantID}/users/{upnOrObjectId}").ToString();
_url = _url.Replace("{upnOrObjectId}", upnOrObjectId);
_url = _url.Replace("{tenantID}", System.Uri.EscapeDataString(Client.TenantID));
List<string> _queryParameters = new List<string>();
if (Client.ApiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new GraphErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
GraphError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<GraphError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse<User>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<User>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Updates a user.
/// </summary>
/// <param name='upnOrObjectId'>
/// The object ID or principal name of the user to update.
/// </param>
/// <param name='parameters'>
/// Parameters to update an existing user.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="GraphErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse> UpdateWithHttpMessagesAsync(string upnOrObjectId, UserUpdateParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (upnOrObjectId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "upnOrObjectId");
}
if (parameters == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "parameters");
}
if (Client.ApiVersion == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
}
if (Client.TenantID == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.TenantID");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("upnOrObjectId", upnOrObjectId);
tracingParameters.Add("parameters", parameters);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "Update", tracingParameters);
}
// Construct URL
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "{tenantID}/users/{upnOrObjectId}").ToString();
_url = _url.Replace("{upnOrObjectId}", upnOrObjectId);
_url = _url.Replace("{tenantID}", System.Uri.EscapeDataString(Client.TenantID));
List<string> _queryParameters = new List<string>();
if (Client.ApiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("PATCH");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
if(parameters != null)
{
_requestContent = Rest.Serialization.SafeJsonConvert.SerializeObject(parameters, Client.SerializationSettings);
_httpRequest.Content = new StringContent(_requestContent, System.Text.Encoding.UTF8);
_httpRequest.Content.Headers.ContentType = System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
}
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 204)
{
var ex = new GraphErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
GraphError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<GraphError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Delete a user.
/// </summary>
/// <param name='upnOrObjectId'>
/// The object ID or principal name of the user to delete.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="GraphErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string upnOrObjectId, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (upnOrObjectId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "upnOrObjectId");
}
if (Client.ApiVersion == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
}
if (Client.TenantID == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.TenantID");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("upnOrObjectId", upnOrObjectId);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "Delete", tracingParameters);
}
// Construct URL
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "{tenantID}/users/{upnOrObjectId}").ToString();
_url = _url.Replace("{upnOrObjectId}", upnOrObjectId);
_url = _url.Replace("{tenantID}", System.Uri.EscapeDataString(Client.TenantID));
List<string> _queryParameters = new List<string>();
if (Client.ApiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("DELETE");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 204)
{
var ex = new GraphErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
GraphError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<GraphError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
/// <summary>
/// Gets a collection that contains the object IDs of the groups of which the
/// user is a member.
/// </summary>
/// <param name='objectId'>
/// The object ID of the user for which to get group membership.
/// </param>
/// <param name='parameters'>
/// User filtering parameters.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="GraphErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<IEnumerable<string>>> GetMemberGroupsWithHttpMessagesAsync(string objectId, UserGetMemberGroupsParameters parameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (objectId == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "objectId");
}
if (parameters == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "parameters");
}
if (parameters != null)
{
parameters.Validate();
}
if (Client.ApiVersion == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
}
if (Client.TenantID == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.TenantID");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("objectId", objectId);
tracingParameters.Add("parameters", parameters);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "GetMemberGroups", tracingParameters);
}
// Construct URL
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "{tenantID}/users/{objectId}/getMemberGroups").ToString();
_url = _url.Replace("{objectId}", objectId);
_url = _url.Replace("{tenantID}", System.Uri.EscapeDataString(Client.TenantID));
List<string> _queryParameters = new List<string>();
if (Client.ApiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("POST");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
if(parameters != null)
{
_requestContent = Rest.Serialization.SafeJsonConvert.SerializeObject(parameters, Client.SerializationSettings);
_httpRequest.Content = new StringContent(_requestContent, System.Text.Encoding.UTF8);
_httpRequest.Content.Headers.ContentType = System.Net.Http.Headers.MediaTypeHeaderValue.Parse("application/json; charset=utf-8");
}
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new GraphErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
GraphError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<GraphError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse<IEnumerable<string>>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page1<string>>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
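// Illustrative only: a minimal sketch of requesting group membership for a user, assuming
// an authenticated client and assuming UserGetMemberGroupsParameters takes a
// securityEnabledOnly flag (true restricts the result to security-enabled groups); verify
// the constructor signature against the SDK version in use.
//
//   var groupsResponse = await client.Users.GetMemberGroupsWithHttpMessagesAsync(
//       "00000000-0000-0000-0000-000000000000",            // hypothetical user object ID
//       new UserGetMemberGroupsParameters(securityEnabledOnly: false));
//   foreach (string groupId in groupsResponse.Body)
//   {
//       Console.WriteLine(groupId);
//   }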
/// <summary>
/// Gets a list of users for the current tenant.
/// </summary>
/// <param name='nextLink'>
/// Next link for the list operation.
/// </param>
/// <param name='customHeaders'>
/// Headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="GraphErrorException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="ValidationException">
/// Thrown when a required parameter is null
/// </exception>
/// <exception cref="System.ArgumentNullException">
/// Thrown when a required parameter is null
/// </exception>
/// <return>
/// A response object containing the response body and response headers.
/// </return>
public async Task<AzureOperationResponse<IPage<User>>> ListNextWithHttpMessagesAsync(string nextLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken))
{
if (nextLink == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "nextLink");
}
if (Client.ApiVersion == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.ApiVersion");
}
if (Client.TenantID == null)
{
throw new ValidationException(ValidationRules.CannotBeNull, "this.Client.TenantID");
}
// Tracing
bool _shouldTrace = ServiceClientTracing.IsEnabled;
string _invocationId = null;
if (_shouldTrace)
{
_invocationId = ServiceClientTracing.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("nextLink", nextLink);
tracingParameters.Add("cancellationToken", cancellationToken);
ServiceClientTracing.Enter(_invocationId, this, "ListNext", tracingParameters);
}
// Construct URL
var _baseUrl = Client.BaseUri.AbsoluteUri;
var _url = new System.Uri(new System.Uri(_baseUrl + (_baseUrl.EndsWith("/") ? "" : "/")), "{tenantID}/{nextLink}").ToString();
_url = _url.Replace("{nextLink}", nextLink);
_url = _url.Replace("{tenantID}", System.Uri.EscapeDataString(Client.TenantID));
List<string> _queryParameters = new List<string>();
if (Client.ApiVersion != null)
{
_queryParameters.Add(string.Format("api-version={0}", System.Uri.EscapeDataString(Client.ApiVersion)));
}
if (_queryParameters.Count > 0)
{
_url += (_url.Contains("?") ? "&" : "?") + string.Join("&", _queryParameters);
}
// Create HTTP transport objects
var _httpRequest = new HttpRequestMessage();
HttpResponseMessage _httpResponse = null;
_httpRequest.Method = new HttpMethod("GET");
_httpRequest.RequestUri = new System.Uri(_url);
// Set Headers
if (Client.GenerateClientRequestId != null && Client.GenerateClientRequestId.Value)
{
_httpRequest.Headers.TryAddWithoutValidation("x-ms-client-request-id", System.Guid.NewGuid().ToString());
}
if (Client.AcceptLanguage != null)
{
if (_httpRequest.Headers.Contains("accept-language"))
{
_httpRequest.Headers.Remove("accept-language");
}
_httpRequest.Headers.TryAddWithoutValidation("accept-language", Client.AcceptLanguage);
}
if (customHeaders != null)
{
foreach(var _header in customHeaders)
{
if (_httpRequest.Headers.Contains(_header.Key))
{
_httpRequest.Headers.Remove(_header.Key);
}
_httpRequest.Headers.TryAddWithoutValidation(_header.Key, _header.Value);
}
}
// Serialize Request
string _requestContent = null;
// Set Credentials
if (Client.Credentials != null)
{
cancellationToken.ThrowIfCancellationRequested();
await Client.Credentials.ProcessHttpRequestAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
}
// Send Request
if (_shouldTrace)
{
ServiceClientTracing.SendRequest(_invocationId, _httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
_httpResponse = await Client.HttpClient.SendAsync(_httpRequest, cancellationToken).ConfigureAwait(false);
if (_shouldTrace)
{
ServiceClientTracing.ReceiveResponse(_invocationId, _httpResponse);
}
HttpStatusCode _statusCode = _httpResponse.StatusCode;
cancellationToken.ThrowIfCancellationRequested();
string _responseContent = null;
if ((int)_statusCode != 200)
{
var ex = new GraphErrorException(string.Format("Operation returned an invalid status code '{0}'", _statusCode));
try
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
GraphError _errorBody = Rest.Serialization.SafeJsonConvert.DeserializeObject<GraphError>(_responseContent, Client.DeserializationSettings);
if (_errorBody != null)
{
ex.Body = _errorBody;
}
}
catch (JsonException)
{
// Ignore the exception
}
ex.Request = new HttpRequestMessageWrapper(_httpRequest, _requestContent);
ex.Response = new HttpResponseMessageWrapper(_httpResponse, _responseContent);
if (_shouldTrace)
{
ServiceClientTracing.Error(_invocationId, ex);
}
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw ex;
}
// Create Result
var _result = new AzureOperationResponse<IPage<User>>();
_result.Request = _httpRequest;
_result.Response = _httpResponse;
if (_httpResponse.Headers.Contains("x-ms-request-id"))
{
_result.RequestId = _httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
// Deserialize Response
if ((int)_statusCode == 200)
{
_responseContent = await _httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false);
try
{
_result.Body = Rest.Serialization.SafeJsonConvert.DeserializeObject<Page<User>>(_responseContent, Client.DeserializationSettings);
}
catch (JsonException ex)
{
_httpRequest.Dispose();
if (_httpResponse != null)
{
_httpResponse.Dispose();
}
throw new SerializationException("Unable to deserialize the response.", _responseContent, ex);
}
}
if (_shouldTrace)
{
ServiceClientTracing.Exit(_invocationId, _result);
}
return _result;
}
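// Illustrative only: a minimal sketch of paging through all users with List/ListNext,
// assuming the IPage<User> body exposes a NextPageLink property as in other AutoRest
// generated Azure clients; adjust to the paging surface of the SDK version in use.
//
//   var page = (await client.Users.ListWithHttpMessagesAsync()).Body;
//   while (true)
//   {
//       foreach (User user in page)
//       {
//           Console.WriteLine(user.UserPrincipalName);
//       }
//       if (string.IsNullOrEmpty(page.NextPageLink))
//       {
//           break;
//       }
//       page = (await client.Users.ListNextWithHttpMessagesAsync(page.NextPageLink)).Body;
//   }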
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using DotNetConfig;
using Palmmedia.ReportGenerator.Core.Logging;
using Palmmedia.ReportGenerator.Core.Properties;
namespace Palmmedia.ReportGenerator.Core
{
/// <summary>
/// Builder for <see cref="ReportConfiguration"/>.
/// Creates instances of <see cref="ReportConfiguration"/> based on command line parameters.
/// </summary>
public class ReportConfigurationBuilder
{
/// <summary>
/// The Logger.
/// </summary>
private static readonly ILogger Logger = LoggerFactory.GetLogger(typeof(ReportConfigurationBuilder));
/// <summary>
/// Creates a new instance of the <see cref="ReportConfiguration"/> class based on the given command line parameters.
/// </summary>
/// <param name="cliArguments">The command line arguments stored as key value pairs.</param>
/// <returns>The report configuration.</returns>
public ReportConfiguration Create(Dictionary<string, string> cliArguments)
{
var namedArguments = new Dictionary<string, string>(cliArguments, StringComparer.OrdinalIgnoreCase);
var config = Config.Build().GetSection(DotNetConfigSettingNames.SectionName);
var reportFilePatterns = Array.Empty<string>();
var targetDirectory = string.Empty;
var sourceDirectories = Array.Empty<string>();
string historyDirectory = null;
var reportTypes = Array.Empty<string>();
var plugins = Array.Empty<string>();
var assemblyFilters = Array.Empty<string>();
var classFilters = Array.Empty<string>();
var fileFilters = Array.Empty<string>();
string verbosityLevel = null;
string title = null;
string tag = null;
string value = null;
if (namedArguments.TryGetValue(CommandLineArgumentNames.Reports, out value))
{
reportFilePatterns = value.Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
}
else if (config.TryGetString(DotNetConfigSettingNames.Reports, out value))
{
reportFilePatterns = value.Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
}
else
{
reportFilePatterns = config
.GetAll(DotNetConfigSettingNames.Report)
.Select(x => x.RawValue)
.Where(x => !string.IsNullOrEmpty(x))
.ToArray();
}
if (namedArguments.TryGetValue(CommandLineArgumentNames.TargetDirectory, out value))
{
targetDirectory = value;
}
else if (config.TryGetString(DotNetConfigSettingNames.TargetDirectory, out value))
{
targetDirectory = value;
}
if (namedArguments.TryGetValue(CommandLineArgumentNames.SourceDirectories, out value))
{
sourceDirectories = value.Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
}
else if (config.TryGetString(DotNetConfigSettingNames.SourceDirectories, out value))
{
sourceDirectories = value.Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
}
else
{
sourceDirectories = config
.GetAll(DotNetConfigSettingNames.SourceDirectory)
.Select(x => x.RawValue)
.Where(x => !string.IsNullOrEmpty(x))
.ToArray();
}
if (namedArguments.TryGetValue(CommandLineArgumentNames.HistoryDirectory, out value))
{
historyDirectory = value;
}
else if (config.TryGetString(DotNetConfigSettingNames.HistoryDirectory, out value))
{
historyDirectory = value;
}
if (namedArguments.TryGetValue(CommandLineArgumentNames.ReportTypes, out value))
{
reportTypes = value.Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
}
else if (namedArguments.TryGetValue(CommandLineArgumentNames.ReportType, out value))
{
reportTypes = new[] { value };
}
else if (config.TryGetString(DotNetConfigSettingNames.ReportTypes, out value))
{
reportTypes = value.Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
}
else
{
reportTypes = config
.GetAll(DotNetConfigSettingNames.ReportType)
.Select(x => x.RawValue)
.Where(x => !string.IsNullOrEmpty(x))
.ToArray();
}
if (namedArguments.TryGetValue(CommandLineArgumentNames.Plugins, out value))
{
plugins = value.Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
}
else if (config.TryGetString(DotNetConfigSettingNames.Plugins, out value))
{
plugins = value.Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
}
else
{
plugins = config
.GetAll(DotNetConfigSettingNames.Plugin)
.Select(x => x.RawValue)
.Where(x => !string.IsNullOrEmpty(x))
.ToArray();
}
if (namedArguments.TryGetValue(CommandLineArgumentNames.AssemblyFilters, out value))
{
assemblyFilters = value.Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
}
else if (namedArguments.TryGetValue(CommandLineArgumentNames.Filters, out value))
{
assemblyFilters = value.Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
}
else if (config.TryGetString(DotNetConfigSettingNames.AssemblyFilters, out value))
{
assemblyFilters = value.Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
}
else
{
assemblyFilters = config
.GetAll(DotNetConfigSettingNames.AssemblyFilter)
.Select(x => x.RawValue)
.Where(x => !string.IsNullOrEmpty(x))
.ToArray();
}
if (namedArguments.TryGetValue(CommandLineArgumentNames.ClassFilters, out value))
{
classFilters = value.Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
}
else if (config.TryGetString(DotNetConfigSettingNames.ClassFilters, out value))
{
classFilters = value.Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
}
else
{
classFilters = config
.GetAll(DotNetConfigSettingNames.ClassFilter)
.Select(x => x.RawValue)
.Where(x => !string.IsNullOrEmpty(x))
.ToArray();
}
if (namedArguments.TryGetValue(CommandLineArgumentNames.FileFilters, out value))
{
fileFilters = value.Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
}
else if (config.TryGetString(DotNetConfigSettingNames.FileFilters, out value))
{
fileFilters = value.Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries);
}
else
{
fileFilters = config
.GetAll(DotNetConfigSettingNames.FileFilter)
.Select(x => x.RawValue)
.Where(x => !string.IsNullOrEmpty(x))
.ToArray();
}
if (namedArguments.TryGetValue(CommandLineArgumentNames.Verbosity, out value))
{
verbosityLevel = value;
}
else if (config.TryGetString(DotNetConfigSettingNames.Verbosity, out value))
{
verbosityLevel = value;
}
if (namedArguments.TryGetValue(CommandLineArgumentNames.Title, out value))
{
title = value;
}
else if (config.TryGetString(DotNetConfigSettingNames.Title, out value))
{
title = value;
}
if (namedArguments.TryGetValue(CommandLineArgumentNames.Tag, out value))
{
tag = value;
}
else if (config.TryGetString(DotNetConfigSettingNames.Tag, out value))
{
tag = value;
}
return new ReportConfiguration(
reportFilePatterns,
targetDirectory,
sourceDirectories,
historyDirectory,
reportTypes,
plugins,
assemblyFilters,
classFilters,
fileFilters,
verbosityLevel,
tag,
title);
}
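// Illustrative only: a minimal sketch of building a configuration from pre-parsed command
// line values; the keys mirror the CommandLineArgumentNames constants (matched
// case-insensitively above) and the values are placeholders.
//
//   var builder = new ReportConfigurationBuilder();
//   ReportConfiguration configuration = builder.Create(new Dictionary<string, string>
//   {
//       ["reports"] = "coverage/*.xml",
//       ["targetdir"] = "coveragereport",
//       ["reporttypes"] = "Html;TextSummary"
//   });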
/// <summary>
/// Creates a new instance of the <see cref="ReportConfiguration"/> class based on the given command line arguments.
/// </summary>
/// <param name="args">The command line arguments.</param>
/// <returns>The report configuration.</returns>
internal ReportConfiguration Create(string[] args)
{
if (args == null)
{
throw new ArgumentNullException(nameof(args));
}
var namedArguments = new Dictionary<string, string>();
foreach (var arg in args)
{
var match = CommandLineArgumentNames.CommandLineParameterRegex.Match(arg);
if (match.Success)
{
if (namedArguments.ContainsKey(match.Groups["key"].Value))
{
Logger.WarnFormat(Resources.DuplicateCommandLineParameter, match.Groups["key"].Value, namedArguments[match.Groups["key"].Value]);
}
else
{
if (CommandLineArgumentNames.IsValid(match.Groups["key"].Value))
{
namedArguments[match.Groups["key"].Value] = match.Groups["value"].Value;
}
else
{
Logger.WarnFormat(Resources.UnknownCommandLineParameter, match.Groups["key"].Value);
}
}
}
}
return this.Create(namedArguments);
}
}
}
| |
using Ocelot.DownstreamRouteFinder.UrlMatcher;
using Ocelot.Responses;
using Ocelot.Values;
using Shouldly;
using TestStack.BDDfy;
using Xunit;
namespace Ocelot.UnitTests.DownstreamRouteFinder.UrlMatcher
{
public class RegExUrlMatcherTests
{
private readonly IUrlPathToUrlTemplateMatcher _urlMatcher;
private string _path;
private string _downstreamPathTemplate;
private Response<UrlMatch> _result;
private string _queryString;
private bool _containsQueryString;
public RegExUrlMatcherTests()
{
_urlMatcher = new RegExUrlMatcher();
}
[Fact]
public void should_not_match()
{
this.Given(x => x.GivenIHaveAUpstreamPath("/api/v1/aaaaaaaaa/cards"))
.And(x => x.GivenIHaveAnUpstreamUrlTemplatePattern("^(?i)/api/v[^/]+/cards$"))
.When(x => x.WhenIMatchThePaths())
.And(x => x.ThenTheResultIsFalse())
.BDDfy();
}
[Fact]
public void should_match()
{
this.Given(x => x.GivenIHaveAUpstreamPath("/api/v1/cards"))
.And(x => x.GivenIHaveAnUpstreamUrlTemplatePattern("^(?i)/api/v[^/]+/cards$"))
.When(x => x.WhenIMatchThePaths())
.And(x => x.ThenTheResultIsTrue())
.BDDfy();
}
[Fact]
public void should_match_path_with_no_query_string()
{
const string regExForwardSlashAndOnePlaceHolder = "^(?i)/newThing$";
this.Given(x => x.GivenIHaveAUpstreamPath("/newThing"))
.And(_ => GivenIHaveAQueryString("?DeviceType=IphoneApp&Browser=moonpigIphone&BrowserString=-&CountryCode=123&DeviceName=iPhone 5 (GSM+CDMA)&OperatingSystem=iPhone OS 7.1.2&BrowserVersion=3708AdHoc&ipAddress=-"))
.And(x => x.GivenIHaveAnUpstreamUrlTemplatePattern(regExForwardSlashAndOnePlaceHolder))
.When(x => x.WhenIMatchThePaths())
.And(x => x.ThenTheResultIsTrue())
.BDDfy();
}
[Fact]
public void should_match_query_string()
{
const string regExForwardSlashAndOnePlaceHolder = "^(?i)/api/subscriptions/[^/]+/updates\\?unitId=.+$";
this.Given(x => x.GivenIHaveAUpstreamPath("/api/subscriptions/1/updates"))
.And(_ => GivenIHaveAQueryString("?unitId=2"))
.And(x => x.GivenIHaveAnUpstreamUrlTemplatePattern(regExForwardSlashAndOnePlaceHolder))
.And(_ => GivenThereIsAQueryInTemplate())
.When(x => x.WhenIMatchThePaths())
.And(x => x.ThenTheResultIsTrue())
.BDDfy();
}
[Fact]
public void should_match_query_string_with_multiple_params()
{
const string regExForwardSlashAndOnePlaceHolder = "^(?i)/api/subscriptions/[^/]+/updates\\?unitId=.+&productId=.+$";
this.Given(x => x.GivenIHaveAUpstreamPath("/api/subscriptions/1/updates?unitId=2"))
.And(_ => GivenIHaveAQueryString("?unitId=2&productId=2"))
.And(x => x.GivenIHaveAnUpstreamUrlTemplatePattern(regExForwardSlashAndOnePlaceHolder))
.And(_ => GivenThereIsAQueryInTemplate())
.When(x => x.WhenIMatchThePaths())
.And(x => x.ThenTheResultIsTrue())
.BDDfy();
}
[Fact]
public void should_not_match_slash_because_we_need_to_match_something_after_it()
{
const string regExForwardSlashAndOnePlaceHolder = "^/[0-9a-zA-Z].+";
this.Given(x => x.GivenIHaveAUpstreamPath("/"))
.And(x => x.GivenIHaveAnUpstreamUrlTemplatePattern(regExForwardSlashAndOnePlaceHolder))
.When(x => x.WhenIMatchThePaths())
.And(x => x.ThenTheResultIsFalse())
.BDDfy();
}
[Fact]
public void should_not_match_forward_slash_only_regex()
{
this.Given(x => x.GivenIHaveAUpstreamPath("/working/"))
.And(x => x.GivenIHaveAnUpstreamUrlTemplatePattern("^/$"))
.When(x => x.WhenIMatchThePaths())
.And(x => x.ThenTheResultIsFalse())
.BDDfy();
}
[Fact]
public void should_not_match_issue_134()
{
this.Given(x => x.GivenIHaveAUpstreamPath("/api/vacancy/1/"))
.And(x => x.GivenIHaveAnUpstreamUrlTemplatePattern("^(?i)/vacancy/[^/]+/$"))
.When(x => x.WhenIMatchThePaths())
.And(x => x.ThenTheResultIsFalse())
.BDDfy();
}
[Fact]
public void should_match_forward_slash_only_regex()
{
this.Given(x => x.GivenIHaveAUpstreamPath("/"))
.And(x => x.GivenIHaveAnUpstreamUrlTemplatePattern("^/$"))
.When(x => x.WhenIMatchThePaths())
.And(x => x.ThenTheResultIsTrue())
.BDDfy();
}
[Fact]
public void should_find_match_when_template_smaller_than_valid_path()
{
this.Given(x => x.GivenIHaveAUpstreamPath("/api/products/2354325435624623464235"))
.And(x => x.GivenIHaveAnUpstreamUrlTemplatePattern("^/api/products/.+$"))
.When(x => x.WhenIMatchThePaths())
.And(x => x.ThenTheResultIsTrue())
.BDDfy();
}
[Fact]
public void should_not_find_match()
{
this.Given(x => x.GivenIHaveAUpstreamPath("/api/values"))
.And(x => x.GivenIHaveAnUpstreamUrlTemplatePattern("^/$"))
.When(x => x.WhenIMatchThePaths())
.And(x => x.ThenTheResultIsFalse())
.BDDfy();
}
[Fact]
public void can_match_down_stream_url()
{
this.Given(x => x.GivenIHaveAUpstreamPath(""))
.And(x => x.GivenIHaveAnUpstreamUrlTemplatePattern("^$"))
.When(x => x.WhenIMatchThePaths())
.And(x => x.ThenTheResultIsTrue())
.BDDfy();
}
[Fact]
public void can_match_down_stream_url_with_no_slash()
{
this.Given(x => x.GivenIHaveAUpstreamPath("api"))
.Given(x => x.GivenIHaveAnUpstreamUrlTemplatePattern("^api$"))
.When(x => x.WhenIMatchThePaths())
.Then(x => x.ThenTheResultIsTrue())
.BDDfy();
}
[Fact]
public void can_match_down_stream_url_with_one_slash()
{
this.Given(x => x.GivenIHaveAUpstreamPath("api/"))
.Given(x => x.GivenIHaveAnUpstreamUrlTemplatePattern("^api/$"))
.When(x => x.WhenIMatchThePaths())
.Then(x => x.ThenTheResultIsTrue())
.BDDfy();
}
[Fact]
public void can_match_down_stream_url_with_downstream_template()
{
this.Given(x => x.GivenIHaveAUpstreamPath("api/product/products/"))
.Given(x => x.GivenIHaveAnUpstreamUrlTemplatePattern("^api/product/products/$"))
.When(x => x.WhenIMatchThePaths())
.Then(x => x.ThenTheResultIsTrue())
.BDDfy();
}
[Fact]
public void can_match_down_stream_url_with_downstream_template_with_one_place_holder()
{
this.Given(x => x.GivenIHaveAUpstreamPath("api/product/products/1"))
.Given(x => x.GivenIHaveAnUpstreamUrlTemplatePattern("^api/product/products/.+$"))
.When(x => x.WhenIMatchThePaths())
.Then(x => x.ThenTheResultIsTrue())
.BDDfy();
}
[Fact]
public void can_match_down_stream_url_with_downstream_template_with_two_place_holders()
{
this.Given(x => x.GivenIHaveAUpstreamPath("api/product/products/1/2"))
.Given(x => x.GivenIHaveAnUpstreamUrlTemplatePattern("^api/product/products/[^/]+/.+$"))
.When(x => x.WhenIMatchThePaths())
.Then(x => x.ThenTheResultIsTrue())
.BDDfy();
}
[Fact]
public void can_match_down_stream_url_with_downstream_template_with_two_place_holders_separated_by_something()
{
this.Given(x => x.GivenIHaveAUpstreamPath("api/product/products/1/categories/2"))
.And(x => x.GivenIHaveAnUpstreamUrlTemplatePattern("^api/product/products/[^/]+/categories/.+$"))
.When(x => x.WhenIMatchThePaths())
.Then(x => x.ThenTheResultIsTrue())
.BDDfy();
}
[Fact]
public void can_match_down_stream_url_with_downstream_template_with_three_place_holders_separated_by_something()
{
this.Given(x => x.GivenIHaveAUpstreamPath("api/product/products/1/categories/2/variant/123"))
.And(x => x.GivenIHaveAnUpstreamUrlTemplatePattern("^api/product/products/[^/]+/categories/[^/]+/variant/.+$"))
.When(x => x.WhenIMatchThePaths())
.Then(x => x.ThenTheResultIsTrue())
.BDDfy();
}
[Fact]
public void can_match_down_stream_url_with_downstream_template_with_three_place_holders()
{
this.Given(x => x.GivenIHaveAUpstreamPath("api/product/products/1/categories/2/variant/"))
.And(x => x.GivenIHaveAnUpstreamUrlTemplatePattern("^api/product/products/[^/]+/categories/[^/]+/variant/$"))
.When(x => x.WhenIMatchThePaths())
.Then(x => x.ThenTheResultIsTrue())
.BDDfy();
}
[Fact]
public void should_ignore_case_sensitivity()
{
this.Given(x => x.GivenIHaveAUpstreamPath("API/product/products/1/categories/2/variant/"))
.And(x => x.GivenIHaveAnUpstreamUrlTemplatePattern("^(?i)api/product/products/[^/]+/categories/[^/]+/variant/$"))
.When(x => x.WhenIMatchThePaths())
.Then(x => x.ThenTheResultIsTrue())
.BDDfy();
}
[Fact]
public void should_respect_case_sensitivity()
{
this.Given(x => x.GivenIHaveAUpstreamPath("API/product/products/1/categories/2/variant/"))
.And(x => x.GivenIHaveAnUpstreamUrlTemplatePattern("^api/product/products/[^/]+/categories/[^/]+/variant/$"))
.When(x => x.WhenIMatchThePaths())
.Then(x => x.ThenTheResultIsFalse())
.BDDfy();
}
private void GivenIHaveAUpstreamPath(string path)
{
_path = path;
}
private void GivenIHaveAQueryString(string queryString)
{
_queryString = queryString;
}
private void GivenIHaveAnUpstreamUrlTemplatePattern(string downstreamUrlTemplate)
{
_downstreamPathTemplate = downstreamUrlTemplate;
}
private void WhenIMatchThePaths()
{
_result = _urlMatcher.Match(_path, _queryString, new UpstreamPathTemplate(_downstreamPathTemplate, 0, _containsQueryString, _downstreamPathTemplate));
}
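// Illustrative note (an assumption drawn from how these tests drive the API, not from Ocelot
// documentation): the matcher can also be exercised directly, passing the same four
// UpstreamPathTemplate arguments used in WhenIMatchThePaths above:
//
//   var matcher = new RegExUrlMatcher();
//   var response = matcher.Match("/api/v1/cards", null,
//       new UpstreamPathTemplate("^(?i)/api/v[^/]+/cards$", 0, false, "^(?i)/api/v[^/]+/cards$"));
//   // response.Data.Match is expected to be true, mirroring the should_match test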
private void ThenTheResultIsTrue()
{
_result.Data.Match.ShouldBeTrue();
}
private void ThenTheResultIsFalse()
{
_result.Data.Match.ShouldBeFalse();
}
private void GivenThereIsAQueryInTemplate()
{
_containsQueryString = true;
}
}
}
| |
//
// Encog(tm) Core v3.3 - .Net Version
// http://www.heatonresearch.com/encog/
//
// Copyright 2008-2014 Heaton Research, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// For more information on Heaton Research copyrights, licenses
// and trademarks visit:
// http://www.heatonresearch.com/copyright
//
using System;
using Encog.ML.EA.Population;
using Encog.ML;
using Encog.Util.Identity;
using Encog.Neural.NEAT.Training;
using Encog.ML.EA.Genome;
using Encog.Util.Obj;
using Encog.ML.EA.Codec;
using Encog.MathUtil.Randomize.Factory;
using Encog.ML.Data;
using Encog.Neural.HyperNEAT.Substrate;
using Encog.Engine.Network.Activation;
using Encog.Neural.HyperNEAT;
using Encog.ML.EA.Species;
using Encog.MathUtil.Randomize;
namespace Encog.Neural.NEAT
{
/// <summary>
/// A population for a NEAT or HyperNEAT system. This population holds the
/// genomes, substrate and other values for a NEAT or HyperNEAT network.
///
/// NeuroEvolution of Augmenting Topologies (NEAT) is a genetic algorithm for the
/// generation of evolving artificial neural networks. It was developed by Ken
/// Stanley while at The University of Texas at Austin.
///
/// -----------------------------------------------------------------------------
/// http://www.cs.ucf.edu/~kstanley/
/// Encog's NEAT implementation was drawn from the following three Journal
/// Articles. For more complete BibTeX sources, see NEATNetwork.java.
///
/// Evolving Neural Networks Through Augmenting Topologies
///
/// Generating Large-Scale Neural Networks Through Discovering Geometric
/// Regularities
///
/// Automatic feature selection in neuroevolution
/// </summary>
[Serializable]
public class NEATPopulation : BasicPopulation, IMLError, IMLRegression
{
/// <summary>
/// The default survival rate.
/// </summary>
public const double DefaultSurvivalRate = 0.2;
/// <summary>
/// The activation function to use.
/// </summary>
public const String PropertyNEATActivation = "neatAct";
/// <summary>
/// Property tag for the population size.
/// </summary>
public const String PropertyPopulationSize = "populationSize";
/// <summary>
/// Property tag for the survival rate.
/// </summary>
public const String PropertySurvivalRate = "survivalRate";
/// <summary>
/// Default number of activation cycles.
/// </summary>
public const int DefaultCycles = 4;
/// <summary>
/// Property to hold the number of cycles.
/// </summary>
public const String PropertyCycles = "cycles";
/// <summary>
/// Clamp the weight so that it does not go out of the weight range.
/// </summary>
/// <param name="w">The weight value to clamp.</param>
/// <param name="weightRange">Specify the weight range. The range is from -weightRange to
/// +weightRange.</param>
/// <returns>The new weight value.</returns>
public static double ClampWeight(double w, double weightRange)
{
if (w < -weightRange)
{
return -weightRange;
}
if (w > weightRange)
{
return weightRange;
}
return w;
}
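// Illustrative sketch (not part of the original Encog source; the numbers are arbitrary
// example values): how ClampWeight keeps a weight inside [-weightRange, +weightRange].
//
//   ClampWeight(7.3, 5.0);   // returns  5.0 (clamped to the upper bound)
//   ClampWeight(-9.1, 5.0);  // returns -5.0 (clamped to the lower bound)
//   ClampWeight(1.2, 5.0);   // returns  1.2 (already inside the range)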
/// <summary>
/// The number of activation cycles that the networks produced by
/// this population will use.
/// </summary>
private int _activationCycles = NEATPopulation.DefaultCycles;
/// <summary>
/// Generate gene id's.
/// </summary>
private readonly IGenerateID _geneIdGenerate = new BasicGenerateID();
/// <summary>
/// Generate innovation id's.
/// </summary>
private readonly IGenerateID _innovationIdGenerate = new BasicGenerateID();
/// <summary>
/// A list of innovations, or null if this feature is not being used.
/// </summary>
public NEATInnovationList Innovations { get; set; }
/// <summary>
/// The weight range. Weights will be between -weight and +weight.
/// </summary>
public double WeightRange { get; set; }
/// <summary>
/// The best genome that we've currently decoded into the bestNetwork
/// property. If this value changes to point to a new genome reference then
/// the phenome will need to be recalculated.
/// </summary>
private IGenome _cachedBestGenome;
/// <summary>
/// The best network. If the population is used as an MLMethod, then
/// this network will represent the population.
/// </summary>
private NEATNetwork _bestNetwork;
/// <summary>
/// The number of input units. All members of the population must agree with
/// this number.
/// </summary>
public int InputCount { get; set; }
/// <summary>
/// The number of output units. All members of the population must agree with
/// this number.
/// </summary>
public int OutputCount { get; set; }
/// <summary>
/// The survival rate.
/// </summary>
public double SurvivalRate { get; set; }
/// <summary>
/// The substrate, if this is a HyperNEAT network.
/// </summary>
public Substrate CurrentSubstrate { get; set; }
/// <summary>
/// The activation functions that we can choose from.
/// </summary>
private readonly ChooseObject<IActivationFunction> _activationFunctions = new ChooseObject<IActivationFunction>();
/// <summary>
/// The CODEC used to decode the NEAT genomes into networks. Different
/// CODEC's are used for NEAT vs HyperNEAT.
/// </summary>
public IGeneticCODEC CODEC { get; set; }
/// <summary>
/// The initial connection density for the initial random population of
/// genomes.
/// </summary>
public double InitialConnectionDensity { get; set; }
/// <summary>
/// A factory to create random number generators.
/// </summary>
private IRandomFactory RandomNumberFactory { get; set; }
/// <summary>
/// An empty constructor for serialization.
/// </summary>
public NEATPopulation()
{
SurvivalRate = DefaultSurvivalRate;
WeightRange = 5;
InitialConnectionDensity = 0.1;
RandomNumberFactory = EncogFramework.Instance
.RandomFactory.FactorFactory();
}
/// <summary>
/// Construct a starting NEAT population. This constructor does not generate the initial
/// random population of genomes.
/// </summary>
/// <param name="inputCount">The input neuron count.</param>
/// <param name="outputCount">The output neuron count.</param>
/// <param name="populationSize">The population size.</param>
public NEATPopulation(int inputCount, int outputCount,
int populationSize)
: base(populationSize, null)
{
SurvivalRate = DefaultSurvivalRate;
WeightRange = 5;
InitialConnectionDensity = 0.1;
RandomNumberFactory = EncogFramework.Instance
.RandomFactory.FactorFactory();
InputCount = inputCount;
OutputCount = outputCount;
NEATActivationFunction = new ActivationSteepenedSigmoid();
if (populationSize == 0)
{
throw new NeuralNetworkError(
"Population must have more than zero genomes.");
}
}
/// <summary>
/// Construct a starting HyperNEAT population. This constructor does not generate the
/// initial random population of genomes.
/// </summary>
/// <param name="theSubstrate">The substrate ID.</param>
/// <param name="populationSize">The population size.</param>
public NEATPopulation(Substrate theSubstrate, int populationSize)
: base(populationSize, new FactorHyperNEATGenome())
{
SurvivalRate = DefaultSurvivalRate;
WeightRange = 5;
InitialConnectionDensity = 0.1;
RandomNumberFactory = EncogFramework.Instance
.RandomFactory.FactorFactory();
CurrentSubstrate = theSubstrate;
InputCount = 6;
OutputCount = 2;
HyperNEATGenome.BuildCPPNActivationFunctions(_activationFunctions);
}
/// <summary>
/// A newly generated gene id.
/// </summary>
/// <returns>A newly generated gene id.</returns>
public long AssignGeneId()
{
return _geneIdGenerate.Generate();
}
/// <summary>
/// Assign an innovation id.
/// </summary>
/// <returns>A newly generated innovation id.</returns>
public long AssignInnovationId()
{
return _innovationIdGenerate.Generate();
}
/// <inheritdoc/>
public double CalculateError(IMLDataSet data)
{
UpdateBestNetwork();
return _bestNetwork.CalculateError(data);
}
/// <inheritdoc/>
public IMLData Compute(IMLData input)
{
UpdateBestNetwork();
return _bestNetwork.Compute(input);
}
/// <summary>
/// The activation cycles.
/// </summary>
public int ActivationCycles
{
get
{
return _activationCycles;
}
set
{
_activationCycles = value;
}
}
/// <summary>
/// The activation functions.
/// </summary>
public ChooseObject<IActivationFunction> ActivationFunctions
{
get
{
return _activationFunctions;
}
}
/// <summary>
/// Generate a gene id.
/// </summary>
public IGenerateID GeneIdGenerate
{
get
{
return _geneIdGenerate;
}
}
/// <inheritdoc/>
public INEATGenomeFactory GenomeFactory
{
get
{
return (INEATGenomeFactory)base.GenomeFactory;
}
set
{
base.GenomeFactory = value;
}
}
/// <summary>
/// Innovation id generator.
/// </summary>
public IGenerateID InnovationIDGenerate
{
get
{
return _innovationIdGenerate;
}
}
/// <summary>
/// Returns true if this is a HyperNEAT population.
/// </summary>
public bool IsHyperNEAT
{
get
{
return CurrentSubstrate != null;
}
}
/// <summary>
/// Create an initial random population.
/// </summary>
public void Reset()
{
// create the genome factory
if (IsHyperNEAT)
{
CODEC = new HyperNEATCODEC();
GenomeFactory = new FactorHyperNEATGenome();
}
else
{
CODEC = new NEATCODEC();
GenomeFactory = new FactorNEATGenome();
}
// create the new genomes
Species.Clear();
// reset counters
GeneIdGenerate.CurrentID = 1;
InnovationIDGenerate.CurrentID = 1;
EncogRandom rnd = RandomNumberFactory.Factor();
// create one default species
BasicSpecies defaultSpecies = new BasicSpecies();
defaultSpecies.Population = this;
// create the initial population
for (int i = 0; i < PopulationSize; i++)
{
NEATGenome genome = GenomeFactory.Factor(rnd, this,
InputCount, OutputCount,
InitialConnectionDensity);
defaultSpecies.Add(genome);
}
defaultSpecies.Leader = defaultSpecies.Members[0];
Species.Add(defaultSpecies);
// create initial innovations
Innovations = new NEATInnovationList(this);
}
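// Illustrative sketch (assumed usage, not part of the original Encog source; the counts are
// arbitrary example values): building an initial random NEAT population.
//
//   var population = new NEATPopulation(2, 1, 150);  // 2 inputs, 1 output, 150 genomes
//   population.Reset();  // creates the CODEC, genome factory, one default species,
//                        // the random genomes and the initial innovation list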
/// <summary>
/// Specify to use a single activation function. This is typically the case
/// for NEAT, but not for HyperNEAT.
/// </summary>
public IActivationFunction NEATActivationFunction
{
set
{
_activationFunctions.Clear();
_activationFunctions.Add(1.0, value);
_activationFunctions.FinalizeStructure();
}
}
/// <summary>
/// See if the best genome has changed, and decode a new best network, if
/// needed.
/// </summary>
private void UpdateBestNetwork()
{
if (BestGenome != _cachedBestGenome)
{
_cachedBestGenome = BestGenome;
_bestNetwork = (NEATNetwork)CODEC.Decode(BestGenome);
}
}
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net;
using System.Net.Http;
using System.Threading;
using System.Threading.Tasks;
using Hyak.Common;
using Microsoft.Azure;
using Microsoft.Azure.Management.Resources;
using Microsoft.Azure.Management.Resources.Models;
namespace Microsoft.Azure.Management.Resources
{
public partial class ResourceManagementClient : ServiceClient<ResourceManagementClient>, IResourceManagementClient
{
private string _apiVersion;
/// <summary>
/// Gets the API version.
/// </summary>
public string ApiVersion
{
get { return this._apiVersion; }
}
private Uri _baseUri;
/// <summary>
/// Gets the URI used as the base for all cloud service requests.
/// </summary>
public Uri BaseUri
{
get { return this._baseUri; }
}
private SubscriptionCloudCredentials _credentials;
/// <summary>
/// Gets subscription credentials which uniquely identify Microsoft
/// Azure subscription. The subscription ID forms part of the URI for
/// every service call.
/// </summary>
public SubscriptionCloudCredentials Credentials
{
get { return this._credentials; }
}
private int _longRunningOperationInitialTimeout;
/// <summary>
/// Gets or sets the initial timeout for Long Running Operations.
/// </summary>
public int LongRunningOperationInitialTimeout
{
get { return this._longRunningOperationInitialTimeout; }
set { this._longRunningOperationInitialTimeout = value; }
}
private int _longRunningOperationRetryTimeout;
/// <summary>
/// Gets or sets the retry timeout for Long Running Operations.
/// </summary>
public int LongRunningOperationRetryTimeout
{
get { return this._longRunningOperationRetryTimeout; }
set { this._longRunningOperationRetryTimeout = value; }
}
private IDeploymentOperationOperations _deploymentOperations;
/// <summary>
/// Operations for managing deployment operations.
/// </summary>
public virtual IDeploymentOperationOperations DeploymentOperations
{
get { return this._deploymentOperations; }
}
private IDeploymentOperations _deployments;
/// <summary>
/// Operations for managing deployments.
/// </summary>
public virtual IDeploymentOperations Deployments
{
get { return this._deployments; }
}
private IProviderOperations _providers;
/// <summary>
/// Operations for managing providers.
/// </summary>
public virtual IProviderOperations Providers
{
get { return this._providers; }
}
private IProviderOperationsMetadataOperations _providerOperationsMetadata;
/// <summary>
/// Operations for getting provider operations metadata.
/// </summary>
public virtual IProviderOperationsMetadataOperations ProviderOperationsMetadata
{
get { return this._providerOperationsMetadata; }
}
private IResourceGroupOperations _resourceGroups;
/// <summary>
/// Operations for managing resource groups.
/// </summary>
public virtual IResourceGroupOperations ResourceGroups
{
get { return this._resourceGroups; }
}
private IResourceOperations _resources;
/// <summary>
/// Operations for managing resources.
/// </summary>
public virtual IResourceOperations Resources
{
get { return this._resources; }
}
private IResourceProviderOperationDetailsOperations _resourceProviderOperationDetails;
/// <summary>
/// Operations for managing Resource provider operations.
/// </summary>
public virtual IResourceProviderOperationDetailsOperations ResourceProviderOperationDetails
{
get { return this._resourceProviderOperationDetails; }
}
private ITagOperations _tags;
/// <summary>
/// Operations for managing tags.
/// </summary>
public virtual ITagOperations Tags
{
get { return this._tags; }
}
/// <summary>
/// Initializes a new instance of the ResourceManagementClient class.
/// </summary>
public ResourceManagementClient()
: base()
{
this._deploymentOperations = new DeploymentOperationOperations(this);
this._deployments = new DeploymentOperations(this);
this._providers = new ProviderOperations(this);
this._providerOperationsMetadata = new ProviderOperationsMetadataOperations(this);
this._resourceGroups = new ResourceGroupOperations(this);
this._resources = new ResourceOperations(this);
this._resourceProviderOperationDetails = new ResourceProviderOperationDetailsOperations(this);
this._tags = new TagOperations(this);
this._apiVersion = "2014-04-01-preview";
this._longRunningOperationInitialTimeout = -1;
this._longRunningOperationRetryTimeout = -1;
this.HttpClient.Timeout = TimeSpan.FromSeconds(300);
}
/// <summary>
/// Initializes a new instance of the ResourceManagementClient class.
/// </summary>
/// <param name='credentials'>
/// Required. Gets subscription credentials which uniquely identify
/// Microsoft Azure subscription. The subscription ID forms part of
/// the URI for every service call.
/// </param>
/// <param name='baseUri'>
/// Optional. Gets the URI used as the base for all cloud service
/// requests.
/// </param>
public ResourceManagementClient(SubscriptionCloudCredentials credentials, Uri baseUri)
: this()
{
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
this._credentials = credentials;
this._baseUri = baseUri;
this.Credentials.InitializeServiceClient(this);
}
/// <summary>
/// Initializes a new instance of the ResourceManagementClient class.
/// </summary>
/// <param name='credentials'>
/// Required. Gets subscription credentials which uniquely identify
/// Microsoft Azure subscription. The subscription ID forms part of
/// the URI for every service call.
/// </param>
public ResourceManagementClient(SubscriptionCloudCredentials credentials)
: this()
{
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
this._credentials = credentials;
this._baseUri = new Uri("https://management.azure.com/");
this.Credentials.InitializeServiceClient(this);
}
/// <summary>
/// Initializes a new instance of the ResourceManagementClient class.
/// </summary>
/// <param name='httpClient'>
/// The Http client
/// </param>
public ResourceManagementClient(HttpClient httpClient)
: base(httpClient)
{
this._deploymentOperations = new DeploymentOperationOperations(this);
this._deployments = new DeploymentOperations(this);
this._providers = new ProviderOperations(this);
this._providerOperationsMetadata = new ProviderOperationsMetadataOperations(this);
this._resourceGroups = new ResourceGroupOperations(this);
this._resources = new ResourceOperations(this);
this._resourceProviderOperationDetails = new ResourceProviderOperationDetailsOperations(this);
this._tags = new TagOperations(this);
this._apiVersion = "2014-04-01-preview";
this._longRunningOperationInitialTimeout = -1;
this._longRunningOperationRetryTimeout = -1;
this.HttpClient.Timeout = TimeSpan.FromSeconds(300);
}
/// <summary>
/// Initializes a new instance of the ResourceManagementClient class.
/// </summary>
/// <param name='credentials'>
/// Required. Gets subscription credentials which uniquely identify
/// Microsoft Azure subscription. The subscription ID forms part of
/// the URI for every service call.
/// </param>
/// <param name='baseUri'>
/// Optional. Gets the URI used as the base for all cloud service
/// requests.
/// </param>
/// <param name='httpClient'>
/// The Http client
/// </param>
public ResourceManagementClient(SubscriptionCloudCredentials credentials, Uri baseUri, HttpClient httpClient)
: this(httpClient)
{
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
if (baseUri == null)
{
throw new ArgumentNullException("baseUri");
}
this._credentials = credentials;
this._baseUri = baseUri;
this.Credentials.InitializeServiceClient(this);
}
/// <summary>
/// Initializes a new instance of the ResourceManagementClient class.
/// </summary>
/// <param name='credentials'>
/// Required. Gets subscription credentials which uniquely identify
/// Microsoft Azure subscription. The subscription ID forms part of
/// the URI for every service call.
/// </param>
/// <param name='httpClient'>
/// The Http client
/// </param>
public ResourceManagementClient(SubscriptionCloudCredentials credentials, HttpClient httpClient)
: this(httpClient)
{
if (credentials == null)
{
throw new ArgumentNullException("credentials");
}
this._credentials = credentials;
this._baseUri = new Uri("https://management.azure.com/");
this.Credentials.InitializeServiceClient(this);
}
/// <summary>
/// Clones properties from current instance to another
/// ResourceManagementClient instance
/// </summary>
/// <param name='client'>
/// Instance of ResourceManagementClient to clone to
/// </param>
protected override void Clone(ServiceClient<ResourceManagementClient> client)
{
base.Clone(client);
if (client is ResourceManagementClient)
{
ResourceManagementClient clonedClient = ((ResourceManagementClient)client);
clonedClient._credentials = this._credentials;
clonedClient._baseUri = this._baseUri;
clonedClient._apiVersion = this._apiVersion;
clonedClient._longRunningOperationInitialTimeout = this._longRunningOperationInitialTimeout;
clonedClient._longRunningOperationRetryTimeout = this._longRunningOperationRetryTimeout;
clonedClient.Credentials.InitializeServiceClient(clonedClient);
}
}
/// <summary>
/// The Get Operation Status operation returns the status of the
/// specified operation. After calling an asynchronous operation, you
/// can call Get Operation Status to determine whether the operation
/// has succeeded, failed, or is still in progress.
/// </summary>
/// <param name='operationStatusLink'>
/// Required. Location value returned by the Begin operation.
/// </param>
/// <param name='cancellationToken'>
/// Cancellation token.
/// </param>
/// <returns>
/// A standard service response for long running operations.
/// </returns>
public async Task<LongRunningOperationResponse> GetLongRunningOperationStatusAsync(string operationStatusLink, CancellationToken cancellationToken)
{
// Validate
if (operationStatusLink == null)
{
throw new ArgumentNullException("operationStatusLink");
}
// Tracing
bool shouldTrace = TracingAdapter.IsEnabled;
string invocationId = null;
if (shouldTrace)
{
invocationId = TracingAdapter.NextInvocationId.ToString();
Dictionary<string, object> tracingParameters = new Dictionary<string, object>();
tracingParameters.Add("operationStatusLink", operationStatusLink);
TracingAdapter.Enter(invocationId, this, "GetLongRunningOperationStatusAsync", tracingParameters);
}
// Construct URL
string url = "";
url = url + operationStatusLink;
url = url.Replace(" ", "%20");
// Create HTTP transport objects
HttpRequestMessage httpRequest = null;
try
{
httpRequest = new HttpRequestMessage();
httpRequest.Method = HttpMethod.Get;
httpRequest.RequestUri = new Uri(url);
// Set Headers
httpRequest.Headers.Add("x-ms-version", "2014-04-01-preview");
// Set Credentials
cancellationToken.ThrowIfCancellationRequested();
await this.Credentials.ProcessHttpRequestAsync(httpRequest, cancellationToken).ConfigureAwait(false);
// Send Request
HttpResponseMessage httpResponse = null;
try
{
if (shouldTrace)
{
TracingAdapter.SendRequest(invocationId, httpRequest);
}
cancellationToken.ThrowIfCancellationRequested();
httpResponse = await this.HttpClient.SendAsync(httpRequest, cancellationToken).ConfigureAwait(false);
if (shouldTrace)
{
TracingAdapter.ReceiveResponse(invocationId, httpResponse);
}
HttpStatusCode statusCode = httpResponse.StatusCode;
if (statusCode != HttpStatusCode.OK && statusCode != HttpStatusCode.Accepted && statusCode != HttpStatusCode.NoContent)
{
cancellationToken.ThrowIfCancellationRequested();
CloudException ex = CloudException.Create(httpRequest, null, httpResponse, await httpResponse.Content.ReadAsStringAsync().ConfigureAwait(false));
if (shouldTrace)
{
TracingAdapter.Error(invocationId, ex);
}
throw ex;
}
// Create Result
LongRunningOperationResponse result = null;
// Deserialize Response
result = new LongRunningOperationResponse();
result.StatusCode = statusCode;
if (httpResponse.Headers.Contains("x-ms-request-id"))
{
result.RequestId = httpResponse.Headers.GetValues("x-ms-request-id").FirstOrDefault();
}
if (statusCode == HttpStatusCode.Conflict)
{
result.Status = OperationStatus.Failed;
}
if (statusCode == HttpStatusCode.NoContent)
{
result.Status = OperationStatus.Succeeded;
}
if (statusCode == HttpStatusCode.OK)
{
result.Status = OperationStatus.Succeeded;
}
if (shouldTrace)
{
TracingAdapter.Exit(invocationId, result);
}
return result;
}
finally
{
if (httpResponse != null)
{
httpResponse.Dispose();
}
}
}
finally
{
if (httpRequest != null)
{
httpRequest.Dispose();
}
}
}
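// Illustrative sketch (assumed usage, not part of the generated client; "client" and the
// 30 second delay are example values): polling a long running operation until it reports
// Succeeded or Failed.
//
//   LongRunningOperationResponse status;
//   do
//   {
//       status = await client.GetLongRunningOperationStatusAsync(operationStatusLink, CancellationToken.None);
//       if (status.Status != OperationStatus.Succeeded && status.Status != OperationStatus.Failed)
//       {
//           await Task.Delay(TimeSpan.FromSeconds(30));
//       }
//   }
//   while (status.Status != OperationStatus.Succeeded && status.Status != OperationStatus.Failed);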
}
}
| |
// Copyright (c) Microsoft Corporation. All rights reserved. See License.txt in the project root for license information.
using System.Windows.Controls;
using System.Windows;
using System.Windows.Media;
using System.Collections.ObjectModel;
using System;
using System.Windows.Data;
using System.Collections;
using System.ComponentModel;
using System.Collections.Specialized;
namespace StockTraderRI.ChartControls
{
public class ContinuousAxisPanel : Panel
{
public ContinuousAxisPanel()
{
_largestLabelSize = new Size();
SetValue(ItemsSourceKey, new ObservableCollection<String>());
YValues = new ObservableCollection<double>();
SetValue(TickPositionsKey, new ObservableCollection<double>());
}
protected override void OnInitialized(EventArgs e)
{
base.OnInitialized(e);
_parentControl = ((ContinuousAxis)((FrameworkElement)VisualTreeHelper.GetParent(this)).TemplatedParent);
if (_parentControl != null)
{
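// Wire this panel to its templated parent ContinuousAxis: the control's source values and
// reference line separation are bound into the panel, while the generated labels, computed
// output values, tick positions and origin are bound back out to the control.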
Binding valueBinding = new Binding();
valueBinding.Source = _parentControl;
valueBinding.Path = new PropertyPath(ContinuousAxis.SourceValuesProperty);
this.SetBinding(ContinuousAxisPanel.DataValuesProperty, valueBinding);
Binding itemsBinding = new Binding();
itemsBinding.Source = this;
itemsBinding.Path = new PropertyPath(ContinuousAxisPanel.ItemsSourceProperty);
_parentControl.SetBinding(ContinuousAxis.ItemsSourceProperty, itemsBinding);
Binding refLineBinding = new Binding();
refLineBinding.Source = _parentControl;
refLineBinding.Path = new PropertyPath(ContinuousAxis.ReferenceLineSeperationProperty);
this.SetBinding(ContinuousAxisPanel.ReferenceLineSeperationProperty, refLineBinding);
Binding outputBinding = new Binding();
outputBinding.Source = this;
outputBinding.Path = new PropertyPath(ContinuousAxisPanel.YValuesProperty);
_parentControl.SetBinding(ContinuousAxis.ValuesProperty, outputBinding);
Binding tickPositionBinding = new Binding();
tickPositionBinding.Source = this;
tickPositionBinding.Path = new PropertyPath(ContinuousAxisPanel.TickPositionsProperty);
_parentControl.SetBinding(ContinuousAxis.TickPositionsProperty, tickPositionBinding);
Binding zerobinding = new Binding();
zerobinding.Source = this;
zerobinding.Path = new PropertyPath(ContinuousAxisPanel.OriginProperty);
_parentControl.SetBinding(ContinuousAxis.OriginProperty, zerobinding);
}
}
public static void OnDataValuesChanged(DependencyObject sender, DependencyPropertyChangedEventArgs e)
{
ContinuousAxisPanel p = sender as ContinuousAxisPanel;
if (p != null && p.DataValues != null)
{
((INotifyCollectionChanged)p.DataValues).CollectionChanged += new NotifyCollectionChangedEventHandler(p.Axis2Panel_CollectionChanged);
p.GenerateItemsSource();
}
}
public void Axis2Panel_CollectionChanged(object sender, NotifyCollectionChangedEventArgs e)
{
GenerateItemsSource();
}
private void GenerateItemsSource()
{
if (DataValues==null || DataValues.Count==0)
{
return;
}
CalculateValueIncrement(_arrangeSize);
ObservableCollection<String> tempItemsSource = ItemsSource;
tempItemsSource.Clear();
int referenceLinesCreated = 0;
while (referenceLinesCreated != _numReferenceLines)
{
if (Orientation.Equals(Orientation.Vertical))
tempItemsSource.Add(((double)(_startingIncrement + referenceLinesCreated * _valueIncrement)).ToString());
else
tempItemsSource.Add(((double)(_startingIncrement + (_numReferenceLines - 1 - referenceLinesCreated) * _valueIncrement)).ToString());
referenceLinesCreated++;
}
_highValue = _startingIncrement + (_numReferenceLines - 1) * _valueIncrement;
_lowValue = _startingIncrement;
}
protected override Size MeasureOverride(Size availableSize)
{
_largestLabelSize.Height = 0.0;
_largestLabelSize.Width = 0.0;
UIElementCollection tempInternalChildren = InternalChildren;
for (int i = 0; i < tempInternalChildren.Count; i++)
{
tempInternalChildren[i].Measure(availableSize);
_largestLabelSize.Height = _largestLabelSize.Height > tempInternalChildren[i].DesiredSize.Height
? _largestLabelSize.Height : tempInternalChildren[i].DesiredSize.Height;
_largestLabelSize.Width = _largestLabelSize.Width > tempInternalChildren[i].DesiredSize.Width
? _largestLabelSize.Width : tempInternalChildren[i].DesiredSize.Width;
}
if (Orientation.Equals(Orientation.Vertical))
{
double fitAllLabelSize = _largestLabelSize.Height * InternalChildren.Count;
availableSize.Height = fitAllLabelSize < availableSize.Height ? fitAllLabelSize : availableSize.Height;
availableSize.Width = _largestLabelSize.Width;
}
else
{
double fitAllLabelsSize = _largestLabelSize.Width * InternalChildren.Count;
availableSize.Width = fitAllLabelsSize < availableSize.Width ? fitAllLabelsSize : availableSize.Width;
availableSize.Height = _largestLabelSize.Height;
}
return availableSize;
}
protected override Size ArrangeOverride(Size finalSize)
{
if (!_arrangeSize.Equals(finalSize))
{
_arrangeSize = finalSize;
GenerateItemsSource();
}
_arrangeSize = finalSize;
if (InternalChildren.Count > 0)
{
if (Orientation.Equals(Orientation.Vertical))
{
ArrangeVerticalLabels(finalSize);
CalculateYOutputValues(finalSize);
}
else
{
ArrangeHorizontalLabels(finalSize);
CalculateXOutputValues(finalSize);
}
}
return base.ArrangeOverride(finalSize);
}
private void ArrangeHorizontalLabels(Size constraint)
{
double rectWidth = _largestLabelSize.Width;
double rectHeight = _largestLabelSize.Height;
double increments = CalculatePixelIncrements(constraint, _largestLabelSize);
double start_width = constraint.Width - _largestLabelSize.Width / 2;
double end_width = start_width - (InternalChildren.Count - 1) * increments;
ObservableCollection<double> tempTickPositions = TickPositions;
if (start_width > end_width)
{
tempTickPositions.Clear();
Rect r = new Rect(start_width - rectWidth / 2, 0, rectWidth, rectHeight);
InternalChildren[0].Arrange(r);
tempTickPositions.Add(start_width);
int count = InternalChildren.Count - 1;
r = new Rect(start_width - count * increments - rectWidth / 2, 0, rectWidth, rectHeight);
InternalChildren[count].Arrange(r);
tempTickPositions.Add(start_width - count * increments);
if (constraint.Width > 3 * rectWidth)
{
_skipFactor = (int)Math.Ceiling((InternalChildren.Count - 2) / Math.Floor((constraint.Width - 2 * rectWidth) / rectWidth));
if ((InternalChildren.Count - 2) != 2.0)
_skipFactor = Math.Min(_skipFactor, (int)Math.Ceiling((double)(InternalChildren.Count - 2.0) / 2.0));
_canDisplayAllLabels = true;
if (_skipFactor > 1)
{
_canDisplayAllLabels = false;
}
for (int i = 2; i <= InternalChildren.Count - 1; i++)
{
tempTickPositions.Add(start_width - (i - 1) * increments);
if (_canDisplayAllLabels || (i + 1) % _skipFactor == 0)
{
r = new Rect(start_width - (i - 1) * increments - rectWidth / 2, 0, rectWidth, rectHeight);
InternalChildren[i-1].Arrange(r);
}
else
{
InternalChildren[i-1].Arrange(new Rect(0, 0, 0, 0));
}
}
}
}
}
private void ArrangeVerticalLabels(Size constraint)
{
double rectWidth = _largestLabelSize.Width;
double rectHeight = _largestLabelSize.Height;
double increments = CalculatePixelIncrements(constraint, _largestLabelSize);
double start_height = constraint.Height - _largestLabelSize.Height / 2;
double end_height = start_height - (InternalChildren.Count - 1) * increments;
ObservableCollection<double> tempTickPositions = TickPositions;
if(start_height > end_height)
{
tempTickPositions.Clear();
Rect r = new Rect(constraint.Width - rectWidth, (start_height - rectHeight / 2), rectWidth, rectHeight);
InternalChildren[0].Arrange(r);
tempTickPositions.Add(start_height);
int count = InternalChildren.Count-1;
r = new Rect(constraint.Width - rectWidth, (start_height - count*increments - rectHeight / 2), rectWidth, rectHeight);
InternalChildren[count].Arrange(r);
tempTickPositions.Add(start_height - count * increments);
if (constraint.Height > 3 * rectHeight)
{
_skipFactor = (int)Math.Ceiling((InternalChildren.Count - 2) / Math.Floor((constraint.Height - 2 * rectHeight) / rectHeight));
if ((InternalChildren.Count - 2) != 2.0)
_skipFactor = Math.Min(_skipFactor, (int)Math.Ceiling((double)(InternalChildren.Count - 2.0) / 2.0));
_canDisplayAllLabels = true;
if (_skipFactor > 1)
{
_canDisplayAllLabels = false;
}
for (int i = 2; i <= InternalChildren.Count-1; i++)
{
tempTickPositions.Add(start_height - (i - 1) * increments);
if (_canDisplayAllLabels || (i + 1) % _skipFactor == 0 )
{
r = new Rect(constraint.Width - rectWidth, (start_height - (i - 1) * increments - rectHeight / 2), rectWidth, rectHeight);
InternalChildren[i - 1].Arrange(r);
}
else
{
InternalChildren[i - 1].Arrange(new Rect(0, 0, 0, 0));
}
}
}
}
}
private void CalculateYOutputValues(Size constraint)
{
YValues.Clear();
double start_val, lowPixel, highPixel;
double pixelIncrement = CalculatePixelIncrements(constraint, _largestLabelSize);
if (Orientation.Equals(Orientation.Vertical))
{
start_val = constraint.Height - _largestLabelSize.Height / 2;
lowPixel = start_val - (InternalChildren.Count - 1) * pixelIncrement;
highPixel = start_val;
}
else
{
start_val = constraint.Width - _largestLabelSize.Width / 2;
lowPixel = start_val - (InternalChildren.Count - 1) * pixelIncrement;
highPixel = start_val;
}
if (highPixel < lowPixel)
return;
for (int i = 0; i < DataValues.Count; i++)
{
double outVal = highPixel - ((highPixel - lowPixel) / (_highValue - _lowValue)) * (DataValues[i] - _lowValue);
YValues.Add(outVal);
}
if (_startsAtZero || (!_allNegativeValues && !_allPositiveValues))
Origin = highPixel - ((highPixel - lowPixel) / (_highValue - _lowValue)) * (0.0 - _lowValue);
else if (!_startsAtZero && _allPositiveValues)
Origin = highPixel;
else
Origin = lowPixel;
}
private void CalculateXOutputValues(Size constraint)
{
YValues.Clear();
double start_width = constraint.Width - _largestLabelSize.Width / 2;
double pixelIncrement = CalculatePixelIncrements(constraint, _largestLabelSize);
double lowPixel = start_width - (InternalChildren.Count - 1) * pixelIncrement;
double highPixel = start_width;
if (highPixel < lowPixel)
return;
for (int i = 0; i < DataValues.Count; i++)
{
double output = lowPixel + ((highPixel - lowPixel) / (_highValue - _lowValue)) * (DataValues[i] - _lowValue);
YValues.Add(output);
}
if (_startsAtZero || (!_allNegativeValues && !_allPositiveValues))
Origin = lowPixel + ((highPixel - lowPixel) / (_highValue - _lowValue)) * (0.0 - _lowValue);
else if (!_startsAtZero && _allPositiveValues)
Origin = lowPixel;
else
Origin = highPixel;
}
/// <summary>
/// Calculate the pixel distance between adjacent tick marks on the axis, for either orientation.
/// </summary>
/// <param name="constraint">The size available to the panel.</param>
/// <param name="labelSize">The size of the largest label.</param>
/// <returns>The pixel distance between adjacent tick marks.</returns>
private double CalculatePixelIncrements(Size constraint, Size labelSize)
{
if(Orientation.Equals(Orientation.Vertical))
return (constraint.Height - _largestLabelSize.Height) / (_numReferenceLines - 1);
else
return (constraint.Width - _largestLabelSize.Width) / (_numReferenceLines - 1);
}
private double CalculateValueIncrement(Size size)
{
// Determine if the starting value is 0 or not
bool startsAtZero = false;
bool allPositiveValues = true;
bool allNegativeValues = true;
double increment_value = 0;
int multiplier = 1;
if (DataValues.Count == 0)
return 0.0;
//double low = ((DoubleHolder)DataValues[0]).DoubleValue;
//double high = ((DoubleHolder)DataValues[0]).DoubleValue;
double low = DataValues[0];
double high = DataValues[0];
for (int i = 0; i < DataValues.Count; i++)
{
//double temp = ((DoubleHolder)DataValues[i]).DoubleValue;
double temp = DataValues[i];
// Check for positive and negative values
if (temp > 0)
{
allNegativeValues = false;
}
else if (temp < 0)
{
allPositiveValues = false;
}
// Reset low and high if necessary
if (temp < low)
{
low = temp;
}
else if (temp > high)
{
high = temp;
}
}
// Determine whether or not the increments will start at zero
if (allPositiveValues && (low < (high / 2)) ||
(allNegativeValues && high > (low / 2)))
{
_startsAtZero = true;
startsAtZero = true;
}
// If all values in dataset are 0, draw one reference line and label it 0
if (high == 0 && low == 0)
{
_valueIncrement = 0;
_startingIncrement = 0;
_numReferenceLines = 1;
_startsAtZero = startsAtZero;
return increment_value;
}
// Find an increment value that is in the set {1*10^x, 2*10^x, 5*10^x, where x is an integer
// (positive, negative, or zero)}
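// For example, valid increments include ... 0.1, 0.2, 0.5, 1, 2, 5, 10, 20, 50 ...
// depending on the data range and on how many reference lines fit the available space.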
if (!allNegativeValues)
{
if (startsAtZero)
{
int exp = 0;
while (true)
{
multiplier = IsWithinRange(high, exp, size);
if (multiplier != -1)
{
break;
}
multiplier = IsWithinRange(high, (-1 * exp), size);
if (multiplier != -1)
{
exp = -1 * exp;
break;
}
exp++;
}
increment_value = multiplier * Math.Pow(10, exp);
}
else
{
int exp = 0;
while (true)
{
multiplier = IsWithinRange((high - low), exp, size);
if (multiplier != -1)
{
break;
}
multiplier = IsWithinRange((high - low), (-1 * exp), size);
if (multiplier != -1)
{
exp = -1 * exp;
break;
}
if (high == low)
{
increment_value = high;
_valueIncrement = increment_value;
_numReferenceLines = 1;
break;
}
exp++;
}
if (increment_value == 0)
{
increment_value = multiplier * Math.Pow(10, exp);
}
}
}
else
{
if (startsAtZero)
{
int exp = 0;
while (true)
{
multiplier = IsWithinRange(low, exp, size);
if (multiplier != -1)
{
break;
}
multiplier = IsWithinRange(low, (-1 * exp), size);
if (multiplier != -1)
{
exp = -1 * exp;
break;
}
exp++;
}
increment_value = multiplier * Math.Pow(10, exp);
}
else
{
int exp = 0;
if (low - high == 0.0)
increment_value = 1.0;
else
{
while (true)
{
multiplier = IsWithinRange((low - high), exp, size);
if (multiplier != -1)
{
break;
}
multiplier = IsWithinRange((low - high), (-1 * exp), size);
if (multiplier != -1)
{
exp = -1 * exp;
break;
}
exp++;
}
increment_value = multiplier * Math.Pow(10, exp);
}
}
}
double starting_value = 0;
// Determine starting value if it is nonzero
if (!startsAtZero)
{
if (allPositiveValues)
{
if (low % increment_value == 0)
{
starting_value = low;
}
else
{
starting_value = (int)(low / increment_value) * increment_value;
}
}
else
{
if (low % increment_value == 0)
{
starting_value = low;
}
else
{
starting_value = (int)((low - increment_value) / increment_value) * increment_value;
}
}
}
else if (startsAtZero && allNegativeValues)
{
if (low % increment_value == 0)
{
starting_value = low;
}
else
{
starting_value = (int)((low - increment_value) / increment_value) * increment_value;
}
}
// Determine the number of reference lines
//int numRefLines = 0;
int numRefLines = (int)Math.Ceiling((high - starting_value) / increment_value) + 1;
_valueIncrement = increment_value;
_startingIncrement = starting_value;
_numReferenceLines = numRefLines;
_startsAtZero = startsAtZero;
_allPositiveValues = allPositiveValues;
_allNegativeValues = allNegativeValues;
return increment_value;
}
/// <summary>
/// Checks whether the candidate increment (multiplier * 10^exponent) yields a number of
/// reference lines that fits the available space, and returns the multiplier that works.
/// </summary>
/// <param name="numerator">The value range to be divided into increments.</param>
/// <param name="exponent">The power of ten being tested.</param>
/// <param name="size">The size available to the panel.</param>
/// <returns>The multiplier used (1, 2 or 5), or -1 if no multiplier is within range.</returns>
private int IsWithinRange(double numerator, int exponent, Size size)
{
int highRange, lowRange;
// highRange = (int)Math.Min(10, (int)(size.Height / labelSize.Height)) -2;
if(Orientation.Equals(Orientation.Vertical))
highRange = (int)(size.Height / ReferenceLineSeperation);
else
highRange = (int)(size.Width / ReferenceLineSeperation);
lowRange = 1;
highRange = (int)Math.Max(highRange, 3);
if ((Math.Abs(numerator) / (1 * Math.Pow(10, exponent))) >= lowRange && (Math.Abs(numerator) / (1 * Math.Pow(10, exponent))) <= highRange)
{
return 1;
}
if ((Math.Abs(numerator) / (2 * Math.Pow(10, exponent))) >= lowRange && (Math.Abs(numerator) / (2 * Math.Pow(10, exponent))) <= highRange)
{
return 2;
}
if ((Math.Abs(numerator) / (5 * Math.Pow(10, exponent))) >= lowRange && (Math.Abs(numerator) / (5 * Math.Pow(10, exponent))) <= highRange)
{
return 5;
}
return -1;
}
public ObservableCollection<double> YValues
{
get { return (ObservableCollection<double>)GetValue(YValuesProperty); }
set { SetValue(YValuesProperty, value); }
}
// Using a DependencyProperty as the backing store for YValues. This enables animation, styling, binding, etc...
public static readonly DependencyProperty YValuesProperty =
DependencyProperty.Register("YValues", typeof(ObservableCollection<double>), typeof(ContinuousAxisPanel), new UIPropertyMetadata(null));
public ObservableCollection<String> ItemsSource
{
get { return (ObservableCollection<String>)GetValue(ItemsSourceProperty); }
}
// Using a DependencyProperty as the backing store for Axis2Panel. This enables animation, styling, binding, etc...
private static readonly DependencyPropertyKey ItemsSourceKey =
DependencyProperty.RegisterReadOnly("ItemsSource", typeof(ObservableCollection<String>), typeof(ContinuousAxisPanel), new UIPropertyMetadata());
public static readonly DependencyProperty ItemsSourceProperty = ItemsSourceKey.DependencyProperty;
private ObservableCollection<double> DataValues
{
get { return (ObservableCollection<double>)GetValue(DataValuesProperty); }
set { SetValue(DataValuesProperty, value); }
}
// Using a DependencyProperty as the backing store for DataValues. This enables animation, styling, binding, etc...
private static readonly DependencyProperty DataValuesProperty =
DependencyProperty.Register("DataValues", typeof(ObservableCollection<double>), typeof(ContinuousAxisPanel), new FrameworkPropertyMetadata(OnDataValuesChanged));
public ObservableCollection<double> TickPositions
{
get { return (ObservableCollection<double>)GetValue(TickPositionsProperty); }
//set { SetValue(TickPositionsProperty, value); }
}
// Using a DependencyProperty as the backing store for TickPositions. This enables animation, styling, binding, etc...
private static readonly DependencyPropertyKey TickPositionsKey =
DependencyProperty.RegisterReadOnly("TickPositions", typeof(ObservableCollection<double>), typeof(ContinuousAxisPanel), new UIPropertyMetadata(null));
public static readonly DependencyProperty TickPositionsProperty = TickPositionsKey.DependencyProperty;
public double Origin
{
get { return (double)GetValue(OriginProperty); }
set { SetValue(OriginProperty, value); }
}
// Using a DependencyProperty as the backing store for ZeroReferenceLinePosition. This enables animation, styling, binding, etc...
public static readonly DependencyProperty OriginProperty =
DependencyProperty.Register("Origin", typeof(double), typeof(ContinuousAxisPanel), new UIPropertyMetadata(0.0));
public Orientation Orientation
{
get { return (Orientation)GetValue(OrientationProperty); }
set { SetValue(OrientationProperty, value); }
}
// Using a DependencyProperty as the backing store for Orientation. This enables animation, styling, binding, etc...
public static readonly DependencyProperty OrientationProperty =
DependencyProperty.Register("Orientation", typeof(Orientation), typeof(ContinuousAxisPanel), new UIPropertyMetadata(Orientation.Vertical));
public double ReferenceLineSeperation
{
get { return (double)GetValue(ReferenceLineSeperationProperty); }
set { SetValue(ReferenceLineSeperationProperty, value); }
}
// Using a DependencyProperty as the backing store for ReferenceLineSeperation. This enables animation, styling, binding, etc...
public static readonly DependencyProperty ReferenceLineSeperationProperty =
DependencyProperty.Register("ReferenceLineSeperation", typeof(double), typeof(ContinuousAxisPanel), new UIPropertyMetadata(null));
private Size _largestLabelSize;
private bool _canDisplayAllLabels;
private int _skipFactor;
private double _lowValue, _highValue;
private Size _arrangeSize;
public ItemsControl _parentControl;
private bool _startsAtZero;
private double _startingIncrement;
private double _valueIncrement;
private int _numReferenceLines;
private bool _allPositiveValues;
private bool _allNegativeValues;
}
}
| |
/* ****************************************************************************
*
* Copyright (c) Microsoft Corporation.
*
* This source code is subject to terms and conditions of the Apache License, Version 2.0. A
* copy of the license can be found in the License.html file at the root of this distribution. If
* you cannot locate the Apache License, Version 2.0, please send an email to
* [email protected]. By using this source code in any fashion, you are agreeing to be bound
* by the terms of the Apache License, Version 2.0.
*
* You must not remove this notice, or any other, from this software.
*
*
* ***************************************************************************/
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Reflection;
using System.Dynamic;
using IronPython.Runtime.Binding;
using IronPython.Runtime.Types;
using Microsoft.Scripting;
using Microsoft.Scripting.Actions;
using Microsoft.Scripting.Generation;
using Microsoft.Scripting.Runtime;
using Microsoft.Scripting.Utils;
namespace IronPython.Runtime.Operations {
internal static class PythonTypeOps {
private static readonly Dictionary<FieldInfo, PythonTypeSlot> _fieldCache = new Dictionary<FieldInfo, PythonTypeSlot>();
private static readonly Dictionary<BuiltinFunction, BuiltinMethodDescriptor> _methodCache = new Dictionary<BuiltinFunction, BuiltinMethodDescriptor>();
private static readonly Dictionary<BuiltinFunction, ClassMethodDescriptor> _classMethodCache = new Dictionary<BuiltinFunction, ClassMethodDescriptor>();
internal static readonly Dictionary<BuiltinFunctionKey, BuiltinFunction> _functions = new Dictionary<BuiltinFunctionKey, BuiltinFunction>();
private static readonly Dictionary<ReflectionCache.MethodBaseCache, ConstructorFunction> _ctors = new Dictionary<ReflectionCache.MethodBaseCache, ConstructorFunction>();
private static readonly Dictionary<EventTracker, ReflectedEvent> _eventCache = new Dictionary<EventTracker, ReflectedEvent>();
internal static readonly Dictionary<PropertyTracker, ReflectedGetterSetter> _propertyCache = new Dictionary<PropertyTracker, ReflectedGetterSetter>();
internal static PythonTuple MroToPython(IList<PythonType> types) {
List<object> res = new List<object>(types.Count);
foreach (PythonType dt in types) {
if (dt.UnderlyingSystemType == typeof(ValueType)) continue; // hide value type
if(dt.OldClass != null) {
res.Add(dt.OldClass);
} else {
res.Add(dt);
}
}
return PythonTuple.Make(res);
}
internal static string GetModuleName(CodeContext/*!*/ context, Type type) {
Type curType = type;
while (curType != null) {
string moduleName;
if (PythonContext.GetContext(context).BuiltinModuleNames.TryGetValue(curType, out moduleName)) {
return moduleName;
}
curType = curType.DeclaringType;
}
FieldInfo modField = type.GetField("__module__");
if (modField != null && modField.IsLiteral && modField.FieldType == typeof(string)) {
return (string)modField.GetRawConstantValue();
}
return "builtins";
}
internal static object CallParams(CodeContext/*!*/ context, PythonType cls, params object[] args\u03c4) {
if (args\u03c4 == null) args\u03c4 = ArrayUtils.EmptyObjects;
return CallWorker(context, cls, args\u03c4);
}
internal static object CallWorker(CodeContext/*!*/ context, PythonType dt, object[] args) {
object newObject = PythonOps.CallWithContext(context, GetTypeNew(context, dt), ArrayUtils.Insert<object>(dt, args));
if (ShouldInvokeInit(dt, DynamicHelpers.GetPythonType(newObject), args.Length)) {
PythonOps.CallWithContext(context, GetInitMethod(context, dt, newObject), args);
AddFinalizer(context, dt, newObject);
}
return newObject;
}
internal static object CallWorker(CodeContext/*!*/ context, PythonType dt, IDictionary<string, object> kwArgs, object[] args) {
object[] allArgs = ArrayOps.CopyArray(args, kwArgs.Count + args.Length);
string[] argNames = new string[kwArgs.Count];
int i = args.Length;
foreach (KeyValuePair<string, object> kvp in kwArgs) {
allArgs[i] = kvp.Value;
argNames[i++ - args.Length] = kvp.Key;
}
return CallWorker(context, dt, new KwCallInfo(allArgs, argNames));
}
internal static object CallWorker(CodeContext/*!*/ context, PythonType dt, KwCallInfo args) {
object[] clsArgs = ArrayUtils.Insert<object>(dt, args.Arguments);
object newObject = PythonOps.CallWithKeywordArgs(context,
GetTypeNew(context, dt),
clsArgs,
args.Names);
if (newObject == null) return null;
if (ShouldInvokeInit(dt, DynamicHelpers.GetPythonType(newObject), args.Arguments.Length)) {
PythonOps.CallWithKeywordArgs(context, GetInitMethod(context, dt, newObject), args.Arguments, args.Names);
AddFinalizer(context, dt, newObject);
}
return newObject;
}
/// <summary>
/// Looks up __init__ avoiding calls to __getattribute__ and handling both
/// new-style and old-style classes in the MRO.
/// </summary>
private static object GetInitMethod(CodeContext/*!*/ context, PythonType dt, object newObject) {
// __init__ is never searched for w/ __getattribute__
for (int i = 0; i < dt.ResolutionOrder.Count; i++) {
PythonType cdt = dt.ResolutionOrder[i];
PythonTypeSlot dts;
object value;
if (cdt.IsOldClass) {
OldClass oc = PythonOps.ToPythonType(cdt) as OldClass;
if (oc != null && oc.TryGetBoundCustomMember(context, "__init__", out value)) {
return oc.GetOldStyleDescriptor(context, value, newObject, oc);
}
// fall through to new-style only case. We might accidentally
// detect old-style if the user imports an IronPython.NewTypes
// type.
}
if (cdt.TryLookupSlot(context, "__init__", out dts) &&
dts.TryGetValue(context, newObject, dt, out value)) {
return value;
}
}
return null;
}
private static void AddFinalizer(CodeContext/*!*/ context, PythonType dt, object newObject) {
// check if object has finalizer...
PythonTypeSlot dummy;
if (dt.TryResolveSlot(context, "__del__", out dummy)) {
IWeakReferenceable iwr = context.GetPythonContext().ConvertToWeakReferenceable(newObject);
Debug.Assert(iwr != null);
InstanceFinalizer nif = new InstanceFinalizer(context, newObject);
iwr.SetFinalizer(new WeakRefTracker(nif, nif));
}
}
private static object GetTypeNew(CodeContext/*!*/ context, PythonType dt) {
PythonTypeSlot dts;
if (!dt.TryResolveSlot(context, "__new__", out dts)) {
throw PythonOps.TypeError("cannot create instances of {0}", dt.Name);
}
object newInst;
bool res = dts.TryGetValue(context, dt, dt, out newInst);
Debug.Assert(res);
return newInst;
}
internal static bool IsRuntimeAssembly(Assembly assembly) {
if (assembly == typeof(PythonOps).GetTypeInfo().Assembly || // IronPython.dll
assembly == typeof(Microsoft.Scripting.Interpreter.LightCompiler).GetTypeInfo().Assembly || // Microsoft.Scripting.dll
assembly == typeof(DynamicMetaObject).GetTypeInfo().Assembly) { // Microsoft.Scripting.Core.dll
return true;
}
AssemblyName assemblyName = new AssemblyName(assembly.FullName);
if (assemblyName.Name.Equals("IronPython.Modules")) { // IronPython.Modules.dll
return true;
}
return false;
}
private static bool ShouldInvokeInit(PythonType cls, PythonType newObjectType, int argCnt) {
// don't run __init__ if it's not a subclass of ourselves,
// or if this is the user doing type(x), or if it's a standard
// .NET type which doesn't have an __init__ method (this is a perf optimization)
return (!cls.IsSystemType || cls.IsPythonType) &&
newObjectType.IsSubclassOf(cls) &&
(cls != TypeCache.PythonType || argCnt > 1);
}
// note: returns "instance" rather than type name if o is an OldInstance
internal static string GetName(object o) {
return DynamicHelpers.GetPythonType(o).Name;
}
// a version of GetName that also works on old-style classes
internal static string GetOldName(object o) {
return o is OldInstance ? GetOldName((OldInstance)o) : GetName(o);
}
// a version of GetName that also works on old-style classes
internal static string GetOldName(OldInstance instance) {
return instance._class.Name;
}
internal static PythonType[] ObjectTypes(object[] args) {
PythonType[] types = new PythonType[args.Length];
for (int i = 0; i < args.Length; i++) {
types[i] = DynamicHelpers.GetPythonType(args[i]);
}
return types;
}
internal static Type[] ConvertToTypes(PythonType[] pythonTypes) {
Type[] types = new Type[pythonTypes.Length];
for (int i = 0; i < pythonTypes.Length; i++) {
types[i] = ConvertToType(pythonTypes[i]);
}
return types;
}
private static Type ConvertToType(PythonType pythonType) {
if (pythonType.IsNull) {
return typeof(DynamicNull);
} else {
return pythonType.UnderlyingSystemType;
}
}
internal static TrackerTypes GetMemberType(MemberGroup members) {
TrackerTypes memberType = TrackerTypes.All;
for (int i = 0; i < members.Count; i++) {
MemberTracker mi = members[i];
if (mi.MemberType != memberType) {
if (memberType != TrackerTypes.All) {
return TrackerTypes.All;
}
memberType = mi.MemberType;
}
}
return memberType;
}
internal static PythonTypeSlot/*!*/ GetSlot(MemberGroup group, string name, bool privateBinding) {
if (group.Count == 0) {
return null;
}
group = FilterNewSlots(group);
TrackerTypes tt = GetMemberType(group);
switch(tt) {
case TrackerTypes.Method:
bool checkStatic = false;
List<MemberInfo> mems = new List<MemberInfo>();
foreach (MemberTracker mt in group) {
MethodTracker metht = (MethodTracker)mt;
mems.Add(metht.Method);
checkStatic |= metht.IsStatic;
}
Type declType = group[0].DeclaringType;
MemberInfo[] memArray = mems.ToArray();
FunctionType ft = GetMethodFunctionType(declType, memArray, checkStatic);
return GetFinalSlotForFunction(GetBuiltinFunction(declType, group[0].Name, name, ft, memArray));
case TrackerTypes.Field:
return GetReflectedField(((FieldTracker)group[0]).Field);
case TrackerTypes.Property:
return GetReflectedProperty((PropertyTracker)group[0], group, privateBinding);
case TrackerTypes.Event:
return GetReflectedEvent(((EventTracker)group[0]));
case TrackerTypes.Type:
TypeTracker type = (TypeTracker)group[0];
for (int i = 1; i < group.Count; i++) {
type = TypeGroup.UpdateTypeEntity(type, (TypeTracker)group[i]);
}
if (type is TypeGroup) {
return new PythonTypeUserDescriptorSlot(type, true);
}
return new PythonTypeUserDescriptorSlot(DynamicHelpers.GetPythonTypeFromType(type.Type), true);
case TrackerTypes.Constructor:
return GetConstructor(group[0].DeclaringType, privateBinding);
case TrackerTypes.Custom:
return ((PythonCustomTracker)group[0]).GetSlot();
default:
// if we have a new slot in the derived class filter out the
// members from the base class.
throw new InvalidOperationException(String.Format("Bad member type {0} on {1}.{2}", tt.ToString(), group[0].DeclaringType, name));
}
}
internal static MemberGroup FilterNewSlots(MemberGroup group) {
if (GetMemberType(group) == TrackerTypes.All) {
Type declType = group[0].DeclaringType;
for (int i = 1; i < group.Count; i++) {
if (group[i].DeclaringType != declType) {
if (group[i].DeclaringType.IsSubclassOf(declType)) {
declType = group[i].DeclaringType;
}
}
}
List<MemberTracker> trackers = new List<MemberTracker>();
for (int i = 0; i < group.Count; i++) {
if (group[i].DeclaringType == declType) {
trackers.Add(group[i]);
}
}
if (trackers.Count != group.Count) {
return new MemberGroup(trackers.ToArray());
}
}
return group;
}
private static BuiltinFunction GetConstructor(Type t, bool privateBinding) {
BuiltinFunction ctorFunc = InstanceOps.NonDefaultNewInst;
MethodBase[] ctors = CompilerHelpers.GetConstructors(t, privateBinding, true);
return GetConstructor(t, ctorFunc, ctors);
}
internal static bool IsDefaultNew(MethodBase[] targets) {
if (targets.Length == 1) {
ParameterInfo[] pis = targets[0].GetParameters();
if (pis.Length == 0) {
return true;
}
if (pis.Length == 1 && pis[0].ParameterType == typeof(CodeContext)) {
return true;
}
}
return false;
}
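// Illustrative sketch (added comment, not part of the original source): IsDefaultNew reports true
// when the type's only constructor takes no arguments, or just a CodeContext.
//
//   MethodBase[] objectCtors = typeof(object).GetConstructors();   // single parameterless ctor
//   bool isDefault = IsDefaultNew(objectCtors);                     // true
//
//   MethodBase[] stringCtors = typeof(string).GetConstructors();    // several ctors with parameters
//   bool notDefault = IsDefaultNew(stringCtors);                    // false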
internal static BuiltinFunction GetConstructorFunction(Type type, string name) {
List<MethodBase> methods = new List<MethodBase>();
bool hasDefaultConstructor = false;
foreach (ConstructorInfo ci in type.GetConstructors(BindingFlags.Public | BindingFlags.Instance)) {
if (ci.IsPublic) {
if (ci.GetParameters().Length == 0) {
hasDefaultConstructor = true;
}
methods.Add(ci);
}
}
if (type.IsValueType() && !hasDefaultConstructor && type != typeof(void)) {
try {
methods.Add(typeof(ScriptingRuntimeHelpers).GetMethod("CreateInstance", ReflectionUtils.EmptyTypes).MakeGenericMethod(type));
} catch (BadImageFormatException) {
// certain types (e.g. ArgIterator) won't survive the above call.
// we won't let you create instances of these types.
}
}
if (methods.Count > 0) {
return BuiltinFunction.MakeFunction(name, methods.ToArray(), type);
}
return null;
}
internal static ReflectedEvent GetReflectedEvent(EventTracker tracker) {
ReflectedEvent res;
lock (_eventCache) {
if (!_eventCache.TryGetValue(tracker, out res)) {
if (PythonBinder.IsExtendedType(tracker.DeclaringType)) {
_eventCache[tracker] = res = new ReflectedEvent(tracker, true);
} else {
_eventCache[tracker] = res = new ReflectedEvent(tracker, false);
}
}
}
return res;
}
internal static PythonTypeSlot/*!*/ GetFinalSlotForFunction(BuiltinFunction/*!*/ func) {
if ((func.FunctionType & FunctionType.Method) != 0) {
BuiltinMethodDescriptor desc;
lock (_methodCache) {
if (!_methodCache.TryGetValue(func, out desc)) {
_methodCache[func] = desc = new BuiltinMethodDescriptor(func);
}
return desc;
}
}
if (func.Targets[0].IsDefined(typeof(ClassMethodAttribute), true)) {
lock (_classMethodCache) {
ClassMethodDescriptor desc;
if (!_classMethodCache.TryGetValue(func, out desc)) {
_classMethodCache[func] = desc = new ClassMethodDescriptor(func);
}
return desc;
}
}
return func;
}
internal static BuiltinFunction/*!*/ GetBuiltinFunction(Type/*!*/ type, string/*!*/ name, MemberInfo/*!*/[]/*!*/ mems) {
return GetBuiltinFunction(type, name, null, mems);
}
#pragma warning disable 414 // unused fields - they're used by GetHashCode()
internal struct BuiltinFunctionKey {
Type DeclaringType;
ReflectionCache.MethodBaseCache Cache;
FunctionType FunctionType;
public BuiltinFunctionKey(Type declaringType, ReflectionCache.MethodBaseCache cache, FunctionType funcType) {
Cache = cache;
FunctionType = funcType;
DeclaringType = declaringType;
}
}
#pragma warning restore 414
public static MethodBase[] GetNonBaseHelperMethodInfos(MemberInfo[] members) {
List<MethodBase> res = new List<MethodBase>();
foreach (MemberInfo mi in members) {
MethodBase mb = mi as MethodBase;
if (mb != null && !mb.Name.StartsWith(NewTypeMaker.BaseMethodPrefix)) {
res.Add(mb);
}
}
return res.ToArray();
}
public static MemberInfo[] GetNonBaseHelperMemberInfos(MemberInfo[] members) {
List<MemberInfo> res = new List<MemberInfo>(members.Length);
foreach (MemberInfo mi in members) {
MethodBase mb = mi as MethodBase;
if (mb == null || !mb.Name.StartsWith(NewTypeMaker.BaseMethodPrefix)) {
res.Add(mi);
}
}
return res.ToArray();
}
internal static BuiltinFunction/*!*/ GetBuiltinFunction(Type/*!*/ type, string/*!*/ name, FunctionType? funcType, params MemberInfo/*!*/[]/*!*/ mems) {
return GetBuiltinFunction(type, name, name, funcType, mems);
}
/// <summary>
/// Gets a builtin function for the given declaring type and member infos.
///
/// Given the same inputs this always returns the same object, ensuring there's only one BuiltinFunction
/// for each .NET method.
///
/// This method takes both a cacheName and a pythonName. The cache name is the real method name. The pythonName
/// is the name of the method as exposed to Python.
/// </summary>
internal static BuiltinFunction/*!*/ GetBuiltinFunction(Type/*!*/ type, string/*!*/ cacheName, string/*!*/ pythonName, FunctionType? funcType, params MemberInfo/*!*/[]/*!*/ mems) {
BuiltinFunction res = null;
if (mems.Length != 0) {
FunctionType ft = funcType ?? GetMethodFunctionType(type, mems);
type = GetBaseDeclaringType(type, mems);
BuiltinFunctionKey cache = new BuiltinFunctionKey(type, new ReflectionCache.MethodBaseCache(cacheName, GetNonBaseHelperMethodInfos(mems)), ft);
lock (_functions) {
if (!_functions.TryGetValue(cache, out res)) {
if (PythonTypeOps.GetFinalSystemType(type) == type) {
IList<MethodInfo> overriddenMethods = NewTypeMaker.GetOverriddenMethods(type, cacheName);
if (overriddenMethods.Count > 0) {
List<MemberInfo> newMems = new List<MemberInfo>(mems);
foreach (MethodInfo mi in overriddenMethods) {
newMems.Add(mi);
}
mems = newMems.ToArray();
}
}
_functions[cache] = res = BuiltinFunction.MakeMethod(pythonName, ReflectionUtils.GetMethodInfos(mems), type, ft);
}
}
}
return res;
}
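// Usage sketch (added comment, not part of the original source): the cache name is the real .NET
// method name while the Python name is what the function is exposed as, so reflecting over
// string.ToUpper but exposing it under a different Python name would look roughly like this:
//
//   MemberInfo[] mems = typeof(string).GetMember("ToUpper");
//   BuiltinFunction upper = GetBuiltinFunction(typeof(string), "ToUpper", "upper", null, mems);
//
// Calling it again with the same inputs returns the same cached BuiltinFunction instance.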
private static Type GetCommonBaseType(Type xType, Type yType) {
if (xType.IsSubclassOf(yType)) {
return yType;
} else if (yType.IsSubclassOf(xType)) {
return xType;
} else if (xType == yType) {
return xType;
}
Type xBase = xType.GetBaseType();
Type yBase = yType.GetBaseType();
if (xBase != null) {
Type res = GetCommonBaseType(xBase, yType);
if (res != null) {
return res;
}
}
if (yBase != null) {
Type res = GetCommonBaseType(xType, yBase);
if (res != null) {
return res;
}
}
return null;
}
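// Worked examples (added comment, not part of the original source): the recursion walks up both
// inheritance chains until a shared ancestor is found, e.g.
//
//   GetCommonBaseType(typeof(ArgumentException), typeof(InvalidOperationException))  // typeof(SystemException)
//   GetCommonBaseType(typeof(string), typeof(int))                                   // typeof(object)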
private static Type GetBaseDeclaringType(Type type, MemberInfo/*!*/[] mems) {
// get the base most declaring type, first sort the list so that
// the most derived class is at the beginning.
Array.Sort<MemberInfo>(mems, delegate(MemberInfo x, MemberInfo y) {
if (x.DeclaringType.IsSubclassOf(y.DeclaringType)) {
return -1;
} else if (y.DeclaringType.IsSubclassOf(x.DeclaringType)) {
return 1;
} else if (x.DeclaringType == y.DeclaringType) {
return 0;
}
// no relationship between these types, they should be base helper
// methods for two different types - for example object.MemberwiseClone for
// ExtensibleInt & object. We need to reset our type to the common base type.
type = GetCommonBaseType(x.DeclaringType, y.DeclaringType) ?? typeof(object);
// generic type definitions will have a null name.
if (x.DeclaringType.FullName == null) {
return -1;
} else if (y.DeclaringType.FullName == null) {
return 1;
}
return x.DeclaringType.FullName.CompareTo(y.DeclaringType.FullName);
});
// then if the provided type is a subclass of the most derived type
// then our declaring type is the methods declaring type.
foreach (MemberInfo mb in mems) {
// skip extension methods
if (mb.DeclaringType.IsAssignableFrom(type)) {
if (type == mb.DeclaringType || type.IsSubclassOf(mb.DeclaringType)) {
type = mb.DeclaringType;
break;
}
}
}
return type;
}
internal static ConstructorFunction GetConstructor(Type type, BuiltinFunction realTarget, params MethodBase[] mems) {
ConstructorFunction res = null;
if (mems.Length != 0) {
ReflectionCache.MethodBaseCache cache = new ReflectionCache.MethodBaseCache("__new__", mems);
lock (_ctors) {
if (!_ctors.TryGetValue(cache, out res)) {
_ctors[cache] = res = new ConstructorFunction(realTarget, mems);
}
}
}
return res;
}
internal static FunctionType GetMethodFunctionType(Type/*!*/ type, MemberInfo/*!*/[]/*!*/ methods) {
return GetMethodFunctionType(type, methods, true);
}
internal static FunctionType GetMethodFunctionType(Type/*!*/ type, MemberInfo/*!*/[]/*!*/ methods, bool checkStatic) {
FunctionType ft = FunctionType.None;
foreach (MethodInfo mi in methods) {
if (mi.IsStatic && mi.IsSpecialName) {
ParameterInfo[] pis = mi.GetParameters();
if ((pis.Length == 2 && pis[0].ParameterType != typeof(CodeContext)) ||
(pis.Length == 3 && pis[0].ParameterType == typeof(CodeContext))) {
ft |= FunctionType.BinaryOperator;
if (pis[pis.Length - 2].ParameterType != type && pis[pis.Length - 1].ParameterType == type) {
ft |= FunctionType.ReversedOperator;
}
}
}
if (checkStatic && IsStaticFunction(type, mi)) {
ft |= FunctionType.Function;
} else {
ft |= FunctionType.Method;
}
}
if (IsMethodAlwaysVisible(type, methods)) {
ft |= FunctionType.AlwaysVisible;
}
return ft;
}
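// Illustrative note (added comment, not part of the original source; MyNumber is a hypothetical type):
// a static, special-name method such as
//
//   public static MyNumber operator +(int x, MyNumber y) { ... }   // op_Addition(int, MyNumber)
//
// has two non-CodeContext parameters, so it is flagged as FunctionType.BinaryOperator, and because
// only the second parameter is the declaring type it is additionally flagged as ReversedOperator
// (Python's __radd__-style dispatch).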
/// <summary>
/// Checks to see if the provided members are always visible for the given type.
///
/// This filters out methods such as GetHashCode and Equals on standard .NET
/// types that we expose directly as Python types (e.g. object, string, etc...).
///
/// It also filters out the base helper overrides that are added for supporting
/// super calls on user defined types.
/// </summary>
private static bool IsMethodAlwaysVisible(Type/*!*/ type, MemberInfo/*!*/[]/*!*/ methods) {
bool alwaysVisible = true;
if (PythonBinder.IsPythonType(type)) {
// only show methods defined outside of the system types (object, string)
foreach (MethodInfo mi in methods) {
if (PythonBinder.IsExtendedType(mi.DeclaringType) ||
PythonBinder.IsExtendedType(mi.GetBaseDefinition().DeclaringType) ||
mi.IsDefined(typeof(PythonHiddenAttribute), false)) {
alwaysVisible = false;
break;
}
}
} else if (typeof(IPythonObject).IsAssignableFrom(type)) {
// check if this is a virtual override helper, if so we
// may need to filter it out.
foreach (MethodInfo mi in methods) {
if (PythonBinder.IsExtendedType(mi.DeclaringType)) {
alwaysVisible = false;
break;
}
}
}
return alwaysVisible;
}
/// <summary>
/// a function is static if it's a static .NET method and it's defined on the type or is an extension method
/// with StaticExtensionMethod decoration.
/// </summary>
private static bool IsStaticFunction(Type type, MethodInfo mi) {
return mi.IsStatic && // method must be truly static
!mi.IsDefined(typeof(WrapperDescriptorAttribute), false) && // wrapper descriptors are instance methods
(mi.DeclaringType.IsAssignableFrom(type) || mi.IsDefined(typeof(StaticExtensionMethodAttribute), false)); // and it's defined on the type itself or it's a static extension method
}
internal static PythonTypeSlot GetReflectedField(FieldInfo info) {
PythonTypeSlot res;
NameType nt = NameType.Field;
if (!PythonBinder.IsExtendedType(info.DeclaringType) &&
!info.IsDefined(typeof(PythonHiddenAttribute), false)) {
nt |= NameType.PythonField;
}
lock (_fieldCache) {
if (!_fieldCache.TryGetValue(info, out res)) {
if (nt == NameType.PythonField && info.IsLiteral) {
if (info.FieldType == typeof(int)) {
res = new PythonTypeUserDescriptorSlot(
ScriptingRuntimeHelpers.Int32ToObject((int)info.GetRawConstantValue()),
true
);
} else if (info.FieldType == typeof(bool)) {
res = new PythonTypeUserDescriptorSlot(
ScriptingRuntimeHelpers.BooleanToObject((bool)info.GetRawConstantValue()),
true
);
} else {
res = new PythonTypeUserDescriptorSlot(
info.GetValue(null),
true
);
}
} else {
res = new ReflectedField(info, nt);
}
_fieldCache[info] = res;
}
}
return res;
}
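// Illustrative note (added comment, not part of the original source; the constant below is hypothetical):
// literal (const) fields that are visible to Python are exposed as plain values rather than
// ReflectedField descriptors, e.g. a field like
//
//   public const int MaxValue = 100;
//
// becomes a PythonTypeUserDescriptorSlot wrapping the boxed 100, while a normal static or instance
// field is wrapped in a ReflectedField so reads and writes go through reflection.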
internal static string GetDocumentation(Type type) {
// Python documentation
object[] docAttr = type.GetCustomAttributes(typeof(DocumentationAttribute), false);
if (docAttr != null && docAttr.Length > 0) {
return ((DocumentationAttribute)docAttr[0]).Documentation;
}
if (type == typeof(DynamicNull)) return null;
// Auto Doc (XML or otherwise)
string autoDoc = DocBuilder.CreateAutoDoc(type);
if (autoDoc == null) {
autoDoc = String.Empty;
} else {
autoDoc += Environment.NewLine + Environment.NewLine;
}
// Simple generated help based on ctor, if available.
ConstructorInfo[] cis = type.GetConstructors();
foreach (ConstructorInfo ci in cis) {
autoDoc += FixCtorDoc(type, DocBuilder.CreateAutoDoc(ci, DynamicHelpers.GetPythonTypeFromType(type).Name, 0)) + Environment.NewLine;
}
return autoDoc;
}
private static string FixCtorDoc(Type type, string autoDoc) {
return autoDoc.Replace("__new__(cls)", DynamicHelpers.GetPythonTypeFromType(type).Name + "()").
Replace("__new__(cls, ", DynamicHelpers.GetPythonTypeFromType(type).Name + "(");
}
internal static ReflectedGetterSetter GetReflectedProperty(PropertyTracker pt, MemberGroup allProperties, bool privateBinding) {
ReflectedGetterSetter rp;
lock (_propertyCache) {
if (_propertyCache.TryGetValue(pt, out rp)) {
return rp;
}
NameType nt = NameType.PythonProperty;
MethodInfo getter = FilterProtectedGetterOrSetter(pt.GetGetMethod(true), privateBinding);
MethodInfo setter = FilterProtectedGetterOrSetter(pt.GetSetMethod(true), privateBinding);
if ((getter != null && getter.IsDefined(typeof(PythonHiddenAttribute), true)) ||
setter != null && setter.IsDefined(typeof(PythonHiddenAttribute), true)) {
nt = NameType.Property;
}
ExtensionPropertyTracker ept = pt as ExtensionPropertyTracker;
if (ept == null) {
ReflectedPropertyTracker rpt = pt as ReflectedPropertyTracker;
Debug.Assert(rpt != null);
if (PythonBinder.IsExtendedType(pt.DeclaringType) ||
rpt.Property.IsDefined(typeof(PythonHiddenAttribute), true)) {
nt = NameType.Property;
}
if (pt.GetIndexParameters().Length == 0) {
List<MethodInfo> getters = new List<MethodInfo>();
List<MethodInfo> setters = new List<MethodInfo>();
IList<ExtensionPropertyTracker> overriddenProperties = NewTypeMaker.GetOverriddenProperties((getter ?? setter).DeclaringType, pt.Name);
foreach (ExtensionPropertyTracker tracker in overriddenProperties) {
MethodInfo method = tracker.GetGetMethod(privateBinding);
if (method != null) {
getters.Add(method);
}
method = tracker.GetSetMethod(privateBinding);
if (method != null) {
setters.Add(method);
}
}
foreach (PropertyTracker propTracker in allProperties) {
MethodInfo method = propTracker.GetGetMethod(privateBinding);
if (method != null) {
getters.Add(method);
}
method = propTracker.GetSetMethod(privateBinding);
if (method != null) {
setters.Add(method);
}
}
rp = new ReflectedProperty(rpt.Property, getters.ToArray(), setters.ToArray(), nt);
} else {
rp = new ReflectedIndexer(((ReflectedPropertyTracker)pt).Property, NameType.Property, privateBinding);
}
} else {
rp = new ReflectedExtensionProperty(new ExtensionPropertyInfo(pt.DeclaringType, getter ?? setter), nt);
}
_propertyCache[pt] = rp;
return rp;
}
}
private static MethodInfo FilterProtectedGetterOrSetter(MethodInfo info, bool privateBinding) {
if (info != null) {
if (privateBinding || info.IsPublic) {
return info;
}
if (info.IsProtected()) {
return info;
}
}
return null;
}
internal static bool TryInvokeUnaryOperator(CodeContext context, object o, string name, out object value) {
PerfTrack.NoteEvent(PerfTrack.Categories.Temporary, "UnaryOp " + CompilerHelpers.GetType(o).Name + " " + name);
PythonTypeSlot pts;
PythonType pt = DynamicHelpers.GetPythonType(o);
object callable;
if (pt.TryResolveMixedSlot(context, name, out pts) &&
pts.TryGetValue(context, o, pt, out callable)) {
value = PythonCalls.Call(context, callable);
return true;
}
value = null;
return false;
}
internal static bool TryInvokeBinaryOperator(CodeContext context, object o, object arg1, string name, out object value) {
PerfTrack.NoteEvent(PerfTrack.Categories.Temporary, "BinaryOp " + CompilerHelpers.GetType(o).Name + " " + name);
PythonTypeSlot pts;
PythonType pt = DynamicHelpers.GetPythonType(o);
object callable;
if (pt.TryResolveMixedSlot(context, name, out pts) &&
pts.TryGetValue(context, o, pt, out callable)) {
value = PythonCalls.Call(context, callable, arg1);
return true;
}
value = null;
return false;
}
internal static bool TryInvokeTernaryOperator(CodeContext context, object o, object arg1, object arg2, string name, out object value) {
PerfTrack.NoteEvent(PerfTrack.Categories.Temporary, "TernaryOp " + CompilerHelpers.GetType(o).Name + " " + name);
PythonTypeSlot pts;
PythonType pt = DynamicHelpers.GetPythonType(o);
object callable;
if (pt.TryResolveMixedSlot(context, name, out pts) &&
pts.TryGetValue(context, o, pt, out callable)) {
value = PythonCalls.Call(context, callable, arg1, arg2);
return true;
}
value = null;
return false;
}
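// Usage sketch (added comment, not part of the original source; left/right/context are hypothetical
// locals): these helpers resolve a dunder slot on the object's Python type and call it, e.g.
//
//   object sum;
//   if (TryInvokeBinaryOperator(context, left, right, "__add__", out sum)) {
//       // sum now holds the result computed by the type's __add__ slot
//   }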
/// <summary>
/// If we have only interfaces, we'll need to insert object's base
/// </summary>
internal static PythonTuple EnsureBaseType(PythonTuple bases) {
bool hasInterface = false;
foreach (object baseClass in bases) {
if (baseClass is OldClass) continue;
PythonType dt = baseClass as PythonType;
if (!dt.UnderlyingSystemType.IsInterface()) {
return bases;
} else {
hasInterface = true;
}
}
if (hasInterface || bases.Count == 0) {
// We found only interfaces. We need to add System.Object to the bases
return new PythonTuple(bases, TypeCache.Object);
}
throw PythonOps.TypeError("a new-style class can't have only classic bases");
}
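// Illustrative sketch (added comment, not part of the original source): a class statement whose
// bases are all interfaces gets System.Object appended so the resulting type has a concrete root,
// roughly:
//
//   PythonTuple bases = PythonTuple.MakeTuple(DynamicHelpers.GetPythonTypeFromType(typeof(IDisposable)));
//   PythonTuple fixedBases = EnsureBaseType(bases);   // now (IDisposable, object)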
internal static Type GetFinalSystemType(Type type) {
while (typeof(IPythonObject).IsAssignableFrom(type) && !type.GetTypeInfo().IsDefined(typeof(DynamicBaseTypeAttribute), false)) {
type = type.GetBaseType();
}
return type;
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using Microsoft.Protocols.TestSuites.FileSharing.Common.Adapter;
using Microsoft.Protocols.TestTools;
using Microsoft.Protocols.TestTools.StackSdk;
using Microsoft.Protocols.TestTools.StackSdk.FileAccessService.Smb2;
using System;
namespace Microsoft.Protocols.TestSuites.FileSharing.SMB2Model.Adapter.ValidateNegotiateInfo
{
public class ValidateNegotiateInfoAdapter : ModelManagedAdapterBase, IValidateNegotiateInfoAdapter
{
#region Field
private Smb2FunctionalClient testClient;
private uint treeId;
private NEGOTIATE_Response negotiateResponse;
private ModelDialectRevision Connection_Dialect;
private Capabilities_Values Connection_ClientCapabilities;
private SecurityMode_Values Connection_ClientSecurityMode;
private Guid Connection_ClientGuid;
private ValidateNegotiateInfoConfig validateNegotiateInfoConfig;
#endregion
#region Events
public event ValidateNegotiateInfoResponseEventHandler ValidateNegotiateInfoResponse;
public event TerminateConnectionEventHandler TerminateConnection;
#endregion
#region Initialization
public override void Initialize(ITestSite testSite)
{
base.Initialize(testSite);
}
public override void Reset()
{
base.Reset();
if (testClient != null)
{
testClient.Disconnect();
testClient = null;
}
}
#endregion
#region Actions
public void ReadConfig(out ValidateNegotiateInfoConfig c)
{
// Server will terminate connection if Validate Negotiate Info Request is not signed.
testConfig.CheckSigning();
c = new ValidateNegotiateInfoConfig
{
Platform = testConfig.Platform,
ValidateNegotiateInfoSupported = testConfig.IsIoCtlCodeSupported(CtlCode_Values.FSCTL_VALIDATE_NEGOTIATE_INFO) ?
ValidateNegotiateInfoInServer.SupportValidateNegotiateInfo : ValidateNegotiateInfoInServer.NotSupportValidateNegotiateInfo
};
validateNegotiateInfoConfig = c;
}
/// <summary>
/// Negotiate, SessionSetup and TreeConnect
/// </summary>
public void SetupConnection(ModelDialectRevision dialect, ModelCapabilities capabilities, SecurityMode_Values securityMode)
{
#region Connect to server
testClient = new Smb2FunctionalClient(testConfig.Timeout, testConfig, this.Site);
testClient.ConnectToServer(testConfig.UnderlyingTransport, testConfig.SutComputerName, testConfig.SutIPAddress);
#endregion
// It MUST be a GUID generated by the client, if the Dialects field contains a value other than 0x0202. Otherwise, the client MUST set this to 0.
Guid clientGuid = (dialect == ModelDialectRevision.Smb2002) ? Guid.Empty : Guid.NewGuid();
#region negotiate
testClient.Negotiate(
Packet_Header_Flags_Values.NONE,
Smb2Utility.GetDialects(ModelUtility.GetDialectRevision(dialect)),
securityMode,
(Capabilities_Values)capabilities,
clientGuid,
(header, response) =>
{
Site.Assert.AreEqual(Smb2Status.STATUS_SUCCESS, header.Status, "{0} should succeed", header.Command);
negotiateResponse = response;
});
#endregion
#region session setup
testClient.SessionSetup(
testConfig.DefaultSecurityPackage,
testConfig.SutComputerName,
testConfig.AccountCredential,
testConfig.UseServerGssToken,
(SESSION_SETUP_Request_SecurityMode_Values)securityMode);
#endregion
#region treeconnect
testClient.TreeConnect(
Smb2Utility.GetUncPath(testConfig.SutComputerName, testConfig.BasicFileShare),
out treeId);
#endregion
Connection_Dialect = ModelUtility.GetModelDialectRevision(negotiateResponse.DialectRevision);
Connection_ClientCapabilities = (Capabilities_Values)capabilities;
Connection_ClientSecurityMode = securityMode;
Connection_ClientGuid = clientGuid;
}
/// <summary>
/// Send ValidateNegotiateInfoRequest to Server, fill in the fields according to params.
/// Verify the response.
/// </summary>
public void ValidateNegotiateInfoRequest(DialectType dialectType,
CapabilitiesType capabilitiesType,
SecurityModeType securityModeType,
ClientGuidType clientGuidType)
{
Capabilities_Values capabilities = Connection_ClientCapabilities;
if (capabilitiesType == CapabilitiesType.CapabilitiesDifferentFromNegotiate)
capabilities ^= Capabilities_Values.GLOBAL_CAP_DFS;
SecurityMode_Values securityMode = Connection_ClientSecurityMode;
if (securityModeType == SecurityModeType.SecurityModeDifferentFromNegotiate)
securityMode ^= SecurityMode_Values.NEGOTIATE_SIGNING_ENABLED;
Guid guid = clientGuidType == ClientGuidType.ClientGuidSameWithNegotiate ? Connection_ClientGuid : Guid.NewGuid();
DialectRevision[] dialects = null;
if (DialectType.None != dialectType)
{
ModelDialectRevision dialect = Connection_Dialect;
if (DialectType.DialectDifferentFromNegotiate == dialectType)
dialect = ModelDialectRevision.Smb30 == Connection_Dialect ? ModelDialectRevision.Smb21 : ModelDialectRevision.Smb30;
dialects = Smb2Utility.GetDialects(ModelUtility.GetDialectRevision(dialect));
}
else
dialects = new DialectRevision[] { 0 };
VALIDATE_NEGOTIATE_INFO_Request validateNegotiateInfoRequest;
validateNegotiateInfoRequest.Dialects = dialects;
validateNegotiateInfoRequest.DialectCount = (ushort)dialects.Length;
validateNegotiateInfoRequest.Capabilities = capabilities;
validateNegotiateInfoRequest.SecurityMode = securityMode;
validateNegotiateInfoRequest.Guid = guid;
Site.Log.Add(
LogEntryKind.Debug,
"Dialects in ValidateNegotiateInfoRequest: {0}", Smb2Utility.GetArrayString(validateNegotiateInfoRequest.Dialects));
Site.Log.Add(
LogEntryKind.Debug,
"DialectCount in ValidateNegotiateInfoRequest: {0}", validateNegotiateInfoRequest.DialectCount);
Site.Log.Add(
LogEntryKind.Debug,
"Capabilities in ValidateNegotiateInfoRequest: {0}", validateNegotiateInfoRequest.Capabilities);
Site.Log.Add(
LogEntryKind.Debug,
"SecurityMode in ValidateNegotiateInfoRequest: {0}", validateNegotiateInfoRequest.SecurityMode);
Site.Log.Add(
LogEntryKind.Debug,
"Guid in ValidateNegotiateInfoRequest: {0}", validateNegotiateInfoRequest.Guid);
byte[] inputBuffer = TypeMarshal.ToBytes<VALIDATE_NEGOTIATE_INFO_Request>(validateNegotiateInfoRequest);
byte[] outputBuffer;
try
{
uint status = testClient.ValidateNegotiateInfo(treeId, inputBuffer, out outputBuffer, checker: CheckIoCtlResponse);
if (Smb2Status.STATUS_SUCCESS == status)
{
VALIDATE_NEGOTIATE_INFO_Response validateNegotiateInfoResponse = TypeMarshal.ToStruct<VALIDATE_NEGOTIATE_INFO_Response>(outputBuffer);
Site.Assert.AreEqual(negotiateResponse.DialectRevision,
validateNegotiateInfoResponse.Dialect,
"Dialect in Negotiate response({0}) and ValidateNegotiateInfo response({1}) should be the same",
negotiateResponse.DialectRevision.ToString(),
validateNegotiateInfoResponse.Dialect.ToString());
Site.Assert.AreEqual((uint)negotiateResponse.Capabilities,
(uint)validateNegotiateInfoResponse.Capabilities,
"Capabilities in Negotiate response({0}) and ValidateNegotiateResponse({1}) should be the same",
negotiateResponse.Capabilities.ToString(),
validateNegotiateInfoResponse.Capabilities.ToString());
Site.Assert.AreEqual((ushort)negotiateResponse.SecurityMode,
(ushort)validateNegotiateInfoResponse.SecurityMode,
"SecurityMode in Negotiate response({0}) and ValidateNegotiateInfo response({1}) should be the same",
negotiateResponse.SecurityMode.ToString(),
validateNegotiateInfoResponse.SecurityMode.ToString());
Site.Assert.AreEqual(negotiateResponse.ServerGuid,
validateNegotiateInfoResponse.Guid,
"ClientGuid in Negotiate response({0}) and ValidateNegotiateInfo response({1}) should be the same",
negotiateResponse.ServerGuid.ToString(),
validateNegotiateInfoResponse.Guid.ToString());
}
testClient.TreeDisconnect(treeId);
testClient.LogOff();
testClient.Disconnect();
this.ValidateNegotiateInfoResponse((ModelSmb2Status)status, validateNegotiateInfoConfig);
return;
}
catch
{
}
Site.Assert.IsTrue(testClient.Smb2Client.IsServerDisconnected, "ValidateNegotiateInfo failure should be caused by transport connection termination");
TerminateConnection();
}
#endregion
private void CheckIoCtlResponse(Packet_Header header, IOCTL_Response response)
{
if (header.Status != Smb2Status.STATUS_SUCCESS) return;
FILEID ioCtlFileId;
ioCtlFileId.Persistent = 0xFFFFFFFFFFFFFFFF;
ioCtlFileId.Volatile = 0xFFFFFFFFFFFFFFFF;
Site.Assert.AreEqual(ioCtlFileId, response.FileId, "FileId MUST be set to { 0xFFFFFFFFFFFFFFFF, 0xFFFFFFFFFFFFFFFF }.");
Site.Assert.AreEqual(0, (int)response.Flags, "Flags MUST be set to zero.");
Site.Assert.AreEqual((uint)CtlCode_Values.FSCTL_VALIDATE_NEGOTIATE_INFO, response.CtlCode, "CtlCode MUST be set to FSCTL_VALIDATE_NEGOTIATE_INFO.");
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.ComponentModel;
using System.Globalization;
using System.Runtime.Serialization;
namespace System.Drawing.Printing
{
/// <summary>
/// Specifies the margins of a printed page.
/// </summary>
#if NETCOREAPP
[TypeConverter("System.Drawing.Printing.MarginsConverter, System.Windows.Extensions, Version=4.0.0.0, Culture=neutral, PublicKeyToken=cc7b13ffcd2ddd51")]
#endif
public partial class Margins : ICloneable
{
private int _left;
private int _right;
private int _bottom;
private int _top;
[OptionalField]
private double _doubleLeft;
[OptionalField]
private double _doubleRight;
[OptionalField]
private double _doubleTop;
[OptionalField]
private double _doubleBottom;
/// <summary>
/// Initializes a new instance of the <see cref='Margins'/> class with one-inch margins.
/// </summary>
public Margins() : this(100, 100, 100, 100)
{
}
/// <summary>
/// Initializes a new instance of the <see cref='Margins'/> class with the specified left, right, top, and bottom margins.
/// </summary>
public Margins(int left, int right, int top, int bottom)
{
CheckMargin(left, nameof(left));
CheckMargin(right, nameof(right));
CheckMargin(top, nameof(top));
CheckMargin(bottom, nameof(bottom));
_left = left;
_right = right;
_top = top;
_bottom = bottom;
_doubleLeft = (double)left;
_doubleRight = (double)right;
_doubleTop = (double)top;
_doubleBottom = (double)bottom;
}
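// Usage sketch (added comment, not part of the original source): margins are expressed in
// hundredths of an inch, so half-inch left/right margins with one-inch top/bottom margins are:
//
//   var m = new Margins(left: 50, right: 50, top: 100, bottom: 100);
//   // m.Left == 50, m.Top == 100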
/// <summary>
/// Gets or sets the left margin, in hundredths of an inch.
/// </summary>
public int Left
{
get => _left;
set
{
CheckMargin(value, nameof(value));
_left = value;
_doubleLeft = (double)value;
}
}
/// <summary>
/// Gets or sets the right margin, in hundredths of an inch.
/// </summary>
public int Right
{
get => _right;
set
{
CheckMargin(value, nameof(value));
_right = value;
_doubleRight = (double)value;
}
}
/// <summary>
/// Gets or sets the top margin, in hundredths of an inch.
/// </summary>
public int Top
{
get => _top;
set
{
CheckMargin(value, nameof(value));
_top = value;
_doubleTop = (double)value;
}
}
/// <summary>
/// Gets or sets the bottom margin, in hundredths of an inch.
/// </summary>
public int Bottom
{
get => _bottom;
set
{
CheckMargin(value, nameof(value));
_bottom = value;
_doubleBottom = (double)value;
}
}
/// <summary>
/// Gets or sets the left margin as a double value, in hundredths of an inch.
/// When using the setter, the value should be between 0 and int.MaxValue.
/// </summary>
internal double DoubleLeft
{
get => _doubleLeft;
set
{
Left = (int)Math.Round(value);
_doubleLeft = value;
}
}
/// <summary>
/// Gets or sets the right margin as a double value, in hundredths of an inch.
/// When using the setter, the value should be between 0 and int.MaxValue.
/// </summary>
internal double DoubleRight
{
get => _doubleRight;
set
{
Right = (int)Math.Round(value);
_doubleRight = value;
}
}
/// <summary>
/// Gets or sets the top margin as a double value, in hundredths of an inch.
/// When using the setter, the value should be between 0 and int.MaxValue.
/// </summary>
internal double DoubleTop
{
get => _doubleTop;
set
{
Top = (int)Math.Round(value);
_doubleTop = value;
}
}
/// <summary>
/// Gets or sets the bottom margin as a double value, in hundredths of an inch.
/// When using the setter, the value should be between 0 and int.MaxValue.
/// </summary>
internal double DoubleBottom
{
get => _doubleBottom;
set
{
Bottom = (int)Math.Round(value);
_doubleBottom = value;
}
}
private void CheckMargin(int margin, string name)
{
if (margin < 0)
{
throw new ArgumentOutOfRangeException(name, margin, SR.Format(SR.InvalidLowBoundArgumentEx, name, margin, 0));
}
}
/// <summary>
/// Retrieves a duplicate of this object, member by member.
/// </summary>
public object Clone() => MemberwiseClone();
/// <summary>
/// Compares this <see cref='Margins'/> to a specified <see cref='Margins'/> to see whether they
/// are equal.
/// </summary>
public override bool Equals(object obj)
{
if (!(obj is Margins margins))
{
return false;
}
return margins.Left == Left
&& margins.Right == Right
&& margins.Top == Top
&& margins.Bottom == Bottom;
}
/// <summary>
/// Calculates and retrieves a hash code based on the left, right, top, and bottom margins.
/// </summary>
public override int GetHashCode() => HashCode.Combine(Left, Right, Top, Bottom);
/// <summary>
/// Tests whether two <see cref='Margins'/> objects are identical.
/// </summary>
public static bool operator ==(Margins m1, Margins m2)
{
if (m1 is null)
{
return m2 is null;
}
if (m2 is null)
{
return false;
}
return m1.Equals(m2);
}
/// <summary>
/// Tests whether two <see cref='Margins'/> objects are different.
/// </summary>
public static bool operator !=(Margins m1, Margins m2) => !(m1 == m2);
/// <summary>
/// Returns a string representation of the margins.
/// </summary>
public override string ToString()
{
return "[Margins"
+ " Left=" + Left.ToString(CultureInfo.InvariantCulture)
+ " Right=" + Right.ToString(CultureInfo.InvariantCulture)
+ " Top=" + Top.ToString(CultureInfo.InvariantCulture)
+ " Bottom=" + Bottom.ToString(CultureInfo.InvariantCulture)
+ "]";
}
}
}
| |
using System;
using System.Drawing;
using System.Xml.Serialization;
namespace ActiveWare.CSS {
/// <summary>part of a property's value</summary>
public class Term {
private char? seperator;
private char? sign;
private TermType type;
private string val;
private Unit? unit;
private Function function;
/// <summary></summary>
[XmlAttribute("seperator")]
public char? Seperator {
get { return seperator; }
set { seperator = value; }
}
/// <summary></summary>
[XmlAttribute("sign")]
public char? Sign {
get { return sign; }
set { sign = value; }
}
/// <summary></summary>
[XmlAttribute("type")]
public TermType Type {
get { return type; }
set { type = value; }
}
/// <summary></summary>
[XmlAttribute("value")]
public string Value {
get { return val; }
set { val = value; }
}
/// <summary></summary>
[XmlAttribute("unit")]
public Unit? Unit {
get { return unit; }
set { unit = value; }
}
/// <summary></summary>
[XmlElement("Function")]
public Function Function {
get { return function; }
set { function = value; }
}
/// <summary></summary>
/// <returns></returns>
public override string ToString() {
/*
term<out Term trm> = (. trm = new Term();
string val = "";
Function func = null;
.)
[ ('-' (. trm.Sign = '-'; .)
| '+' (. trm.Sign = '+'; .)
) ]
(
{ digit (. val += t.val; .)
}
[ (
"%" (. trm.Unit = Unit.Percent; .)
| "ex" (. trm.Unit = Unit.EX; .)
| "em" (. trm.Unit = Unit.EM; .)
| "px" (. trm.Unit = Unit.PX; .)
| "cm" (. trm.Unit = Unit.CM; .)
| "mm" (. trm.Unit = Unit.MM; .)
| "pc" (. trm.Unit = Unit.PC; .)
| "in" (. trm.Unit = Unit.IN; .)
| "pt" (. trm.Unit = Unit.PT; .)
| "deg" (. trm.Unit = Unit.DEG; .)
| ["g" (. trm.Unit = Unit.GRAD; .)
] "rad" (. if (trm.Unit != Unit.GRAD) { trm.Unit = Unit.RAD; } .)
| ["m" (. trm.Unit = Unit.MS; .)
] "s" (. if (trm.Unit != Unit.MS) { trm.Unit = Unit.S; } .)
| ["k" (. trm.Unit = Unit.KHZ; .)
] "hz" (. if (trm.Unit != Unit.KHZ) { trm.Unit = Unit.HZ; } .)
) ] (. trm.Value = val; trm.Type = TermType.Number; .)
|
function<out func> (. trm.Function = func; trm.Type = TermType.Function; .)
|
QuotedString<out val> (. trm.Value = val; trm.Type = TermType.String; .)
|
ident (. trm.Value = t.val; trm.Type = TermType.String; .)
|
URI<out val> (. trm.Value = val; trm.Type = TermType.Url; .)
|
"U\\"
{ (digit|'A'|'B'|'C'|'D'|'E'|'F'|'a'|'b'|'c'|'d'|'e'|'f')
(. val += t.val; .)
} (. trm.Value = val; trm.Type = TermType.Unicode; .)
|
hexdigit (. trm.Value = t.val; trm.Type = TermType.Hex; .)
)
.
*/
System.Text.StringBuilder txt = new System.Text.StringBuilder();
//if (seperator.HasValue) { txt.Append(seperator.Value); txt.Append(" "); }
if (type == TermType.Function) {
txt.Append(function.ToString());
} else if (type == TermType.Url) {
txt.AppendFormat("url('{0}')", val);
} else if (type == TermType.Unicode) {
txt.AppendFormat("U\\{0}", val.ToUpper());
} else if (type == TermType.Hex) {
txt.Append(val.ToUpper());
} else {
if (sign.HasValue) { txt.Append(sign.Value); }
txt.Append(val);
if (unit.HasValue) {
if (unit.Value == ActiveWare.CSS.Unit.Percent) {
txt.Append("%");
} else {
txt.Append(UnitOutput.ToString(unit.Value));
}
}
}
return txt.ToString();
}
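// Illustrative examples (added comment, not part of the original source) of how a Term renders:
//
//   new Term { Type = TermType.Number, Value = "50", Unit = Unit.Percent }.ToString()  // "50%"
//   new Term { Type = TermType.Hex, Value = "#ff0000" }.ToString()                     // "#FF0000"
//
// Number terms with other units append the unit text produced by UnitOutput.ToString.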
public bool IsColor {
get {
if (((type == TermType.Hex) || (type == TermType.String && val.StartsWith("#")))
&& (val.Length == 6 || val.Length == 3 || ((val.Length == 7 || val.Length == 4)
&& val.StartsWith("#")))) {
bool hex = true;
foreach (char c in val) {
if (!char.IsDigit(c) && c != '#'
&& c != 'a' && c != 'A'
&& c != 'b' && c != 'B'
&& c != 'c' && c != 'C'
&& c != 'd' && c != 'D'
&& c != 'e' && c != 'E'
&& c != 'f' && c != 'F'
) {
return false;
}
}
return hex;
} else if (type == TermType.String) {
bool number = true;
foreach (char c in val) {
if (!char.IsDigit(c)) {
number = false;
break;
}
}
if (number) { return false; }
try {
KnownColor kc = (KnownColor)Enum.Parse(typeof(KnownColor), val, true);
return true;
} catch { }
} else if (type == TermType.Function) {
/*
// 0-255, 0-1
rgb(255,0,0)
rgba(255,0,0,1)
// 0-100%, 0-1
rgb(100%,0%,0%)
rgba(100%,0%,0%,1)
// 0-360, 0-100%, 0-100%, 0-1
hsl(0, 100%, 50%)
hsl(120, 75%, 75%)
hsla(240, 100%, 50%, 0.5)
hsla(30, 100%, 50%, 0.1)
*/
if ((function.Name.ToLower().Equals("rgb") && function.Expression.Terms.Count == 3)
|| (function.Name.ToLower().Equals("rgba") && function.Expression.Terms.Count == 4)
) {
for (int i = 0; i < function.Expression.Terms.Count; i++) {
if (function.Expression.Terms[i].Type != TermType.Number) { return false; }
}
return true;
} else if ((function.Name.ToLower().Equals("hsl") && function.Expression.Terms.Count == 3)
|| (function.Name.ToLower().Equals("hsla") && function.Expression.Terms.Count == 4)
) {
for (int i = 0; i < function.Expression.Terms.Count; i++) {
if (function.Expression.Terms[i].Type != TermType.Number) { return false; }
}
return true;
}
}
return false;
}
}
private int GetRGBValue(Term t) {
try {
if (t.Unit.HasValue && t.Unit.Value == ActiveWare.CSS.Unit.Percent) {
return (int)(255f * float.Parse(t.Value) / 100f);
}
return int.Parse(t.Value);
} catch {}
return 0;
}
private int GetHueValue(Term t) {
// 0 - 360
try {
return (int)(float.Parse(t.Value) * 255f / 360f);
} catch {}
return 0;
}
public Color ToColor() {
string hex = "000000";
if (type == TermType.Hex) {
if ((val.Length == 7 || val.Length == 4) && val.StartsWith("#")) {
hex = val.Substring(1);
} else if (val.Length == 6 || val.Length == 3) {
hex = val;
}
} else if (type == TermType.Function) {
if ((function.Name.ToLower().Equals("rgb") && function.Expression.Terms.Count == 3)
|| (function.Name.ToLower().Equals("rgba") && function.Expression.Terms.Count == 4)
) {
int fr = 0, fg = 0, fb = 0;
for (int i = 0; i < function.Expression.Terms.Count; i++) {
if (function.Expression.Terms[i].Type != TermType.Number) { return Color.Black; }
switch (i) {
case 0: fr = GetRGBValue(function.Expression.Terms[i]); break;
case 1: fg = GetRGBValue(function.Expression.Terms[i]); break;
case 2: fb = GetRGBValue(function.Expression.Terms[i]); break;
}
}
return Color.FromArgb(fr, fg, fb);
} else if ((function.Name.ToLower().Equals("hsl") && function.Expression.Terms.Count == 3)
|| (function.Name.Equals("hsla") && function.Expression.Terms.Count == 4)
) {
int h = 0, s = 0, v = 0;
for (int i = 0; i < function.Expression.Terms.Count; i++) {
if (function.Expression.Terms[i].Type != TermType.Number) { return Color.Black; }
switch (i) {
case 0: h = GetHueValue(function.Expression.Terms[i]); break;
case 1: s = GetRGBValue(function.Expression.Terms[i]); break;
case 2: v = GetRGBValue(function.Expression.Terms[i]); break;
}
}
HSV hsv = new HSV(h, s, v);
return hsv.Color;
}
} else {
try {
KnownColor kc = (KnownColor)Enum.Parse(typeof(KnownColor), val, true);
Color c = Color.FromKnownColor(kc);
return c;
} catch { }
}
if (hex.Length == 3) {
string temp = "";
foreach (char c in hex) {
temp += c.ToString() + c.ToString();
}
hex = temp;
}
int r = DeHex(hex.Substring(0, 2));
int g = DeHex(hex.Substring(2, 2));
int b = DeHex(hex.Substring(4));
return Color.FromArgb(r, g, b);
}
private int DeHex(string input) {
int val;
int result = 0;
for (int i = 0; i < input.Length; i++) {
string chunk = input.Substring(i, 1).ToUpper();
switch (chunk) {
case "A":
val = 10; break;
case "B":
val = 11; break;
case "C":
val = 12; break;
case "D":
val = 13; break;
case "E":
val = 14; break;
case "F":
val = 15; break;
default:
val = int.Parse(chunk); break;
}
if (i == 0) {
result += val * 16;
} else {
result += val;
}
}
return result;
}
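// Illustrative examples (added comment, not part of the original source): DeHex converts a
// two-character hex pair to its numeric value, which is how ToColor splits "#RRGGBB" into channels:
//
//   DeHex("FF")  // 255
//   DeHex("0A")  // 10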
}
}
| |
using System;
using System.Linq;
using System.Threading.Tasks;
using Abp.Collections.Extensions;
using Abp.Dependency;
using Abp.Localization;
using Abp.Threading;
namespace Abp.Authorization
{
/// <summary>
/// Extension methods for <see cref="IPermissionChecker"/>
/// </summary>
public static class PermissionCheckerExtensions
{
/// <summary>
/// Checks if current user is granted for a permission.
/// </summary>
/// <param name="permissionChecker">Permission checker</param>
/// <param name="permissionName">Name of the permission</param>
public static bool IsGranted(this IPermissionChecker permissionChecker, string permissionName)
{
return AsyncHelper.RunSync(() => permissionChecker.IsGrantedAsync(permissionName));
}
/// <summary>
/// Checks if a user is granted for a permission.
/// </summary>
/// <param name="permissionChecker">Permission checker</param>
/// <param name="user">User to check</param>
/// <param name="permissionName">Name of the permission</param>
public static bool IsGranted(this IPermissionChecker permissionChecker, UserIdentifier user, string permissionName)
{
return AsyncHelper.RunSync(() => permissionChecker.IsGrantedAsync(user, permissionName));
}
/// <summary>
/// Checks if given user is granted for given permission.
/// </summary>
/// <param name="permissionChecker">Permission checker</param>
/// <param name="user">User</param>
/// <param name="requiresAll">True, to require all given permissions are granted. False, to require one or more.</param>
/// <param name="permissionNames">Name of the permissions</param>
public static bool IsGranted(this IPermissionChecker permissionChecker, UserIdentifier user, bool requiresAll, params string[] permissionNames)
{
return AsyncHelper.RunSync(() => IsGrantedAsync(permissionChecker, user, requiresAll, permissionNames));
}
/// <summary>
/// Checks if given user is granted for given permission.
/// </summary>
/// <param name="permissionChecker">Permission checker</param>
/// <param name="user">User</param>
/// <param name="requiresAll">True, to require all given permissions are granted. False, to require one or more.</param>
/// <param name="permissionNames">Name of the permissions</param>
public static async Task<bool> IsGrantedAsync(this IPermissionChecker permissionChecker, UserIdentifier user, bool requiresAll, params string[] permissionNames)
{
if (permissionNames.IsNullOrEmpty())
{
return true;
}
if (requiresAll)
{
foreach (var permissionName in permissionNames)
{
if (!(await permissionChecker.IsGrantedAsync(user, permissionName)))
{
return false;
}
}
return true;
}
else
{
foreach (var permissionName in permissionNames)
{
if (await permissionChecker.IsGrantedAsync(user, permissionName))
{
return true;
}
}
return false;
}
}
/// <summary>
/// Checks if current user is granted for given permission.
/// </summary>
/// <param name="permissionChecker">Permission checker</param>
/// <param name="requiresAll">True, to require all given permissions are granted. False, to require one or more.</param>
/// <param name="permissionNames">Name of the permissions</param>
public static bool IsGranted(this IPermissionChecker permissionChecker, bool requiresAll, params string[] permissionNames)
{
return AsyncHelper.RunSync(() => IsGrantedAsync(permissionChecker, requiresAll, permissionNames));
}
/// <summary>
/// Checks if current user is granted for given permission.
/// </summary>
/// <param name="permissionChecker">Permission checker</param>
/// <param name="requiresAll">True, to require all given permissions are granted. False, to require one or more.</param>
/// <param name="permissionNames">Name of the permissions</param>
public static async Task<bool> IsGrantedAsync(this IPermissionChecker permissionChecker, bool requiresAll, params string[] permissionNames)
{
if (permissionNames.IsNullOrEmpty())
{
return true;
}
if (requiresAll)
{
foreach (var permissionName in permissionNames)
{
if (!(await permissionChecker.IsGrantedAsync(permissionName)))
{
return false;
}
}
return true;
}
else
{
foreach (var permissionName in permissionNames)
{
if (await permissionChecker.IsGrantedAsync(permissionName))
{
return true;
}
}
return false;
}
}
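// Usage sketch (added comment, not part of the original source; the permission names are
// hypothetical): requiresAll switches between AND and OR semantics over the supplied names.
//
//   bool canEdit = permissionChecker.IsGranted(true, "Pages.Edit", "Pages.Publish");   // all must be granted
//   bool canView = permissionChecker.IsGranted(false, "Pages.View", "Pages.Admin");    // any one is enough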
/// <summary>
/// Authorizes current user for given permission or permissions,
/// throws <see cref="AbpAuthorizationException"/> if not authorized.
/// The user is authorized if any of the <see cref="permissionNames"/> are granted.
/// </summary>
/// <param name="permissionChecker">Permission checker</param>
/// <param name="permissionNames">Name of the permissions to authorize</param>
/// <exception cref="AbpAuthorizationException">Throws authorization exception if</exception>
public static void Authorize(this IPermissionChecker permissionChecker, params string[] permissionNames)
{
Authorize(permissionChecker, false, permissionNames);
}
/// <summary>
/// Authorizes current user for given permission or permissions,
/// throws <see cref="AbpAuthorizationException"/> if not authorized.
/// The user is authorized if any of the <see cref="permissionNames"/> are granted.
/// </summary>
/// <param name="permissionChecker">Permission checker</param>
/// <param name="requireAll">
/// If this is set to true, all of the <see cref="permissionNames"/> must be granted.
/// If it's false, at least one of the <see cref="permissionNames"/> must be granted.
/// </param>
/// <param name="permissionNames">Name of the permissions to authorize</param>
/// <exception cref="AbpAuthorizationException">Throws authorization exception if</exception>
public static void Authorize(this IPermissionChecker permissionChecker, bool requireAll, params string[] permissionNames)
{
AsyncHelper.RunSync(() => AuthorizeAsync(permissionChecker, requireAll, permissionNames));
}
/// <summary>
/// Authorizes current user for given permission or permissions,
/// throws <see cref="AbpAuthorizationException"/> if not authorized.
/// The user is authorized if any of the <see cref="permissionNames"/> are granted.
/// </summary>
/// <param name="permissionChecker">Permission checker</param>
/// <param name="permissionNames">Name of the permissions to authorize</param>
/// <exception cref="AbpAuthorizationException">Throws authorization exception if</exception>
public static Task AuthorizeAsync(this IPermissionChecker permissionChecker, params string[] permissionNames)
{
return AuthorizeAsync(permissionChecker, false, permissionNames);
}
/// <summary>
/// Authorizes current user for given permission or permissions,
/// throws <see cref="AbpAuthorizationException"/> if not authorized.
/// </summary>
/// <param name="permissionChecker">Permission checker</param>
/// <param name="requireAll">
/// If this is set to true, all of the <see cref="permissionNames"/> must be granted.
/// If it's false, at least one of the <see cref="permissionNames"/> must be granted.
/// </param>
/// <param name="permissionNames">Name of the permissions to authorize</param>
/// <exception cref="AbpAuthorizationException">Throws authorization exception if</exception>
public static async Task AuthorizeAsync(this IPermissionChecker permissionChecker, bool requireAll, params string[] permissionNames)
{
if (await IsGrantedAsync(permissionChecker, requireAll, permissionNames))
{
return;
}
var localizedPermissionNames = LocalizePermissionNames(permissionChecker, permissionNames);
if (requireAll)
{
throw new AbpAuthorizationException(
string.Format(
L(
permissionChecker,
"AllOfThesePermissionsMustBeGranted",
"Required permissions are not granted. All of these permissions must be granted: {0}"
),
string.Join(", ", localizedPermissionNames)
)
);
}
else
{
throw new AbpAuthorizationException(
string.Format(
L(
permissionChecker,
"AtLeastOneOfThesePermissionsMustBeGranted",
"Required permissions are not granted. At least one of these permissions must be granted: {0}"
),
string.Join(", ", localizedPermissionNames)
)
);
}
}
public static string L(IPermissionChecker permissionChecker, string name, string defaultValue)
{
if (!(permissionChecker is IIocManagerAccessor))
{
return defaultValue;
}
var iocManager = (permissionChecker as IIocManagerAccessor).IocManager;
using (var localizationManager = iocManager.ResolveAsDisposable<ILocalizationManager>())
{
return localizationManager.Object.GetString(AbpConsts.LocalizationSourceName, name);
}
}
public static string[] LocalizePermissionNames(IPermissionChecker permissionChecker, string[] permissionNames)
{
if (!(permissionChecker is IIocManagerAccessor))
{
return permissionNames;
}
var iocManager = (permissionChecker as IIocManagerAccessor).IocManager;
using (var localizationContext = iocManager.ResolveAsDisposable<ILocalizationContext>())
{
using (var permissionManager = iocManager.ResolveAsDisposable<IPermissionManager>())
{
return permissionNames.Select(permissionName =>
{
var permission = permissionManager.Object.GetPermissionOrNull(permissionName);
return permission == null
? permissionName
: permission.DisplayName.Localize(localizationContext.Object);
}).ToArray();
}
}
}
}
}
| |
using System;
using System.Diagnostics;
using System.Text;
using u8 = System.Byte;
using u32 = System.UInt32;
namespace Community.CsharpSqlite
{
public partial class Sqlite3
{
/*
** 2001 September 15
**
** The author disclaims copyright to this source code. In place of
** a legal notice, here is a blessing:
**
** May you do good and not evil.
** May you find forgiveness for yourself and forgive others.
** May you share freely, never taking more than you give.
**
*************************************************************************
** This file contains C code routines that are called by the parser
** in order to generate code for DELETE FROM statements.
*************************************************************************
** Included in SQLite3 port to C#-SQLite; 2008 Noah B Hart
** C#-SQLite is an independent reimplementation of the SQLite software library
**
** SQLITE_SOURCE_ID: 2010-01-05 15:30:36 28d0d7710761114a44a1a3a425a6883c661f06e7
**
** $Header$
*************************************************************************
*/
//#include "sqliteInt.h"
/*
** Look up every table that is named in pSrc. If any table is not found,
** add an error message to pParse.zErrMsg and return NULL. If all tables
** are found, return a pointer to the last table.
*/
static Table sqlite3SrcListLookup( Parse pParse, SrcList pSrc )
{
SrcList_item pItem = pSrc.a[0];
Table pTab;
Debug.Assert( pItem != null && pSrc.nSrc == 1 );
pTab = sqlite3LocateTable( pParse, 0, pItem.zName, pItem.zDatabase );
sqlite3DeleteTable( ref pItem.pTab );
pItem.pTab = pTab;
if ( pTab != null )
{
pTab.nRef++;
}
if ( sqlite3IndexedByLookup( pParse, pItem ) != 0 )
{
pTab = null;
}
return pTab;
}
/*
** Check to make sure the given table is writable. If it is not
** writable, generate an error message and return 1. If it is
** writable return 0;
*/
static bool sqlite3IsReadOnly( Parse pParse, Table pTab, int viewOk )
{
/* A table is not writable under the following circumstances:
**
** 1) It is a virtual table and no implementation of the xUpdate method
** has been provided, or
** 2) It is a system table (i.e. sqlite_master), this call is not
** part of a nested parse and writable_schema pragma has not
** been specified.
**
** In either case leave an error message in pParse and return non-zero.
*/
if (
( IsVirtual( pTab )
&& sqlite3GetVTable( pParse.db, pTab ).pMod.pModule.xUpdate == null )
|| ( ( pTab.tabFlags & TF_Readonly ) != 0
&& ( pParse.db.flags & SQLITE_WriteSchema ) == 0
&& pParse.nested == 0 )
)
{
sqlite3ErrorMsg( pParse, "table %s may not be modified", pTab.zName );
return true;
}
#if !SQLITE_OMIT_VIEW
if ( viewOk == 0 && pTab.pSelect != null )
{
sqlite3ErrorMsg( pParse, "cannot modify %s because it is a view", pTab.zName );
return true;
}
#endif
return false;
}
#if !SQLITE_OMIT_VIEW && !SQLITE_OMIT_TRIGGER
/*
** Evaluate a view and store its result in an ephemeral table. The
** pWhere argument is an optional WHERE clause that restricts the
** set of rows in the view that are to be added to the ephemeral table.
*/
static void sqlite3MaterializeView(
Parse pParse, /* Parsing context */
Table pView, /* View definition */
Expr pWhere, /* Optional WHERE clause to be added */
int iCur /* VdbeCursor number for ephemeral table */
)
{
SelectDest dest = new SelectDest();
Select pDup;
sqlite3 db = pParse.db;
pDup = sqlite3SelectDup( db, pView.pSelect, 0 );
if ( pWhere != null )
{
SrcList pFrom;
pWhere = sqlite3ExprDup( db, pWhere, 0 );
pFrom = sqlite3SrcListAppend( db, null, null, null );
//if ( pFrom != null )
//{
Debug.Assert( pFrom.nSrc == 1 );
pFrom.a[0].zAlias = pView.zName;// sqlite3DbStrDup( db, pView.zName );
pFrom.a[0].pSelect = pDup;
Debug.Assert( pFrom.a[0].pOn == null );
Debug.Assert( pFrom.a[0].pUsing == null );
//}
//else
//{
// sqlite3SelectDelete( db, ref pDup );
//}
pDup = sqlite3SelectNew( pParse, null, pFrom, pWhere, null, null, null, 0, null, null );
}
sqlite3SelectDestInit( dest, SRT_EphemTab, iCur );
sqlite3Select( pParse, pDup, ref dest );
sqlite3SelectDelete( db, ref pDup );
}
#endif //* !SQLITE_OMIT_VIEW) && !SQLITE_OMIT_TRIGGER) */
#if (SQLITE_ENABLE_UPDATE_DELETE_LIMIT) && !(SQLITE_OMIT_SUBQUERY)
/*
** Generate an expression tree to implement the WHERE, ORDER BY,
** and LIMIT/OFFSET portion of DELETE and UPDATE statements.
**
** DELETE FROM table_wxyz WHERE a<5 ORDER BY a LIMIT 1;
** \__________________________/
** pLimitWhere (pInClause)
*/
Expr sqlite3LimitWhere(
Parse pParse, /* The parser context */
SrcList pSrc, /* the FROM clause -- which tables to scan */
Expr pWhere, /* The WHERE clause. May be null */
ExprList pOrderBy, /* The ORDER BY clause. May be null */
Expr pLimit, /* The LIMIT clause. May be null */
Expr pOffset, /* The OFFSET clause. May be null */
char zStmtType /* Either DELETE or UPDATE. For error messages. */
){
Expr pWhereRowid = null; /* WHERE rowid .. */
Expr pInClause = null; /* WHERE rowid IN ( select ) */
Expr pSelectRowid = null; /* SELECT rowid ... */
ExprList pEList = null; /* Expression list containing only pSelectRowid */
SrcList pSelectSrc = null; /* SELECT rowid FROM x ... (dup of pSrc) */
Select pSelect = null; /* Complete SELECT tree */
/* Check that there isn't an ORDER BY without a LIMIT clause.
*/
if( pOrderBy!=null && (pLimit == null) ) {
sqlite3ErrorMsg(pParse, "ORDER BY without LIMIT on %s", zStmtType);
pParse.parseError = 1;
goto limit_where_cleanup_2;
}
/* We only need to generate a select expression if there
** is a limit/offset term to enforce.
*/
if ( pLimit == null )
{
/* if pLimit is null, pOffset will always be null as well. */
Debug.Assert( pOffset == null );
return pWhere;
}
/* Generate a select expression tree to enforce the limit/offset
** term for the DELETE or UPDATE statement. For example:
** DELETE FROM table_a WHERE col1=1 ORDER BY col2 LIMIT 1 OFFSET 1
** becomes:
** DELETE FROM table_a WHERE rowid IN (
** SELECT rowid FROM table_a WHERE col1=1 ORDER BY col2 LIMIT 1 OFFSET 1
** );
*/
pSelectRowid = sqlite3PExpr( pParse, TK_ROW, null, null, null );
if( pSelectRowid == null ) goto limit_where_cleanup_2;
pEList = sqlite3ExprListAppend( pParse, null, pSelectRowid);
if( pEList == null ) goto limit_where_cleanup_2;
/* duplicate the FROM clause as it is needed by both the DELETE/UPDATE tree
** and the SELECT subtree. */
pSelectSrc = sqlite3SrcListDup(pParse.db, pSrc,0);
if( pSelectSrc == null ) {
sqlite3ExprListDelete(pParse.db, pEList);
goto limit_where_cleanup_2;
}
/* generate the SELECT expression tree. */
pSelect = sqlite3SelectNew( pParse, pEList, pSelectSrc, pWhere, null, null,
pOrderBy, 0, pLimit, pOffset );
if( pSelect == null ) return null;
/* now generate the new WHERE rowid IN clause for the DELETE/UPDATE */
pWhereRowid = sqlite3PExpr( pParse, TK_ROW, null, null, null );
if( pWhereRowid == null ) goto limit_where_cleanup_1;
pInClause = sqlite3PExpr( pParse, TK_IN, pWhereRowid, null, null );
if( pInClause == null ) goto limit_where_cleanup_1;
pInClause.x.pSelect = pSelect;
pInClause.flags |= EP_xIsSelect;
sqlite3ExprSetHeight(pParse, pInClause);
return pInClause;
/* something went wrong. clean up anything allocated. */
limit_where_cleanup_1:
sqlite3SelectDelete(pParse.db, pSelect);
return null;
limit_where_cleanup_2:
sqlite3ExprDelete(pParse.db, ref pWhere);
sqlite3ExprListDelete(pParse.db, pOrderBy);
sqlite3ExprDelete(pParse.db, ref pLimit);
sqlite3ExprDelete(pParse.db, ref pOffset);
return null;
}
#endif //* defined(SQLITE_ENABLE_UPDATE_DELETE_LIMIT) && !defined(SQLITE_OMIT_SUBQUERY) */
/*
** Generate code for a DELETE FROM statement.
**
** DELETE FROM table_wxyz WHERE a<5 AND b NOT NULL;
**             \________/       \________________/
**              pTabList              pWhere
*/
static void sqlite3DeleteFrom(
Parse pParse, /* The parser context */
SrcList pTabList, /* The table from which we should delete things */
Expr pWhere /* The WHERE clause. May be null */
)
{
Vdbe v; /* The virtual database engine */
Table pTab; /* The table from which records will be deleted */
string zDb; /* Name of database holding pTab */
int end, addr = 0; /* A couple addresses of generated code */
int i; /* Loop counter */
WhereInfo pWInfo; /* Information about the WHERE clause */
Index pIdx; /* For looping over indices of the table */
int iCur; /* VDBE VdbeCursor number for pTab */
sqlite3 db; /* Main database structure */
AuthContext sContext; /* Authorization context */
NameContext sNC; /* Name context to resolve expressions in */
int iDb; /* Database number */
int memCnt = -1; /* Memory cell used for change counting */
int rcauth; /* Value returned by authorization callback */
#if !SQLITE_OMIT_TRIGGER
bool isView; /* True if attempting to delete from a view */
Trigger pTrigger; /* List of table triggers, if required */
#endif
sContext = new AuthContext();//memset(&sContext, 0, sizeof(sContext));
db = pParse.db;
if ( pParse.nErr != 0 /*|| db.mallocFailed != 0 */ )
{
goto delete_from_cleanup;
}
Debug.Assert( pTabList.nSrc == 1 );
/* Locate the table which we want to delete. This table has to be
** put in an SrcList structure because some of the subroutines we
** will be calling are designed to work with multiple tables and expect
** an SrcList* parameter instead of just a Table* parameter.
*/
pTab = sqlite3SrcListLookup( pParse, pTabList );
if ( pTab == null ) goto delete_from_cleanup;
/* Figure out if we have any triggers and if the table being
** deleted from is a view
*/
#if !SQLITE_OMIT_TRIGGER
int iDummy = 0;
pTrigger = sqlite3TriggersExist( pParse, pTab, TK_DELETE, null, ref iDummy );
isView = pTab.pSelect != null;
#else
const Trigger pTrigger = null;
bool isView = false;
#endif
#if SQLITE_OMIT_VIEW
//# undef isView
isView = false;
#endif
/* If pTab is really a view, make sure it has been initialized.
*/
if ( sqlite3ViewGetColumnNames( pParse, pTab ) != 0 )
{
goto delete_from_cleanup;
}
if ( sqlite3IsReadOnly( pParse, pTab, ( pTrigger != null ? 1 : 0 ) ) )
{
goto delete_from_cleanup;
}
iDb = sqlite3SchemaToIndex( db, pTab.pSchema );
Debug.Assert( iDb < db.nDb );
zDb = db.aDb[iDb].zName;
#if !SQLITE_OMIT_AUTHORIZATION
rcauth = sqlite3AuthCheck( pParse, SQLITE_DELETE, pTab.zName, null, zDb );
#else
rcauth = SQLITE_OK;
#endif
Debug.Assert( rcauth == SQLITE_OK || rcauth == SQLITE_DENY || rcauth == SQLITE_IGNORE );
if ( rcauth == SQLITE_DENY )
{
goto delete_from_cleanup;
}
Debug.Assert( !isView || pTrigger != null );
/* Assign cursor number to the table and all its indices.
*/
Debug.Assert( pTabList.nSrc == 1 );
iCur = pTabList.a[0].iCursor = pParse.nTab++;
for ( pIdx = pTab.pIndex; pIdx != null; pIdx = pIdx.pNext )
{
pParse.nTab++;
}
#if !SQLITE_OMIT_AUTHORIZATION
/* Start the view context
*/
if( isView ){
sqlite3AuthContextPush(pParse, sContext, pTab.zName);
}
#endif
/* Begin generating code.
*/
v = sqlite3GetVdbe( pParse );
if ( v == null )
{
goto delete_from_cleanup;
}
if ( pParse.nested == 0 ) sqlite3VdbeCountChanges( v );
sqlite3BeginWriteOperation( pParse, 1, iDb );
/* If we are trying to delete from a view, realize that view into
** an ephemeral table.
*/
#if !(SQLITE_OMIT_VIEW) && !(SQLITE_OMIT_TRIGGER)
if ( isView )
{
sqlite3MaterializeView( pParse, pTab, pWhere, iCur );
}
#endif
/* Resolve the column names in the WHERE clause.
*/
sNC = new NameContext();// memset( &sNC, 0, sizeof( sNC ) );
sNC.pParse = pParse;
sNC.pSrcList = pTabList;
if ( sqlite3ResolveExprNames( sNC, ref pWhere ) != 0 )
{
goto delete_from_cleanup;
}
/* Initialize the counter of the number of rows deleted, if
** we are counting rows.
*/
if ( ( db.flags & SQLITE_CountRows ) != 0 )
{
memCnt = ++pParse.nMem;
sqlite3VdbeAddOp2( v, OP_Integer, 0, memCnt );
}
#if !SQLITE_OMIT_TRUNCATE_OPTIMIZATION
/* Special case: A DELETE without a WHERE clause deletes everything.
** It is easier just to erase the whole table. Prior to version 3.6.5,
** this optimization caused the row change count (the value returned by
** API function sqlite3_count_changes) to be set incorrectly. */
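/* Illustrative sketch (not generated verbatim): with no WHERE clause, no
** triggers and no foreign keys, "DELETE FROM t;" compiles to one OP_Clear
** per btree:
**
**     OP_Clear  t.tnum    iDb  memCnt   -- erase the table, counting rows
**     OP_Clear  idx.tnum  iDb           -- repeated for each index of t
**
** A DELETE with a WHERE clause instead falls through to the rowset scan in
** the else-branch below. */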
if ( rcauth == SQLITE_OK && pWhere == null && null == pTrigger && !IsVirtual( pTab )
&& 0 == sqlite3FkRequired( pParse, pTab, null, 0 )
)
{
Debug.Assert( !isView );
sqlite3VdbeAddOp4( v, OP_Clear, pTab.tnum, iDb, memCnt,
pTab.zName, P4_STATIC );
for ( pIdx = pTab.pIndex; pIdx != null; pIdx = pIdx.pNext )
{
Debug.Assert( pIdx.pSchema == pTab.pSchema );
sqlite3VdbeAddOp2( v, OP_Clear, pIdx.tnum, iDb );
}
}
else
#endif //* SQLITE_OMIT_TRUNCATE_OPTIMIZATION */
/* The usual case: There is a WHERE clause so we have to scan through
** the table and pick which records to delete.
*/
{
int iRowSet = ++pParse.nMem; /* Register for rowset of rows to delete */
int iRowid = ++pParse.nMem; /* Used for storing rowid values. */
int regRowid; /* Actual register containing rowids */
/* Collect rowids of every row to be deleted.
*/
sqlite3VdbeAddOp2( v, OP_Null, 0, iRowSet );
ExprList elDummy = null;
pWInfo = sqlite3WhereBegin( pParse, pTabList, pWhere, ref elDummy, WHERE_DUPLICATES_OK );
if ( pWInfo == null ) goto delete_from_cleanup;
regRowid = sqlite3ExprCodeGetColumn( pParse, pTab, -1, iCur, iRowid );
sqlite3VdbeAddOp2( v, OP_RowSetAdd, iRowSet, regRowid );
if ( ( db.flags & SQLITE_CountRows ) != 0 )
{
sqlite3VdbeAddOp2( v, OP_AddImm, memCnt, 1 );
}
sqlite3WhereEnd( pWInfo );
/* Delete every item whose key was written to the list during the
** database scan. We have to delete items after the scan is complete
** because deleting an item can change the scan order. */
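/* Rough shape of the two-pass code emitted below (illustrative only):
**
**       OP_Null       0, iRowSet             -- start with an empty rowset
**       <WHERE loop>  OP_RowSetAdd iRowSet   -- pass 1: collect rowids
**     addr:
**       OP_RowSetRead iRowSet, end, iRowid   -- pass 2: pop the next rowid
**       ... seek the row and delete it ...
**       OP_Goto       0, addr
**     end:
*/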
end = sqlite3VdbeMakeLabel( v );
/* Unless this is a view, open cursors for the table we are
** deleting from and all its indices. If this is a view, then the
** only effect this statement has is to fire the INSTEAD OF
** triggers. */
if ( !isView )
{
sqlite3OpenTableAndIndices( pParse, pTab, iCur, OP_OpenWrite );
}
addr = sqlite3VdbeAddOp3( v, OP_RowSetRead, iRowSet, end, iRowid );
/* Delete the row */
#if !SQLITE_OMIT_VIRTUALTABLE
if ( IsVirtual( pTab ) )
{
/* translated from the C original; sqlite3GetVTable returns the VTable for pTab */
VTable pVTab = sqlite3GetVTable( db, pTab );
sqlite3VtabMakeWritable( pParse, pTab );
sqlite3VdbeAddOp4( v, OP_VUpdate, 0, 1, iRowid, pVTab, P4_VTAB );
sqlite3MayAbort( pParse );
}
else
#endif
{
int count = ( pParse.nested == 0 ) ? 1 : 0; /* True to count changes */
sqlite3GenerateRowDelete( pParse, pTab, iCur, iRowid, count, pTrigger, OE_Default );
}
/* End of the delete loop */
sqlite3VdbeAddOp2( v, OP_Goto, 0, addr );
sqlite3VdbeResolveLabel( v, end );
/* Close the cursors open on the table and its indexes. */
if ( !isView && !IsVirtual( pTab ) )
{
for ( i = 1, pIdx = pTab.pIndex; pIdx != null; i++, pIdx = pIdx.pNext )
{
sqlite3VdbeAddOp2( v, OP_Close, iCur + i, pIdx.tnum );
}
sqlite3VdbeAddOp1( v, OP_Close, iCur );
}
}
/* Update the sqlite_sequence table by storing the content of the
** maximum rowid counter values recorded while inserting into
** autoincrement tables.
*/
if ( pParse.nested == 0 && pParse.pTriggerTab == null )
{
sqlite3AutoincrementEnd( pParse );
}
/* Return the number of rows that were deleted. If this routine is
** generating code because of a call to sqlite3NestedParse(), do not
** invoke the callback function.
*/
if ( ( db.flags & SQLITE_CountRows ) != 0 && 0 == pParse.nested && null == pParse.pTriggerTab )
{
sqlite3VdbeAddOp2( v, OP_ResultRow, memCnt, 1 );
sqlite3VdbeSetNumCols( v, 1 );
sqlite3VdbeSetColName( v, 0, COLNAME_NAME, "rows deleted", SQLITE_STATIC );
}
delete_from_cleanup:
#if !SQLITE_OMIT_AUTHORIZATION
sqlite3AuthContextPop(sContext);
#endif
sqlite3SrcListDelete( db, ref pTabList );
sqlite3ExprDelete( db, ref pWhere );
return;
}
/* Make sure "isView" and other macros defined above are undefined. Otherwise
** they may interfere with compilation of other functions in this file
** (or in another file, if this file becomes part of the amalgamation). */
//#ifdef isView
// #undef isView
//#endif
//#ifdef pTrigger
// #undef pTrigger
//#endif
/*
** This routine generates VDBE code that causes a single row of a
** single table to be deleted.
**
** The VDBE must be in a particular state when this routine is called.
** These are the requirements:
**
** 1. A read/write cursor pointing to pTab, the table containing the row
** to be deleted, must be opened as cursor number $iCur.
**
** 2. Read/write cursors for all indices of pTab must be open as
** cursor number base+i for the i-th index.
**
** 3. The record number of the row to be deleted must be stored in
** memory cell iRowid.
**
** This routine generates code to remove both the table record and all
** index entries that point to that record.
*/
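/* For reference, the caller in sqlite3DeleteFrom() above satisfies these
** requirements roughly as follows (illustrative sketch of the calls made
** there, not new code):
**
**     sqlite3OpenTableAndIndices( pParse, pTab, iCur, OP_OpenWrite );     // 1, 2
**     addr = sqlite3VdbeAddOp3( v, OP_RowSetRead, iRowSet, end, iRowid ); // 3
**     sqlite3GenerateRowDelete( pParse, pTab, iCur, iRowid, count, pTrigger, OE_Default );
*/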
static void sqlite3GenerateRowDelete(
Parse pParse, /* Parsing context */
Table pTab, /* Table containing the row to be deleted */
int iCur, /* VdbeCursor number for the table */
int iRowid, /* Memory cell that contains the rowid to delete */
int count, /* If non-zero, increment the row change counter */
Trigger pTrigger, /* List of triggers to (potentially) fire */
int onconf /* Default ON CONFLICT policy for triggers */
)
{
Vdbe v = pParse.pVdbe; /* Vdbe */
int iOld = 0; /* First register in OLD.* array */
int iLabel; /* Label resolved to end of generated code */
/* Vdbe is guaranteed to have been allocated by this stage. */
Debug.Assert( v != null );
/* Seek cursor iCur to the row to delete. If this row no longer exists
** (this can happen if a trigger program has already deleted it), do
** not attempt to delete it or fire any DELETE triggers. */
iLabel = sqlite3VdbeMakeLabel( v );
sqlite3VdbeAddOp3( v, OP_NotExists, iCur, iLabel, iRowid );
/* If there are any triggers to fire, allocate a range of registers to
** use for the old.* references in the triggers. */
if ( sqlite3FkRequired( pParse, pTab, null, 0 ) != 0 || pTrigger != null )
{
u32 mask; /* Mask of OLD.* columns in use */
int iCol; /* Iterator used while populating OLD.* */
/* TODO: Could use temporary registers here. Also could attempt to
** avoid copying the contents of the rowid register. */
mask = sqlite3TriggerColmask(
pParse, pTrigger, null, 0, TRIGGER_BEFORE | TRIGGER_AFTER, pTab, onconf
);
mask |= sqlite3FkOldmask( pParse, pTab );
iOld = pParse.nMem + 1;
pParse.nMem += ( 1 + pTab.nCol );
/* Populate the OLD.* pseudo-table register array. These values will be
** used by any BEFORE and AFTER triggers that exist. */
sqlite3VdbeAddOp2( v, OP_Copy, iRowid, iOld );
for ( iCol = 0; iCol < pTab.nCol; iCol++ )
{
if ( mask == 0xffffffff || ( mask & ( 1 << iCol ) ) != 0 )
{
int iTarget = iOld + iCol + 1;
sqlite3VdbeAddOp3( v, OP_Column, iCur, iCol, iTarget );
sqlite3ColumnDefault( v, pTab, iCol, iTarget );
}
}
/* Invoke BEFORE DELETE trigger programs. */
sqlite3CodeRowTrigger( pParse, pTrigger,
TK_DELETE, null, TRIGGER_BEFORE, pTab, iOld, onconf, iLabel
);
/* Seek the cursor to the row to be deleted again. It may be that
** the BEFORE triggers coded above have already removed the row
** being deleted. Do not attempt to delete the row a second time, and
** do not fire AFTER triggers. */
sqlite3VdbeAddOp3( v, OP_NotExists, iCur, iLabel, iRowid );
/* Do FK processing. This call checks that any FK constraints that
** refer to this table (i.e. constraints attached to other tables)
** are not violated by deleting this row. */
sqlite3FkCheck( pParse, pTab, iOld, 0 );
}
/* Delete the index and table entries. Skip this step if pTab is really
** a view (in which case the only effect of the DELETE statement is to
** fire the INSTEAD OF triggers). */
if ( pTab.pSelect == null )
{
sqlite3GenerateRowIndexDelete( pParse, pTab, iCur, 0 );
sqlite3VdbeAddOp2( v, OP_Delete, iCur, ( count != 0 ? (int)OPFLAG_NCHANGE : 0 ) );
if ( count != 0 )
{
sqlite3VdbeChangeP4( v, -1, pTab.zName, P4_STATIC );
}
}
/* Do any ON CASCADE, SET NULL or SET DEFAULT operations required to
** handle rows (possibly in other tables) that refer via a foreign key
** to the row just deleted. */
sqlite3FkActions( pParse, pTab, null, iOld );
/* Invoke AFTER DELETE trigger programs. */
sqlite3CodeRowTrigger( pParse, pTrigger,
TK_DELETE, null, TRIGGER_AFTER, pTab, iOld, onconf, iLabel
);
/* Jump here if the row had already been deleted before any BEFORE
** trigger programs were invoked. Or if a trigger program throws a
** RAISE(IGNORE) exception. */
sqlite3VdbeResolveLabel( v, iLabel );
}
/*
** This routine generates VDBE code that causes the deletion of all
** index entries associated with a single row of a single table.
**
** The VDBE must be in a particular state when this routine is called.
** These are the requirements:
**
** 1. A read/write cursor pointing to pTab, the table containing the row
** to be deleted, must be opened as cursor number "iCur".
**
** 2. Read/write cursors for all indices of pTab must be open as
** cursor number iCur+i for the i-th index.
**
** 3. The "iCur" cursor must be pointing to the row that is to be
** deleted.
*/
static void sqlite3GenerateRowIndexDelete(
Parse pParse, /* Parsing and code generating context */
Table pTab, /* Table containing the row to be deleted */
int iCur, /* VdbeCursor number for the table */
int nothing /* Only delete if aRegIdx!=0 && aRegIdx[i]>0 */
)
{
int[] aRegIdx = null;
sqlite3GenerateRowIndexDelete( pParse, pTab, iCur, aRegIdx );
}
static void sqlite3GenerateRowIndexDelete(
Parse pParse, /* Parsing and code generating context */
Table pTab, /* Table containing the row to be deleted */
int iCur, /* VdbeCursor number for the table */
int[] aRegIdx /* Only delete if aRegIdx!=0 && aRegIdx[i]>0 */
)
{
int i;
Index pIdx;
int r1;
for ( i = 1, pIdx = pTab.pIndex; pIdx != null; i++, pIdx = pIdx.pNext )
{
if ( aRegIdx != null && aRegIdx[i - 1] == 0 ) continue;
r1 = sqlite3GenerateIndexKey( pParse, pIdx, iCur, 0, false );
sqlite3VdbeAddOp3( pParse.pVdbe, OP_IdxDelete, iCur + i, r1, pIdx.nColumn + 1 );
}
}
/*
** Generate code that will assemble an index key and put it in register
** regOut. The key will be for index pIdx which is an index on pTab.
** iCur is the index of a cursor open on the pTab table and pointing to
** the entry that needs indexing.
**
** Return a register number which is the first in a block of
** registers that holds the elements of the index key. The
** block of registers has already been deallocated by the time
** this routine returns.
*/
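/* Register layout produced below (illustrative): for an index on (a, b) the
** returned regBase names a block of nCol+1 registers
**
**     regBase+0 : value of column a   (OP_Column, or OP_SCopy if a is the rowid alias)
**     regBase+1 : value of column b
**     regBase+2 : rowid of the row    (OP_Rowid)
**
** and, when doMakeRec is true, an OP_MakeRecord packs that block into regOut
** using the index affinity string. */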
static int sqlite3GenerateIndexKey(
Parse pParse, /* Parsing context */
Index pIdx, /* The index for which to generate a key */
int iCur, /* VdbeCursor number for the pIdx.pTable table */
int regOut, /* Write the new index key to this register */
bool doMakeRec /* Run the OP_MakeRecord instruction if true */
)
{
Vdbe v = pParse.pVdbe;
int j;
Table pTab = pIdx.pTable;
int regBase;
int nCol;
nCol = pIdx.nColumn;
regBase = sqlite3GetTempRange( pParse, nCol + 1 );
sqlite3VdbeAddOp2( v, OP_Rowid, iCur, regBase + nCol );
for ( j = 0; j < nCol; j++ )
{
int idx = pIdx.aiColumn[j];
if ( idx == pTab.iPKey )
{
sqlite3VdbeAddOp2( v, OP_SCopy, regBase + nCol, regBase + j );
}
else
{
sqlite3VdbeAddOp3( v, OP_Column, iCur, idx, regBase + j );
sqlite3ColumnDefault( v, pTab, idx, -1 );
}
}
if ( doMakeRec )
{
sqlite3VdbeAddOp3( v, OP_MakeRecord, regBase, nCol + 1, regOut );
sqlite3VdbeChangeP4( v, -1, sqlite3IndexAffinityStr( v, pIdx ), 0 );
}
sqlite3ReleaseTempRange( pParse, regBase, nCol + 1 );
return regBase;
}
}
}
| |
//---------------------------------------------------------------------------
//
// Copyright (C) Microsoft Corporation. All rights reserved.
//
// File: PropertyPath.cs
//
// Description:
// Used to describe a property as a "path" below another property.
//
// Example: "Background.Opacity" is a path. It implies:
// * Find the Background property, get the value object of that property.
// * What we want is the Opacity property on that object.
//
//---------------------------------------------------------------------------
using System.ComponentModel;
using System.Diagnostics;
using System.Globalization;
using System.Reflection;
using System.Text;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.Collections.Specialized;
using System.Windows.Data;
using System.Windows.Threading;
using System.Windows.Markup;
using MS.Internal;
using MS.Internal.Data;
using MS.Utility; // FrugalList
// A property path really consists of two parts: a static part (PropertyPath)
// that describes the path, and a dynamic part (PropertyPathWorker) that knows
// how to evaluate the path, relative to a "root item".
//
// PropertyPath supports two modes of behavior:
//
// "Source" mode is appropriate when the path describes a "source" - some place
// from which we'll fetch values. The user of PropertyPath typically creates
// workers explicitly - one for each root item - and calls them directly. The
// workers are fully dynamic; they listen for property and currency change
// events, maintain dependency sources, etc. The connection between the worker
// and its root item is long-lived. This mode is used by the Binding class in
// support of data binding.
//
// "Target" mode is appropriate when the path describes a "target" - some place
// into which we'll store values. The user of PropertyPath typically does not
// create workers, but rather calls the convenience routines in PropertyPath
// (relying on the implicit "single" worker). The connection between the
// worker and its root item is short-lived; the caller typically connects to
// a root item, calls a few methods, then disconnects. This mode is used by
// the property engine and by animation in support of timeline setters.
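// A minimal target-mode sketch (illustrative; "someObject" is a placeholder
// root item, and these are the internal convenience members defined below):
//
//     PropertyPath path = new PropertyPath("Background.Opacity");
//     using (path.SetContext(someObject))
//     {
//         object accessor = path.LastAccessor; // the Opacity property
//         object value = path.GetValue();
//     }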
namespace System.Windows
{
internal enum PropertyPathStatus : byte { Inactive, Active, PathError, AsyncRequestPending }
internal struct IndexerParameterInfo
{
public Type type; // user-specified type
public object value; // string or strongly typed value
}
/// <summary>
/// Data structure for describing a property as a path below another
/// </summary>
[TypeConverter(typeof(PropertyPathConverter))]
public sealed class PropertyPath
{
//------------------------------------------------------
//
// Constructors
//
//------------------------------------------------------
/// <summary>
/// Construct a PropertyPath from a string and a list of parameters
/// </summary>
public PropertyPath(string path, params object[] pathParameters)
{
if (System.Windows.Threading.Dispatcher.CurrentDispatcher == null)
throw new InvalidOperationException(); // This is actually never called since CurrentDispatcher will throw if null.
_path = path;
if (pathParameters != null && pathParameters.Length > 0)
{
// initialize internal pathParameters list
PathParameterCollection parameters = new PathParameterCollection(pathParameters);
SetPathParameterCollection(parameters);
}
PrepareSourceValueInfo(null);
}
/// <summary>
/// Public constructor that takes a single parameter. This is
/// the degenerate PropertyPath (a path of a single step).
/// </summary>
public PropertyPath(object parameter)
: this(SingleStepPath, parameter)
{
}
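// Usage sketch (illustrative): the public constructors accept either a plain
// path string, or a string plus early-bound accessors that the path refers
// to by index, e.g.
//
//     new PropertyPath("Background.Opacity");
//     new PropertyPath("(0).(1)", Control.BackgroundProperty, Brush.OpacityProperty);
//     new PropertyPath(TextBlock.TextProperty);    // degenerate "(0)" path
//
// Each parameter must be a DependencyProperty, a PropertyInfo, or a
// PropertyDescriptor (see PathParameters below).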
// This constructor is for use by the PropertyPathConverter
internal PropertyPath(string path, ITypeDescriptorContext typeDescriptorContext)
{
_path = path;
PrepareSourceValueInfo(typeDescriptorContext);
NormalizePath();
}
//------------------------------------------------------
//
// Public properties
//
//------------------------------------------------------
/// <summary> The string describing the path. </summary>
public string Path
{
get { return _path; }
set
{
_path = value;
PrepareSourceValueInfo(null);
}
}
/// <summary>
/// The list of parameters to use when the
/// path refers to indexed parameters.
/// Each parameter in the list should be a DependencyProperty,
/// a PropertyInfo, or a PropertyDescriptor.
/// </summary>
public Collection<object> PathParameters
{
get
{
if (_parameters == null)
{
SetPathParameterCollection(new PathParameterCollection());
}
return _parameters;
}
}
//------------------------------------------------------
//
// Internal properties
//
//------------------------------------------------------
// the number of levels in the path
internal int Length { get { return _arySVI.Length; } }
// the status of the PropertyPath
internal PropertyPathStatus Status { get { return SingleWorker.Status; } }
// the most recent error message
internal string LastError { get { return _lastError; } }
// convenience properties for a frequent special case
internal object LastItem { get { return GetItem(Length - 1); } }
internal object LastAccessor { get { return GetAccessor(Length - 1); } }
internal object[] LastIndexerArguments { get { return GetIndexerArguments(Length - 1); } }
// test for static properties
internal bool StartsWithStaticProperty { get { return Length > 0 && IsStaticProperty(_earlyBoundPathParts[0]); } }
internal static bool IsStaticProperty(object accessor)
{
MethodInfo mi;
DependencyProperty dp;
PropertyInfo pi;
PropertyDescriptor pd;
DynamicObjectAccessor doa;
DowncastAccessor(accessor, out dp, out pi, out pd, out doa);
if (pi != null)
{
mi = pi.GetGetMethod();
return mi != null && mi.IsStatic;
}
return false;
}
//------------------------------------------------------
//
// Internal methods
//
//------------------------------------------------------
// Convert an "accessor" into one of the legal types
internal static void DowncastAccessor(object accessor,
out DependencyProperty dp, out PropertyInfo pi, out PropertyDescriptor pd, out DynamicObjectAccessor doa)
{
if ((dp = accessor as DependencyProperty) != null)
{
pd = null;
pi = null;
doa = null;
}
else if ((pi = accessor as PropertyInfo) != null)
{
pd = null;
doa = null;
}
else if ((pd = accessor as PropertyDescriptor) != null)
{
doa = null;
}
else
{
doa = accessor as DynamicObjectAccessor;
}
}
// Set the context for the path. Use this method in "target" mode
// to connect the path to a rootItem for a short time:
// using (path.SetContext(myItem))
// {
// ... call target-mode convenience methods ...
// }
internal IDisposable SetContext(object rootItem)
{
return SingleWorker.SetContext(rootItem);
}
// return the item for level k. This is the result of evaluating the
// path up to level k-1, starting at the root item.
internal object GetItem(int k)
{
return SingleWorker.GetItem(k);
}
// return the "accessor" for level k. This is the object used to get
// the value of level k (together with the level-k item). It can be
// a DP, a PropertyInfo, a PropertyDescriptor, etc.
internal object GetAccessor(int k)
{
object accessor = _earlyBoundPathParts[k];
if (accessor == null)
{
accessor = SingleWorker.GetAccessor(k);
}
return accessor;
}
// return the arguments to use when the accessor at level k is an
// indexer. (If it's not an indexer, this returns null.)
internal object[] GetIndexerArguments(int k)
{
return SingleWorker.GetIndexerArguments(k);
}
// return the value of the path. Must be called within the scope
// of SetContext.
internal object GetValue()
{
return SingleWorker.RawValue();
}
// return the number of unresolved attached properties (called by Binding)
internal int ComputeUnresolvedAttachedPropertiesInPath()
{
// the path uses attached properties by the syntax (ClassName.PropName).
// If there are any such properties in the path, the binding needs the
// tree context to resolve the class name.
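// For example (illustrative), in the path "(Validation.Errors)[0].ErrorContent"
// the step "(Validation.Errors)" is an attached-property reference; unless it
// was resolved early into _earlyBoundPathParts, it is counted as unresolved here.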
int result = 0;
for (int k=Length-1; k>=0; --k)
{
if (_earlyBoundPathParts[k] == null)
{
string name = _arySVI[k].name;
if (IsPropertyReference(name))
{
// a dot inside parens, when there's no early-bound accessor,
// is an unresolved PD name
if (name.IndexOf('.') >= 0)
++ result;
}
}
}
return result;
}
//------------------------------------------------------
//
// Internal properties and methods for use by PropertyPathWorker only
//
//------------------------------------------------------
internal SourceValueInfo[] SVI
{
get
{
//Debug.Assert(Helper.IsCallerOfType(typeof(PropertyPathWorker)));
return _arySVI;
}
}
internal object ResolvePropertyName(int level, object item, Type ownerType, object context)
{
//Debug.Assert(Helper.IsCallerOfType(typeof(PropertyPathWorker)));
// if user told us explicitly what to use, use it
object accessor = _earlyBoundPathParts[level];
if (accessor == null)
{
accessor = ResolvePropertyName(_arySVI[level].name, item, ownerType, context, false);
}
return accessor;
}
internal IndexerParameterInfo[] ResolveIndexerParams(int level, object context)
{
IndexerParameterInfo[] parameters = _earlyBoundPathParts[level] as IndexerParameterInfo[];
if (parameters == null)
{
parameters = ResolveIndexerParams(_arySVI[level].paramList, context, false);
}
return parameters;
}
// PropertyPathWorker may choose to replace an indexer by a property
internal void ReplaceIndexerByProperty(int level, string name)
{
_arySVI[level].name = name;
_arySVI[level].propertyName = name;
_arySVI[level].type = SourceValueType.Property;
_earlyBoundPathParts[level] = null;
}
//------------------------------------------------------
//
// Private properties
//
//------------------------------------------------------
PropertyPathWorker SingleWorker
{
get
{
if (_singleWorker == null)
_singleWorker = new PropertyPathWorker(this);
return _singleWorker;
}
}
//------------------------------------------------------
//
// Private methods
//
//------------------------------------------------------
// parse the path to figure out what kind of
// SourceValueInfo we're going to need
private void PrepareSourceValueInfo(ITypeDescriptorContext typeDescriptorContext)
{
PathParser parser = DataBindEngine.CurrentDataBindEngine.PathParser;
_arySVI = parser.Parse(Path);
if (_arySVI.Length == 0)
{
string detail = parser.Error;
if (detail == null)
detail = Path;
throw new InvalidOperationException(SR.Get(SRID.PropertyPathSyntaxError, detail));
}
ResolvePathParts(typeDescriptorContext);
}
// "normalize" the path - i.e. load the PathParameters with the early-bound
// accessors, and replace the corresponding parts of the path with
// parameter references
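// For example (illustrative): a path written as "(TextBlock.Text).Length"
// whose parenthesized reference resolved early would normalize to "(0).Length",
// with TextBlock.TextProperty stored at index 0 of PathParameters.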
private void NormalizePath()
{
StringBuilder builder = new StringBuilder();
PathParameterCollection parameters = new PathParameterCollection();
for (int i=0; i<_arySVI.Length; ++i)
{
switch (_arySVI[i].drillIn)
{
case DrillIn.Always:
builder.Append('/');
break;
case DrillIn.Never:
if (_arySVI[i].type == SourceValueType.Property)
{
builder.Append('.');
}
break;
case DrillIn.IfNeeded:
break;
}
switch (_arySVI[i].type)
{
case SourceValueType.Property:
if (_earlyBoundPathParts[i] != null)
{
builder.Append('(');
builder.Append(parameters.Count.ToString(TypeConverterHelper.InvariantEnglishUS.NumberFormat));
builder.Append(')');
parameters.Add(_earlyBoundPathParts[i]);
}
else
{
builder.Append(_arySVI[i].name);
}
break;
case SourceValueType.Indexer:
builder.Append('[');
if (_earlyBoundPathParts[i] != null)
{
IndexerParameterInfo[] aryIPI = (IndexerParameterInfo[])_earlyBoundPathParts[i];
// the params should be at the very least a single empty string
Debug.Assert(aryIPI.Length > 0);
int j = 0;
while (true)
{
IndexerParameterInfo info = aryIPI[j];
if (info.type != null)
{
builder.Append('(');
builder.Append(parameters.Count.ToString(TypeConverterHelper.InvariantEnglishUS.NumberFormat));
builder.Append(')');
parameters.Add(info.value);
}
else
{
builder.Append(info.value);
}
++j;
if (j < aryIPI.Length)
{
builder.Append(',');
}
else
{
break;
}
}
}
else
{
builder.Append(_arySVI[i].name);
}
builder.Append(']');
break;
case SourceValueType.Direct:
break;
}
}
if (parameters.Count > 0)
{
_path = builder.ToString();
SetPathParameterCollection(parameters);
}
}
// set new parameter collection; update collection change notification handler
private void SetPathParameterCollection(PathParameterCollection parameters)
{
if (_parameters != null)
{
_parameters.CollectionChanged -= new NotifyCollectionChangedEventHandler(ParameterCollectionChanged);
}
_parameters = parameters;
if (_parameters != null)
{
_parameters.CollectionChanged += new NotifyCollectionChangedEventHandler(ParameterCollectionChanged);
}
}
// path parameters were added/removed, update SourceValueInfo
private void ParameterCollectionChanged(object sender, NotifyCollectionChangedEventArgs e)
{
PrepareSourceValueInfo(null);
}
// resolve the property names and path parameters early, if possible
void ResolvePathParts(ITypeDescriptorContext typeDescriptorContext)
{
bool throwOnError = (typeDescriptorContext != null);
object context = null;
TypeConvertContext typeConvertContext = typeDescriptorContext as TypeConvertContext;
if( typeConvertContext != null )
context = typeConvertContext.ParserContext;
if (context == null)
context = typeDescriptorContext;
_earlyBoundPathParts = new object[Length];
for (int level=Length-1; level>=0; --level)
{
if (_arySVI[level].type == SourceValueType.Property)
{
string name = _arySVI[level].name;
if (IsPropertyReference(name))
{
object accessor = ResolvePropertyName(name, null, null, context, throwOnError);
_earlyBoundPathParts[level] = accessor;
if (accessor != null)
{
_arySVI[level].propertyName = GetPropertyName(accessor);
}
}
else
{
_arySVI[level].propertyName = name;
}
}
else if (_arySVI[level].type == SourceValueType.Indexer)
{
IndexerParameterInfo[] indexerParams = ResolveIndexerParams(_arySVI[level].paramList, context, throwOnError);
_earlyBoundPathParts[level] = indexerParams;
_arySVI[level].propertyName = Binding.IndexerName;
}
}
}
// resolve a single DP name
object ResolvePropertyName(string name, object item, Type ownerType, object context, bool throwOnError)
{
string propertyName = name;
int index;
// first see if the name is an index into the parameter list
if (IsParameterIndex(name, out index))
{
if (0 <= index && index < PathParameters.Count)
{
object accessor = PathParameters[index];
// always throw if the accessor isn't valid - this error cannot
// be corrected later on.
if (!IsValidAccessor(accessor))
throw new InvalidOperationException(SR.Get(SRID.PropertyPathInvalidAccessor,
(accessor != null) ? accessor.GetType().FullName : "null"));
return accessor;
}
else if (throwOnError)
throw new InvalidOperationException(SR.Get(SRID.PathParametersIndexOutOfRange, index, PathParameters.Count));
else return null;
}
// handle attached-property syntax: (TypeName.PropertyName)
if (IsPropertyReference(name))
{
name = name.Substring(1, name.Length-2);
int lastIndex = name.LastIndexOf('.');
if (lastIndex >= 0)
{
// attached property - get the owner type
propertyName = name.Substring(lastIndex + 1).Trim();
string ownerName = name.Substring(0, lastIndex).Trim();
ownerType = GetTypeFromName(ownerName, context);
if (ownerType == null && throwOnError)
throw new InvalidOperationException(SR.Get(SRID.PropertyPathNoOwnerType, ownerName));
}
else
{
// simple name in parens - just strip the parens
propertyName = name;
}
}
if (ownerType != null)
{
// get an appropriate accessor from the ownerType and propertyName.
// We prefer accessors in a certain order, defined below.
object accessor;
// 1. DependencyProperty on the given type.
accessor = DependencyProperty.FromName(propertyName, ownerType);
// 2. PropertyDescriptor from item's custom lookup.
// When the item implements custom properties, we must use them.
if (accessor == null && item is ICustomTypeDescriptor)
{
accessor = TypeDescriptor.GetProperties(item)[propertyName];
}
// 3a. PropertyInfo, when item exposes INotifyPropertyChanged.
// 3b. PropertyInfo, when item is a DependencyObject (bug 1373351).
// This uses less working set than PropertyDescriptor, and we don't need
// the ValueChanged pattern. (If item is a DO and wants to raise
// change notifications, it should make the property a DP.)
if (accessor == null &&
(item is INotifyPropertyChanged || item is DependencyObject))
{
accessor = GetPropertyHelper(ownerType, propertyName);
}
// 4. PropertyDescriptor (obtain from item - this is reputedly
// slower than obtaining from type, but the latter doesn't
// discover properties obtained from TypeDescriptorProvider -
// see bug 1713000).
// This supports the ValueChanged pattern.
if (accessor == null && item != null)
{
accessor = TypeDescriptor.GetProperties(item)[propertyName];
}
// 5. PropertyInfo.
if (accessor == null)
{
accessor = GetPropertyHelper(ownerType, propertyName);
}
// 6. IDynamicMetaObjectProvider
// This supports the DLR's dynamic objects
if (accessor == null && SystemCoreHelper.IsIDynamicMetaObjectProvider(item))
{
accessor = SystemCoreHelper.NewDynamicPropertyAccessor(item.GetType(), propertyName);
}
if (accessor == null && throwOnError)
throw new InvalidOperationException(SR.Get(SRID.PropertyPathNoProperty, ownerType.Name, propertyName));
return accessor;
}
return null;
}
private PropertyInfo GetPropertyHelper(Type ownerType, string propertyName)
{
PropertyInfo result = null;
bool enumerateBaseClasses = false;
bool returnIndexerProperty = false;
try
{
result = ownerType.GetProperty(propertyName, BindingFlags.Public | BindingFlags.Instance | BindingFlags.Static | BindingFlags.FlattenHierarchy);
}
catch (AmbiguousMatchException)
{
// this happens when ownerType hides a base class property with 'new'
// (it also happens by mistake when a non-generic class overrides
// a generic base class property - see DDB 105201).
// We'll resolve this by returning the most specific property.
enumerateBaseClasses = true;
}
if (enumerateBaseClasses)
{
try
{
for (result = null; result == null && ownerType != null; ownerType = ownerType.BaseType)
{
result = ownerType.GetProperty(propertyName, BindingFlags.DeclaredOnly | BindingFlags.Instance | BindingFlags.Public);
}
}
catch (AmbiguousMatchException)
{
// if a single class declares the property twice, it must be
// an indexed property (with different index parameters)
returnIndexerProperty = true;
}
}
if (PropertyPathWorker.IsIndexedProperty(result))
{
// the property is indexed (this can't happen in C#, but can in VB)
returnIndexerProperty = true;
}
if (returnIndexerProperty)
{
result = IndexerPropertyInfo.Instance;
}
return result;
}
// resolve indexer parameters
IndexerParameterInfo[] ResolveIndexerParams(FrugalObjectList<IndexerParamInfo> paramList, object context, bool throwOnError)
{
IndexerParameterInfo[] args = new IndexerParameterInfo[paramList.Count];
for (int i = 0; i < args.Length; ++i)
{
if (String.IsNullOrEmpty(paramList[i].parenString))
{
// no paren string "foo" - value is (uninterpreted) value string
args[i].value = paramList[i].valueString;
}
else if (String.IsNullOrEmpty(paramList[i].valueString))
{
// no value string "(2)" - value comes from PathParameter list
int index;
if (Int32.TryParse( paramList[i].parenString.Trim(),
NumberStyles.Integer,
TypeConverterHelper.InvariantEnglishUS.NumberFormat,
out index))
{
if (0 <= index && index < PathParameters.Count)
{
object value = PathParameters[index];
if (value != null)
{
args[i].value = value;
args[i].type = value.GetType();
}
else if (throwOnError)
{
// info.value will still be "(n)"
throw new InvalidOperationException(SR.Get(SRID.PathParameterIsNull, index));
}
}
else if (throwOnError)
throw new InvalidOperationException(SR.Get(SRID.PathParametersIndexOutOfRange, index, PathParameters.Count));
}
else
{
// parens didn't hold an integer "(abc)" - value is (uninterpreted) paren string
// [this could be considered an error, but the original code
// treated it like this, so to preserve compatibility...]
args[i].value = "(" + paramList[i].parenString + ")";
}
}
else
{
// both strings appear "(Double)3.14159" - value is type-converted from value string
args[i].type = GetTypeFromName(paramList[i].parenString, context);
if (args[i].type != null)
{
object value = GetTypedParamValue(paramList[i].valueString.Trim(), args[i].type, throwOnError);
if (value != null)
{
args[i].value = value;
}
else
{
if (throwOnError)
throw new InvalidOperationException(SR.Get(SRID.PropertyPathIndexWrongType, paramList[i].parenString, paramList[i].valueString));
args[i].type = null;
}
}
else
{
// parens didn't hold a type name "(abc)xyz" - value is (uninterpreted) string
// [this could be considered an error, but the original code
// treated it like this, so to preserve compatibility...]
args[i].value = "(" + paramList[i].parenString + ")" + paramList[i].valueString;
}
}
}
return args;
}
object GetTypedParamValue(string param, Type type, bool throwOnError)
{
object value = null;
if (type == typeof(string))
return param;
TypeConverter tc = TypeDescriptor.GetConverter(type);
if (tc != null && tc.CanConvertFrom(typeof(string)))
{
// PreSharp uses message numbers that the C# compiler doesn't know about.
// Disable the C# complaints, per the PreSharp documentation.
#pragma warning disable 1634, 1691
// PreSharp complains about catching NullReference (and other) exceptions.
// It doesn't recognize that IsCritical[Application]Exception() handles these correctly.
#pragma warning disable 56500
try
{
value = tc.ConvertFromString(null, CultureInfo.InvariantCulture,
param);
// technically the converter can return null as a legitimate
// value. In practice, this seems always to be a sign that
// the conversion didn't work (often because the converter
// reverts to the default behavior - returning null). So
// we treat null as an "error", and keep trying for something
// better. (See bug 861966)
}
// catch all exceptions. We simply want to move on to the next
// candidate indexer.
catch (Exception ex)
{
if (CriticalExceptions.IsCriticalApplicationException(ex) || throwOnError)
throw;
}
catch
{
if (throwOnError)
throw;
}
#pragma warning restore 56500
#pragma warning restore 1634, 1691
}
if (value == null && type.IsAssignableFrom(typeof(string)))
{
value = param;
}
return value;
}
// Return the type named by the given name
Type GetTypeFromName(string name, object context)
{
// use the parser context, if available. This allows early resolution.
// 5/8/2009 - I believe with System.Xaml there is never an old parserContext here.
// But cannot be sure.
ParserContext parserContext = context as ParserContext;
if (parserContext != null)
{
// Find the namespace prefix
string nsPrefix;
int nsIndex = name.IndexOf(':');
if (nsIndex == -1)
nsPrefix = string.Empty;
else
{
// Found a namespace prefix separator, so create replacement _pathString.
// String processing - split "foons" from "BarClass.BazProp"
nsPrefix = name.Substring(0, nsIndex).TrimEnd();
name = name.Substring(nsIndex + 1).TrimStart();
}
// Find the namespace URI, even if it's the default one
string namespaceURI = parserContext.XmlnsDictionary[nsPrefix];
if (namespaceURI == null)
{
throw new ArgumentException(SR.Get(SRID.ParserPrefixNSProperty, nsPrefix, name));
}
TypeAndSerializer typeAndSerializer = parserContext.XamlTypeMapper.GetTypeOnly(namespaceURI, name);
return (typeAndSerializer != null) ? typeAndSerializer.ObjectType : null;
}
else
{
if (context is IServiceProvider)
{
IXamlTypeResolver xtr = (context as IServiceProvider).GetService(typeof(IXamlTypeResolver)) as IXamlTypeResolver;
if (xtr != null)
{
return xtr.Resolve(name);
}
}
IValueSerializerContext serializerContext = context as IValueSerializerContext;
if (serializerContext != null)
{
ValueSerializer typeSerializer = ValueSerializer.GetSerializerFor(typeof(Type), serializerContext);
if (typeSerializer != null)
return typeSerializer.ConvertFromString(name, serializerContext) as Type;
}
}
// if there's no parser or serializer context, use the tree context
DependencyObject hostElement = context as DependencyObject;
if (hostElement == null)
{
if (FrameworkCompatibilityPreferences.TargetsDesktop_V4_0)
{
// app targets 4.0. Return null, for compat (Dev11 730107)
return null;
}
else
{
hostElement = new DependencyObject(); // at least pick up the default namespaces
}
}
var wpfSharedSchemaContext = XamlReader.BamlSharedSchemaContext;
Type type = wpfSharedSchemaContext.ResolvePrefixedNameWithAdditionalWpfSemantics(name, hostElement);
return type;
}
// return true if the name has the form: (property)
internal static bool IsPropertyReference(string name)
{
return (name != null && name.Length > 1 && name[0] == '(' && (name[name.Length - 1] == ')'));
}
// return true if the name has the form: (nnn)
internal static bool IsParameterIndex(string name, out int index)
{
if (IsPropertyReference(name))
{
name = name.Substring(1, name.Length - 2);
}
else
{
index = -1;
return false;
}
return Int32.TryParse( name,
NumberStyles.Integer,
TypeConverterHelper.InvariantEnglishUS.NumberFormat,
out index);
}
// determine if an object is one of the accessors we support
static bool IsValidAccessor(object accessor)
{
return accessor is DependencyProperty ||
accessor is PropertyInfo ||
accessor is PropertyDescriptor ||
accessor is DynamicObjectAccessor;
}
// determine the name of an accessor
static string GetPropertyName(object accessor)
{
DependencyProperty dp;
PropertyInfo pi;
PropertyDescriptor pd;
DynamicObjectAccessor doa;
if ((dp = accessor as DependencyProperty) != null)
return dp.Name;
else if ((pi = accessor as PropertyInfo) != null)
return pi.Name;
else if ((pd = accessor as PropertyDescriptor) != null)
return pd.Name;
else if ((doa = accessor as DynamicObjectAccessor) != null)
return doa.PropertyName;
else
{
Invariant.Assert(false, "Unknown accessor type");
return null;
}
}
//------------------------------------------------------
//
// Private Enums, Structs, Constants
//
//------------------------------------------------------
const string SingleStepPath = "(0)";
static readonly Char[] s_comma = new Char[]{','};
//------------------------------------------------------
//
// Private data
//
//------------------------------------------------------
string _path = String.Empty; // the path
PathParameterCollection _parameters; // list of DPs to inject into the path
SourceValueInfo[] _arySVI; // static description of each level in the path
string _lastError = String.Empty; // most recent error message
object[] _earlyBoundPathParts; // accessors and indexer parameters that got resolved early
PropertyPathWorker _singleWorker; // shared worker - used in "target" mode
//------------------------------------------------------
//
// Private types
//
//------------------------------------------------------
private class PathParameterCollection : ObservableCollection<object>
{
public PathParameterCollection()
{
}
public PathParameterCollection(object[] parameters)
{
IList<object> items = Items;
foreach (object o in parameters)
{
items.Add(o);
}
}
}
}
}
| |
/*
* Copyright 2010-2014 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
/*
* Do not modify this file. This file is generated from the ec2-2015-04-15.normal.json service model.
*/
using System;
using System.Collections.Generic;
using System.Xml.Serialization;
using System.Text;
using System.IO;
using Amazon.Runtime;
using Amazon.Runtime.Internal;
namespace Amazon.EC2.Model
{
/// <summary>
/// Describes a network interface.
/// </summary>
public partial class NetworkInterface
{
private NetworkInterfaceAssociation _association;
private NetworkInterfaceAttachment _attachment;
private string _availabilityZone;
private string _description;
private List<GroupIdentifier> _groups = new List<GroupIdentifier>();
private string _macAddress;
private string _networkInterfaceId;
private string _ownerId;
private string _privateDnsName;
private string _privateIpAddress;
private List<NetworkInterfacePrivateIpAddress> _privateIpAddresses = new List<NetworkInterfacePrivateIpAddress>();
private string _requesterId;
private bool? _requesterManaged;
private bool? _sourceDestCheck;
private NetworkInterfaceStatus _status;
private string _subnetId;
private List<Tag> _tagSet = new List<Tag>();
private string _vpcId;
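// Typical read-side usage sketch (illustrative; assumes an instance populated
// from an EC2 DescribeNetworkInterfaces response; the IsSet* helpers below are
// internal and used by the SDK marshallers, so external callers simply test
// for null):
//
//     if (networkInterface.PrivateIpAddress != null)
//     {
//         Console.WriteLine("{0}: {1} in {2}",
//             networkInterface.NetworkInterfaceId,
//             networkInterface.PrivateIpAddress,
//             networkInterface.AvailabilityZone);
//     }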
/// <summary>
/// Gets and sets the property Association.
/// <para>
/// The association information for an Elastic IP associated with the network interface.
/// </para>
/// </summary>
public NetworkInterfaceAssociation Association
{
get { return this._association; }
set { this._association = value; }
}
// Check to see if Association property is set
internal bool IsSetAssociation()
{
return this._association != null;
}
/// <summary>
/// Gets and sets the property Attachment.
/// <para>
/// The network interface attachment.
/// </para>
/// </summary>
public NetworkInterfaceAttachment Attachment
{
get { return this._attachment; }
set { this._attachment = value; }
}
// Check to see if Attachment property is set
internal bool IsSetAttachment()
{
return this._attachment != null;
}
/// <summary>
/// Gets and sets the property AvailabilityZone.
/// <para>
/// The Availability Zone.
/// </para>
/// </summary>
public string AvailabilityZone
{
get { return this._availabilityZone; }
set { this._availabilityZone = value; }
}
// Check to see if AvailabilityZone property is set
internal bool IsSetAvailabilityZone()
{
return this._availabilityZone != null;
}
/// <summary>
/// Gets and sets the property Description.
/// <para>
/// A description.
/// </para>
/// </summary>
public string Description
{
get { return this._description; }
set { this._description = value; }
}
// Check to see if Description property is set
internal bool IsSetDescription()
{
return this._description != null;
}
/// <summary>
/// Gets and sets the property Groups.
/// <para>
/// Any security groups for the network interface.
/// </para>
/// </summary>
public List<GroupIdentifier> Groups
{
get { return this._groups; }
set { this._groups = value; }
}
// Check to see if Groups property is set
internal bool IsSetGroups()
{
return this._groups != null && this._groups.Count > 0;
}
/// <summary>
/// Gets and sets the property MacAddress.
/// <para>
/// The MAC address.
/// </para>
/// </summary>
public string MacAddress
{
get { return this._macAddress; }
set { this._macAddress = value; }
}
// Check to see if MacAddress property is set
internal bool IsSetMacAddress()
{
return this._macAddress != null;
}
/// <summary>
/// Gets and sets the property NetworkInterfaceId.
/// <para>
/// The ID of the network interface.
/// </para>
/// </summary>
public string NetworkInterfaceId
{
get { return this._networkInterfaceId; }
set { this._networkInterfaceId = value; }
}
// Check to see if NetworkInterfaceId property is set
internal bool IsSetNetworkInterfaceId()
{
return this._networkInterfaceId != null;
}
/// <summary>
/// Gets and sets the property OwnerId.
/// <para>
/// The AWS account ID of the owner of the network interface.
/// </para>
/// </summary>
public string OwnerId
{
get { return this._ownerId; }
set { this._ownerId = value; }
}
// Check to see if OwnerId property is set
internal bool IsSetOwnerId()
{
return this._ownerId != null;
}
/// <summary>
/// Gets and sets the property PrivateDnsName.
/// <para>
/// The private DNS name.
/// </para>
/// </summary>
public string PrivateDnsName
{
get { return this._privateDnsName; }
set { this._privateDnsName = value; }
}
// Check to see if PrivateDnsName property is set
internal bool IsSetPrivateDnsName()
{
return this._privateDnsName != null;
}
/// <summary>
/// Gets and sets the property PrivateIpAddress.
/// <para>
/// The IP address of the network interface within the subnet.
/// </para>
/// </summary>
public string PrivateIpAddress
{
get { return this._privateIpAddress; }
set { this._privateIpAddress = value; }
}
// Check to see if PrivateIpAddress property is set
internal bool IsSetPrivateIpAddress()
{
return this._privateIpAddress != null;
}
/// <summary>
/// Gets and sets the property PrivateIpAddresses.
/// <para>
/// The private IP addresses associated with the network interface.
/// </para>
/// </summary>
public List<NetworkInterfacePrivateIpAddress> PrivateIpAddresses
{
get { return this._privateIpAddresses; }
set { this._privateIpAddresses = value; }
}
// Check to see if PrivateIpAddresses property is set
internal bool IsSetPrivateIpAddresses()
{
return this._privateIpAddresses != null && this._privateIpAddresses.Count > 0;
}
/// <summary>
/// Gets and sets the property RequesterId.
/// <para>
/// The ID of the entity that launched the instance on your behalf (for example, AWS Management
/// Console or Auto Scaling).
/// </para>
/// </summary>
public string RequesterId
{
get { return this._requesterId; }
set { this._requesterId = value; }
}
// Check to see if RequesterId property is set
internal bool IsSetRequesterId()
{
return this._requesterId != null;
}
/// <summary>
/// Gets and sets the property RequesterManaged.
/// <para>
/// Indicates whether the network interface is being managed by AWS.
/// </para>
/// </summary>
public bool RequesterManaged
{
get { return this._requesterManaged.GetValueOrDefault(); }
set { this._requesterManaged = value; }
}
// Check to see if RequesterManaged property is set
internal bool IsSetRequesterManaged()
{
return this._requesterManaged.HasValue;
}
/// <summary>
/// Gets and sets the property SourceDestCheck.
/// <para>
/// Indicates whether traffic to or from the instance is validated.
/// </para>
/// </summary>
public bool SourceDestCheck
{
get { return this._sourceDestCheck.GetValueOrDefault(); }
set { this._sourceDestCheck = value; }
}
// Check to see if SourceDestCheck property is set
internal bool IsSetSourceDestCheck()
{
return this._sourceDestCheck.HasValue;
}
/// <summary>
/// Gets and sets the property Status.
/// <para>
/// The status of the network interface.
/// </para>
/// </summary>
public NetworkInterfaceStatus Status
{
get { return this._status; }
set { this._status = value; }
}
// Check to see if Status property is set
internal bool IsSetStatus()
{
return this._status != null;
}
/// <summary>
/// Gets and sets the property SubnetId.
/// <para>
/// The ID of the subnet.
/// </para>
/// </summary>
public string SubnetId
{
get { return this._subnetId; }
set { this._subnetId = value; }
}
// Check to see if SubnetId property is set
internal bool IsSetSubnetId()
{
return this._subnetId != null;
}
/// <summary>
/// Gets and sets the property TagSet.
/// <para>
/// Any tags assigned to the network interface.
/// </para>
/// </summary>
public List<Tag> TagSet
{
get { return this._tagSet; }
set { this._tagSet = value; }
}
// Check to see if TagSet property is set
internal bool IsSetTagSet()
{
return this._tagSet != null && this._tagSet.Count > 0;
}
/// <summary>
/// Gets and sets the property VpcId.
/// <para>
/// The ID of the VPC.
/// </para>
/// </summary>
public string VpcId
{
get { return this._vpcId; }
set { this._vpcId = value; }
}
// Check to see if VpcId property is set
internal bool IsSetVpcId()
{
return this._vpcId != null;
}
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: Protos.proto
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace Messages {
/// <summary>Holder for reflection information generated from Protos.proto</summary>
public static partial class ProtosReflection {
#region Descriptor
/// <summary>File descriptor for Protos.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static ProtosReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"CgxQcm90b3MucHJvdG8SCG1lc3NhZ2VzGhhQcm90by5BY3Rvci9Qcm90b3Mu",
"cHJvdG8iBwoFU3RhcnQiKQoLU3RhcnRSZW1vdGUSGgoGU2VuZGVyGAEgASgL",
"MgouYWN0b3IuUElEIgYKBFBpbmciBgoEUG9uZ0ILqgIITWVzc2FnZXNiBnBy",
"b3RvMw=="));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { global::Proto.ProtosReflection.Descriptor, },
new pbr::GeneratedClrTypeInfo(null, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::Messages.Start), global::Messages.Start.Parser, null, null, null, null),
new pbr::GeneratedClrTypeInfo(typeof(global::Messages.StartRemote), global::Messages.StartRemote.Parser, new[]{ "Sender" }, null, null, null),
new pbr::GeneratedClrTypeInfo(typeof(global::Messages.Ping), global::Messages.Ping.Parser, null, null, null, null),
new pbr::GeneratedClrTypeInfo(typeof(global::Messages.Pong), global::Messages.Pong.Parser, null, null, null, null)
}));
}
#endregion
}
#region Messages
public sealed partial class Start : pb::IMessage<Start> {
private static readonly pb::MessageParser<Start> _parser = new pb::MessageParser<Start>(() => new Start());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<Start> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Messages.ProtosReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public Start() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public Start(Start other) : this() {
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public Start Clone() {
return new Start(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as Start);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(Start other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
return true;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(Start other) {
if (other == null) {
return;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
}
}
}
}
public sealed partial class StartRemote : pb::IMessage<StartRemote> {
private static readonly pb::MessageParser<StartRemote> _parser = new pb::MessageParser<StartRemote>(() => new StartRemote());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<StartRemote> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Messages.ProtosReflection.Descriptor.MessageTypes[1]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public StartRemote() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public StartRemote(StartRemote other) : this() {
Sender = other.sender_ != null ? other.Sender.Clone() : null;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public StartRemote Clone() {
return new StartRemote(this);
}
/// <summary>Field number for the "Sender" field.</summary>
public const int SenderFieldNumber = 1;
private global::Proto.PID sender_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Proto.PID Sender {
get { return sender_; }
set {
sender_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as StartRemote);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(StartRemote other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (!object.Equals(Sender, other.Sender)) return false;
return true;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (sender_ != null) hash ^= Sender.GetHashCode();
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (sender_ != null) {
output.WriteRawTag(10);
output.WriteMessage(Sender);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (sender_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(Sender);
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(StartRemote other) {
if (other == null) {
return;
}
if (other.sender_ != null) {
if (sender_ == null) {
sender_ = new global::Proto.PID();
}
Sender.MergeFrom(other.Sender);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 10: {
if (sender_ == null) {
sender_ = new global::Proto.PID();
}
input.ReadMessage(sender_);
break;
}
}
}
}
}
public sealed partial class Ping : pb::IMessage<Ping> {
private static readonly pb::MessageParser<Ping> _parser = new pb::MessageParser<Ping>(() => new Ping());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<Ping> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Messages.ProtosReflection.Descriptor.MessageTypes[2]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public Ping() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public Ping(Ping other) : this() {
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public Ping Clone() {
return new Ping(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as Ping);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(Ping other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
return true;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(Ping other) {
if (other == null) {
return;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
}
}
}
}
public sealed partial class Pong : pb::IMessage<Pong> {
private static readonly pb::MessageParser<Pong> _parser = new pb::MessageParser<Pong>(() => new Pong());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<Pong> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Messages.ProtosReflection.Descriptor.MessageTypes[3]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public Pong() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public Pong(Pong other) : this() {
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public Pong Clone() {
return new Pong(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as Pong);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(Pong other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
return true;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(Pong other) {
if (other == null) {
return;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
}
}
}
}
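// --- Editor-added hedged usage sketch (not part of the generated code) ---
// Round-trips the generated StartRemote message through the Google.Protobuf runtime, which
// the pb:: alias at the top of this file is assumed to point to. The Proto.PID payload is
// left empty because its fields are not shown here; MessageExtensions.ToByteArray and
// MessageParser<T>.ParseFrom are the standard runtime entry points used below.
internal static class StartRemoteUsageSketch {
internal static void RoundTrip() {
// Build a request and serialize it (WriteTo is invoked by the ToByteArray helper).
var request = new StartRemote { Sender = new global::Proto.PID() };
byte[] payload = pb::MessageExtensions.ToByteArray(request);
// Parse it back (MergeFrom is invoked by the parser) and check value equality.
StartRemote decoded = StartRemote.Parser.ParseFrom(payload);
global::System.Diagnostics.Debug.Assert(decoded.Equals(request));
}
}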
#endregion
}
#endregion Designer generated code
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using Microsoft.Modeling;
using Microsoft.Protocols.TestSuites.FileSharing.SMB2Model.Adapter;
using Microsoft.Protocols.TestSuites.FileSharing.SMB2Model.Adapter.CreateClose;
using Microsoft.Protocols.TestSuites.FileSharing.SMB2Model.Adapter.CreditMgmt;
using Microsoft.Protocols.TestSuites.FileSharing.SMB2Model.Adapter.Encryption;
using Microsoft.Protocols.TestSuites.FileSharing.SMB2Model.Adapter.Handle;
using Microsoft.Protocols.TestSuites.FileSharing.SMB2Model.Adapter.Leasing;
using Microsoft.Protocols.TestSuites.FileSharing.SMB2Model.Adapter.Replay;
using Microsoft.Protocols.TestSuites.FileSharing.SMB2Model.Adapter.ResilientHandle;
using Microsoft.Protocols.TestSuites.FileSharing.SMB2Model.Adapter.SessionMgmt;
using Microsoft.Protocols.TestSuites.FileSharing.SMB2Model.Adapter.Signing;
using Microsoft.Protocols.TestSuites.FileSharing.SMB2Model.Adapter.TreeMgmt;
using Microsoft.Protocols.TestTools.StackSdk.FileAccessService.Smb2;
namespace Microsoft.Protocols.TestSuites.FileSharing.SMB2Model.Model
{
/// <summary>
/// The state enumeration for SMB2 model.
/// </summary>
public enum ModelState
{
/// <summary>
/// Model is not initialized.
/// </summary>
Uninitialized,
/// <summary>
/// Model is initialized.
/// </summary>
Initialized,
/// <summary>
/// The model is ready for test.
/// </summary>
Connected,
/// <summary>
/// The connection has been disconnected.
/// </summary>
Disconnected,
}
public enum ModelSessionState
{
/// <summary>
/// No session has been established.
/// </summary>
None,
/// <summary>
/// Session setup is in progress.
/// </summary>
InProgress,
/// <summary>
/// The session has been established and is valid.
/// </summary>
Valid,
}
/// <summary>
/// Abstract a connection object in the model
/// </summary>
public class ModelConnection
{
/// <summary>
/// State of the connection
/// </summary>
public ModelState ConnectionState { get; set; }
/// <summary>
/// Dialect selected for the connection after negotiation
/// </summary>
public DialectRevision NegotiateDialect { get; protected set; }
/// <summary>
/// Indicates the session
/// </summary>
public ModelSession Session { get; set; }
/// <summary>
/// Request sent through the connection
/// </summary>
public ModelSMB2Request Request { get; set; }
/// <summary>
/// A Boolean that, if set, indicates that authentication to a non-anonymous principal has not yet been successfully performed on this connection.
/// </summary>
public bool ConstrainedConnection { get; set; }
public ModelConnection(DialectRevision dialect)
{
this.ConnectionState = ModelState.Initialized;
this.NegotiateDialect = dialect;
this.Session = null;
this.Request = null;
this.ConstrainedConnection = false;
}
public override string ToString()
{
return string.Format("({0}: {1}, {2}: {3}, {4}: {5})",
"ConnectionState", ConnectionState, "NegotiateDialect", NegotiateDialect, "Session", Session);
}
}
/// <summary>
/// Abstract a session object in the model
/// </summary>
public class ModelSession
{
public ModelSessionState State { get; set; }
public DialectRevision Dialect { get; set; }
public ModelSessionId SessionId { get; set; }
public ModelSession()
{
}
public override string ToString()
{
return string.Format("({0}: {1}, {2}: {3}, {4}: {5})",
"State", State, "Dialect", Dialect, "SessionId", SessionId);
}
}
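// --- Editor-added hedged sketch (illustrative only, not part of the test suite) ---
// Shows how the model objects above are typically paired: a connection negotiated at a given
// dialect that carries a valid session. The dialect value is only an example, and SessionId is
// left at its default because its concrete values are defined in the adapter.
public static class ModelConnectionUsageSketch
{
public static ModelConnection CreateConnectedState()
{
var connection = new ModelConnection(DialectRevision.Smb30)
{
ConnectionState = ModelState.Connected,
Session = new ModelSession
{
State = ModelSessionState.Valid,
Dialect = DialectRevision.Smb30
}
};
return connection;
}
}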
public class ModelOpen
{
public bool IsDurable { get; set; }
public bool IsResilient { get; set; }
public ModelUser DurableOwner { get; set; }
public ModelOpen(bool isDurable)
{
IsDurable = isDurable;
}
public override string ToString()
{
return string.Format("Open by {0}{1}{2}{3}{4}",
DurableOwner,
IsResilient || IsDurable? " with " : "",
IsDurable? "Durable" : "",
IsResilient && IsDurable? " and " : "",
IsResilient? "Resilient" : "");
}
}
/// <summary>
/// Abstract an oplock open object in the model
/// </summary>
public class ModelOplockOpen
{
public OplockLevel_Values OplockLevel { get; set; }
public OplockState OplockState { get; set; }
public ModelOplockOpen()
{
}
}
#region Abstract SMB2 Request
/// <summary>
/// The ModelSMB2Request class is the base class for all SMB2 request messages in the model.
/// </summary>
public abstract class ModelSMB2Request
{
/// <summary>
/// A value that identifies a message request and response uniquely across
/// all messages sent on the same SMB2 Protocol transport connection.
/// </summary>
public long messageID;
/// <summary>
/// Indicates whether the command the client wants to cancel is an asynchronously processed command.
/// </summary>
public bool isAsync;
/// <summary>
/// Indicates whether to cancel an asynchronously processed command.
/// </summary>
public bool isCanceling;
/// <summary>
/// Initializes the SMB2 request message.
/// </summary>
/// <param name="messageID">A value that identifies a message request uniquely across
/// all messages sent on the same SMB2 Protocol transport connection.</param>
protected ModelSMB2Request(long messageID)
{
this.messageID = messageID;
}
}
#endregion
#region SMB2 Requests
#region Negotiate
/// <summary>
/// This class is used for the Negotiate request.
/// The Negotiate request is used by the client to notify the server which dialects of the SMB Protocol the client understands.
/// </summary>
public class ModelComNegotiateRequest : ModelSMB2Request
{
public Sequence<string> Dialects;
/// <summary>
/// Initializes a negotiate request.
/// </summary>
/// <param name="dialects"> Dialects the request contains. </param>
public ModelComNegotiateRequest(Sequence<string> dialects)
: base(0)
{
this.Dialects = dialects;
}
}
/// <summary>
/// This class is used for the Negotiate request.
/// The Negotiate request is used by the client to notify the server which dialects of the SMB Protocol the client understands.
/// </summary>
public class NegotiateRequest : ModelSMB2Request
{
public Sequence<DialectRevision> Dialects;
/// <summary>
/// Initializes a negotiate request.
/// </summary>
/// <param name="dialects"> Dialects the request contains. </param>
public NegotiateRequest(Sequence<DialectRevision> dialects)
: base(0)
{
this.Dialects = dialects;
}
}
#endregion
#region Leasing
/// <summary>
/// This class is used for the Create Lease request.
/// The Create Lease request is used by the client to request either creation of or access to a file with a lease.
/// </summary>
public class ModelCreateLeaseRequest : ModelSMB2Request
{
public CreateOptions_Values CreateOptions;
public RequestedOplockLevel_Values RequestedOplockLevel;
public LeaseContextType ContextType;
public ModelCreateContextRequest LeaseContext;
public ModelCreateLeaseRequest(
CreateOptions_Values createOptions,
RequestedOplockLevel_Values requestedOplockLevel_Values,
ModelCreateContextRequest leaseContext,
LeaseContextType leaseContextType)
: base(0)
{
this.CreateOptions = createOptions;
this.RequestedOplockLevel = requestedOplockLevel_Values;
this.LeaseContext = leaseContext;
this.ContextType = leaseContextType;
}
}
/// <summary>
/// This class is used for the Lease Break Acknowledgment request.
/// The Lease Break Acknowledgment request is used by the client in response to an SMB2 Lease Break Notification packet sent by the server.
/// </summary>
public class ModelLeaseBreakAckRequest : ModelSMB2Request
{
public ModelLeaseKeyType modelLeaseKeyType;
public uint LeaseState;
public ModelLeaseBreakAckRequest(
ModelLeaseKeyType modelLeaseKeyType,
uint leaseState)
: base(0)
{
this.modelLeaseKeyType = modelLeaseKeyType;
this.LeaseState = leaseState;
}
}
/// <summary>
/// This class is used to request a file operation.
/// </summary>
public class ModelFileOperationRequest : ModelSMB2Request
{
public FileOperation Operation;
public OperatorType OptorType;
public ModelDialectRevision Dialect;
public ModelFileOperationRequest(
FileOperation operation,
OperatorType operatorType,
ModelDialectRevision dialect)
: base(0)
{
this.Operation = operation;
this.OptorType = operatorType;
this.Dialect = dialect;
}
}
#endregion
#region Credit Management
/// <summary>
/// This class is used to request a credit operation
/// </summary>
public class ModelCreditOperationRequest : ModelSMB2Request
{
public ModelMidType midType;
public ModelCreditCharge creditCharge;
public ModelCreditRequestNum creditRequestNum;
public ModelPayloadSize payloadSize;
public ModelPayloadType payloadType;
public ModelCreditOperationRequest(
ModelMidType midType,
ModelCreditCharge creditCharge,
ModelCreditRequestNum creditRequestNum,
ModelPayloadSize payloadSize,
ModelPayloadType payloadType)
: base(0)
{
this.midType = midType;
this.creditCharge = creditCharge;
this.creditRequestNum = creditRequestNum;
this.payloadSize = payloadSize;
this.payloadType = payloadType;
}
}
#endregion
#region Encryption
/// <summary>
/// This class is used to request a treeconnect operation
/// </summary>
public class ModelTreeConnectRequest : ModelSMB2Request
{
public ConnectToShareType connectToShareType;
public ModelRequestType modelRequestType;
public ModelTreeConnectRequest(
ConnectToShareType connectToShareType,
ModelRequestType modelRequestType)
: base(0)
{
this.connectToShareType = connectToShareType;
this.modelRequestType = modelRequestType;
}
}
/// <summary>
/// This class is used to request a create file operation
/// </summary>
public class ModelFileOperationVerifyEncryptionRequest : ModelSMB2Request
{
public ModelRequestType modelRequestType;
public ModelFileOperationVerifyEncryptionRequest(
ModelRequestType modelRequestType)
: base(0)
{
this.modelRequestType = modelRequestType;
}
}
#endregion
#region Session Management
public class ModelSessionSetupRequest : ModelSMB2Request
{
public ModelConnectionId connectionId;
public ModelSessionId sessionId;
public ModelSessionId previousSessionId;
public bool isSigned;
public ModelFlags flags;
public ModelUser user;
public ModelSessionSetupRequest(
ModelConnectionId connectionId,
ModelSessionId sessionId,
ModelSessionId previousSessionId,
bool isSigned,
ModelFlags flags,
ModelUser user
)
: base(0)
{
this.connectionId = connectionId;
this.sessionId = sessionId;
this.previousSessionId = previousSessionId;
this.isSigned = isSigned;
this.flags = flags;
this.user = user;
}
}
public class ModelLogOffRequest : ModelSMB2Request
{
public ModelConnectionId connectionId;
public ModelSessionId sessionId;
public ModelLogOffRequest(ModelConnectionId connectionId, ModelSessionId sessionId)
: base(0)
{
this.connectionId = connectionId;
this.sessionId = sessionId;
}
}
#endregion
#region TreeMgmt
public class ModelTreeMgmtTreeConnectRequest : ModelSMB2Request
{
public ModelSharePath sharePath;
public ModelTreeMgmtTreeConnectRequest(ModelSharePath sharePath)
: base(0)
{
this.sharePath = sharePath;
}
}
public class ModelTreeMgmtTreeDisconnectRequest : ModelSMB2Request
{
public ModelTreeId treeId;
public ModelTreeMgmtTreeDisconnectRequest(ModelTreeId treeId)
: base(0)
{
this.treeId = treeId;
}
}
#endregion
#region Resilient Handle
public class ModelResiliencyRequest : ModelSMB2Request
{
public IoCtlInputCount InputCount { get; set; }
public ResilientTimeout Timeout { get; set; }
public ModelResiliencyRequest(
IoCtlInputCount inputCount,
ResilientTimeout timeout)
: base(0)
{
InputCount = inputCount;
Timeout = timeout;
}
}
public class ModelReEstablishResilientOpenRequest : ModelSMB2Request
{
public ModelUser User { get; set; }
public ModelReEstablishResilientOpenRequest(ModelUser user)
: base(0)
{
User = user;
}
}
#region Oplock
public class ModelRequestOplockAndTriggerBreakRequest : ModelSMB2Request
{
public RequestedOplockLevel_Values RequestedOplockLevel;
public ModelRequestOplockAndTriggerBreakRequest(RequestedOplockLevel_Values requestedOplockLevel)
: base(0)
{
this.RequestedOplockLevel = requestedOplockLevel;
}
}
public class ModelOplockBreakAcknowledgementRequest : ModelSMB2Request
{
public bool VolatilePortionFound;
public bool PersistentMatchesDurableFileId;
public OplockLevel_Values OplockLevel;
public ModelOplockBreakAcknowledgementRequest(bool volatilePortionFound, bool persistentMatchesDurableFileId, OplockLevel_Values oplockLevel)
:base(0)
{
this.VolatilePortionFound = volatilePortionFound;
this.PersistentMatchesDurableFileId = persistentMatchesDurableFileId;
this.OplockLevel = oplockLevel;
}
}
#endregion
#region Handle
/// <summary>
/// This class is used to request an open file operation
/// </summary>
public class ModelOpenFileRequest : ModelSMB2Request
{
public DurableV1RequestContext durableV1RequestContext;
public DurableV2RequestContext durableV2RequestContext;
public DurableV1ReconnectContext durableV1ReconnectContext;
public DurableV2ReconnectContext durableV2ReconnectContext;
public OplockLeaseType oplockLeaseType;
public bool isSameLeaseKey;
public bool isSameClient;
public bool isSameCreateGuid;
public ModelOpenFileRequest(
DurableV1RequestContext durableV1RequestContext,
DurableV2RequestContext durableV2RequestContext,
DurableV1ReconnectContext durableV1ReconnectContext,
DurableV2ReconnectContext durableV2ReconnectContext,
OplockLeaseType oplockLeaseType,
bool isSameLeaseKey,
bool isSameClient,
bool isSameCreateGuid)
: base(0)
{
this.durableV1RequestContext = durableV1RequestContext;
this.durableV2RequestContext = durableV2RequestContext;
this.durableV1ReconnectContext = durableV1ReconnectContext;
this.durableV2ReconnectContext = durableV2ReconnectContext;
this.oplockLeaseType = oplockLeaseType;
this.isSameLeaseKey = isSameLeaseKey;
this.isSameClient = isSameClient;
this.isSameCreateGuid = isSameCreateGuid;
}
}
#endregion
#endregion
#region CreateClose
public class ModelCreateRequest: ModelSMB2Request
{
public CreateFileNameType NameType;
public CreateOptionsFileOpenReparsePointType FileOpenReparsePointType;
public CreateOptionsFileDeleteOnCloseType FileDeleteOnCloseType;
public CreateContextType ContextType;
public ImpersonationLevelType ImpersonationType;
public ModelCreateRequest(
CreateFileNameType nameType,
CreateOptionsFileOpenReparsePointType fileOpenReparsePointType,
CreateOptionsFileDeleteOnCloseType fileDeleteOnCloseType,
CreateContextType contextType,
ImpersonationLevelType impersonationType):base(0)
{
NameType = nameType;
FileOpenReparsePointType = fileOpenReparsePointType;
FileDeleteOnCloseType = fileDeleteOnCloseType;
ContextType = contextType;
ImpersonationType = impersonationType;
}
}
public class ModelCloseRequest: ModelSMB2Request
{
public CloseFlagType CloseType;
public FileIdVolatileType VolatileType;
public FileIdPersistentType PersistentType;
public ModelCloseRequest(
CloseFlagType closeType,
FileIdVolatileType volatileType,
FileIdPersistentType persistentType):base(0)
{
CloseType = closeType;
VolatileType = volatileType;
PersistentType = persistentType;
}
}
#endregion
#region Signing
/// <summary>
/// This class is used for requests in the SigningModel.
/// </summary>
public class SigningModelRequest : ModelSMB2Request
{
public SigningFlagType signingFlagType;
public SigningEnabledType signingEnabledType;
public SigningRequiredType signingRequiredType;
public SigningModelRequest(
SigningFlagType signingFlagType)
: base(0)
{
this.signingFlagType = signingFlagType;
}
public SigningModelRequest(
SigningFlagType signingFlagType,
SigningEnabledType signingEnabledType,
SigningRequiredType signingRequiredType)
: base(0)
{
this.signingFlagType = signingFlagType;
this.signingEnabledType = signingEnabledType;
this.signingRequiredType = signingRequiredType;
}
}
#endregion
#region Replay
public class ModelReplayChannel
{
/// <summary>
/// The dialect revision after negotiation.
/// </summary>
public DialectRevision Connection_NegotiateDialect;
/// <summary>
/// The type of connected share.
/// </summary>
public ReplayModelShareType Connection_Session_TreeConnect_Share_IsCA;
public bool Connection_ClientCapabilities_SupportPersistent;
public ModelReplayChannel(DialectRevision Connection_NegotiateDialect)
{
this.Connection_NegotiateDialect = Connection_NegotiateDialect;
}
public ModelReplayChannel(DialectRevision Connection_NegotiateDialect,
ReplayModelShareType Connection_Session_TreeConnect_Share_IsCA,
bool Connection_ClientCapabilities_SupportPersistent)
{
this.Connection_NegotiateDialect = Connection_NegotiateDialect;
this.Connection_Session_TreeConnect_Share_IsCA = Connection_Session_TreeConnect_Share_IsCA;
this.Connection_ClientCapabilities_SupportPersistent = Connection_ClientCapabilities_SupportPersistent;
}
}
public class ModelReplayCreateRequest : ModelSMB2Request
{
public ModelReplayChannel channel;
public ReplayModelSwitchChannelType switchChannelType;
public ReplayModelChannelSequenceType channelSequence;
public ReplayModelDurableHandle modelDurableHandle;
public ReplayModelRequestedOplockLevel requestedOplockLevel;
public ReplayModelFileName fileName;
public ReplayModelCreateGuid createGuid;
public ReplayModelFileAttributes fileAttributes;
public ReplayModelCreateDisposition createDisposition;
public ReplayModelLeaseState leaseState;
public ReplayModelSetReplayFlag isSetReplayFlag;
public ReplayModelLeaseKey leaseKey;
public ModelReplayCreateRequest(ModelReplayChannel channel,
ReplayModelSwitchChannelType switchChannelType,
ReplayModelChannelSequenceType channelSequence,
ReplayModelDurableHandle modelDurableHandle,
ReplayModelRequestedOplockLevel requestedOplockLevel,
ReplayModelFileName fileName,
ReplayModelCreateGuid createGuid,
ReplayModelFileAttributes fileAttributes,
ReplayModelCreateDisposition createDisposition,
ReplayModelLeaseState leaseState,
ReplayModelSetReplayFlag isSetReplayFlag,
ReplayModelLeaseKey leaseKey)
: base(0)
{
this.channel = channel;
this.switchChannelType = switchChannelType;
this.channelSequence = channelSequence;
this.modelDurableHandle = modelDurableHandle;
this.requestedOplockLevel = requestedOplockLevel;
this.fileName = fileName;
this.createGuid = createGuid;
this.fileAttributes = fileAttributes;
this.createDisposition = createDisposition;
this.leaseState = leaseState;
this.isSetReplayFlag = isSetReplayFlag;
this.leaseKey = leaseKey;
}
}
public class ModelReplayFileOperationRequest : ModelSMB2Request
{
public ModelReplayChannel channel;
public ReplayModelSwitchChannelType switchChannelType;
public ModelDialectRevision maxSmbVersionClientSupported;
public ReplayModelRequestCommand requestCommand;
public ReplayModelChannelSequenceType channelSequence;
public ReplayModelSetReplayFlag isSetReplayFlag;
public ReplayModelRequestCommandParameters requestCommandParameters;
public ModelReplayFileOperationRequest(ModelReplayChannel channel,
ReplayModelSwitchChannelType switchChannelType,
ModelDialectRevision maxSmbVersionClientSupported,
ReplayModelRequestCommand requestCommand,
ReplayModelChannelSequenceType channelSequence,
ReplayModelSetReplayFlag isSetReplayFlag,
ReplayModelRequestCommandParameters requestCommandParameters
)
: base(0)
{
this.channel = channel;
this.switchChannelType = switchChannelType;
this.maxSmbVersionClientSupported = maxSmbVersionClientSupported;
this.requestCommand = requestCommand;
this.channelSequence = channelSequence;
this.isSetReplayFlag = isSetReplayFlag;
this.requestCommandParameters = requestCommandParameters;
}
}
#endregion
#endregion
}
| |
// ***********************************************************************
// Copyright (c) 2012 Charlie Poole, Rob Prouse
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// ***********************************************************************
#if PARALLEL
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Threading;
#if NET20 || NET35
using ManualResetEventSlim = System.Threading.ManualResetEvent;
#endif
namespace NUnit.Framework.Internal.Execution
{
/// <summary>
/// WorkItemQueueState indicates the current state of a WorkItemQueue
/// </summary>
public enum WorkItemQueueState
{
/// <summary>
/// The queue is paused
/// </summary>
Paused,
/// <summary>
/// The queue is running
/// </summary>
Running,
/// <summary>
/// The queue is stopped
/// </summary>
Stopped
}
/// <summary>
/// A WorkItemQueue holds work items that are ready to
/// be run, either initially or after some dependency
/// has been satisfied.
/// </summary>
public class WorkItemQueue
{
private const int SPIN_COUNT = 5;
// Although the code makes the number of levels relatively
// easy to change, it is still baked in as a constant at
// this time. If we wanted to make it variable, that would
// be a bit more work, which does not now seem necessary.
private const int HIGH_PRIORITY = 0;
private const int NORMAL_PRIORITY = 1;
private const int PRIORITY_LEVELS = 2;
private Logger log = InternalTrace.GetLogger("WorkItemQueue");
private ConcurrentQueue<WorkItem>[] _innerQueues;
private class SavedState
{
public ConcurrentQueue<WorkItem>[] InnerQueues;
public SavedState(WorkItemQueue queue)
{
InnerQueues = queue._innerQueues;
}
}
private Stack<SavedState> _savedState = new Stack<SavedState>();
/* This event is used solely for the purpose of having an optimized sleep cycle when
* we have to wait on an external event (Add or Remove for instance)
*/
private readonly ManualResetEventSlim _mreAdd = new ManualResetEventSlim(false);
/* The whole idea is to use these two values in a transactional
* way to track and manage the actual data inside the underlying lock-free collection
* instead of directly working with it or using external locking.
*
* They are manipulated with CAS and are guaranteed to increase over time and use
* of the instance thus preventing ABA problems.
*/
private int _addId = int.MinValue;
private int _removeId = int.MinValue;
#if APARTMENT_STATE
/// <summary>
/// Initializes a new instance of the <see cref="WorkItemQueue"/> class.
/// </summary>
/// <param name="name">The name of the queue.</param>
/// <param name="isParallel">Flag indicating whether this is a parallel queue</param>
/// <param name="apartment">ApartmentState to use for items on this queue</param>
public WorkItemQueue(string name, bool isParallel, ApartmentState apartment)
#else
/// <summary>
/// Initializes a new instance of the <see cref="WorkItemQueue"/> class.
/// </summary>
/// <param name="name">The name of the queue.</param>
/// <param name="isParallel">Flag indicating whether this is a parallel queue</param>
public WorkItemQueue(string name, bool isParallel)
#endif
{
Name = name;
IsParallelQueue = isParallel;
#if APARTMENT_STATE
TargetApartment = apartment;
#endif
State = WorkItemQueueState.Paused;
ItemsProcessed = 0;
InitializeQueues();
}
private void InitializeQueues()
{
_innerQueues = new ConcurrentQueue<WorkItem>[PRIORITY_LEVELS];
for (int i = 0; i < PRIORITY_LEVELS; i++)
_innerQueues[i] = new ConcurrentQueue<WorkItem>();
}
#region Properties
/// <summary>
/// Gets the name of the work item queue.
/// </summary>
public string Name { get; private set; }
/// <summary>
/// Gets a flag indicating whether this queue is used for parallel execution
/// </summary>
public bool IsParallelQueue { get; private set; }
#if APARTMENT_STATE
/// <summary>
/// Gets the target ApartmentState for work items on this queue
/// </summary>
public ApartmentState TargetApartment { get; private set; }
#endif
private int _itemsProcessed;
/// <summary>
/// Gets the total number of items processed so far
/// </summary>
public int ItemsProcessed
{
get { return _itemsProcessed; }
private set { _itemsProcessed = value; }
}
private int _state;
/// <summary>
/// Gets the current state of the queue
/// </summary>
public WorkItemQueueState State
{
get { return (WorkItemQueueState)_state; }
private set { _state = (int)value; }
}
/// <summary>
/// Get a bool indicating whether the queue is empty.
/// </summary>
public bool IsEmpty
{
get
{
foreach (var q in _innerQueues)
if (!q.IsEmpty)
return false;
return true;
}
}
#endregion
#region Public Methods
/// <summary>
/// Enqueue a WorkItem to be processed
/// </summary>
/// <param name="work">The WorkItem to process</param>
public void Enqueue(WorkItem work)
{
Enqueue(work, work is CompositeWorkItem.OneTimeTearDownWorkItem ? HIGH_PRIORITY : NORMAL_PRIORITY);
}
/// <summary>
/// Enqueue a WorkItem to be processed - internal for testing
/// </summary>
/// <param name="work">The WorkItem to process</param>
/// <param name="priority">The priority at which to process the item</param>
internal void Enqueue(WorkItem work, int priority)
{
Guard.ArgumentInRange(priority >= 0 && priority < PRIORITY_LEVELS,
"Invalid priority specified", "priority");
do
{
int cachedAddId = _addId;
// Validate that we are the current enqueuer
if (Interlocked.CompareExchange(ref _addId, cachedAddId + 1, cachedAddId) != cachedAddId)
continue;
// Add to the collection
_innerQueues[priority].Enqueue(work);
// Wake up threads that may have been sleeping
_mreAdd.Set();
return;
} while (true);
}
/// <summary>
/// Dequeue a WorkItem for processing
/// </summary>
/// <returns>A WorkItem or null if the queue has stopped</returns>
public WorkItem Dequeue()
{
SpinWait sw = new SpinWait();
do
{
WorkItemQueueState cachedState = State;
if (cachedState == WorkItemQueueState.Stopped)
return null; // Tell worker to terminate
int cachedRemoveId = _removeId;
int cachedAddId = _addId;
// Empty case (or paused)
if (cachedRemoveId == cachedAddId || cachedState == WorkItemQueueState.Paused)
{
// Spin a few times to see if something changes
if (sw.Count <= SPIN_COUNT)
{
sw.SpinOnce();
}
else
{
// Reset to wait for an enqueue
_mreAdd.Reset();
// Recheck for an enqueue to avoid a Wait
if ((cachedRemoveId != _removeId || cachedAddId != _addId) && cachedState != WorkItemQueueState.Paused)
{
// Queue is not empty, set the event
_mreAdd.Set();
continue;
}
// Wait for something to happen
_mreAdd.Wait(500);
}
continue;
}
// Validate that we are the current dequeuer
if (Interlocked.CompareExchange(ref _removeId, cachedRemoveId + 1, cachedRemoveId) != cachedRemoveId)
continue;
// Dequeue our work item
WorkItem work = null;
while (work == null)
foreach (var q in _innerQueues)
if (q.TryDequeue(out work))
break;
// Add to items processed using CAS
Interlocked.Increment(ref _itemsProcessed);
return work;
} while (true);
}
/// <summary>
/// Start or restart processing of items from the queue
/// </summary>
public void Start()
{
log.Info("{0}.{1} starting", Name, _savedState.Count);
if (Interlocked.CompareExchange(ref _state, (int)WorkItemQueueState.Running, (int)WorkItemQueueState.Paused) == (int)WorkItemQueueState.Paused)
_mreAdd.Set();
}
/// <summary>
/// Signal the queue to stop
/// </summary>
public void Stop()
{
log.Info("{0}.{1} stopping - {2} WorkItems processed", Name, _savedState.Count, ItemsProcessed);
if (Interlocked.Exchange(ref _state, (int)WorkItemQueueState.Stopped) != (int)WorkItemQueueState.Stopped)
_mreAdd.Set();
}
/// <summary>
/// Pause the queue for restarting later
/// </summary>
public void Pause()
{
log.Debug("{0}.{1} pausing", Name, _savedState.Count);
Interlocked.CompareExchange(ref _state, (int)WorkItemQueueState.Paused, (int)WorkItemQueueState.Running);
}
/// <summary>
/// Save the current inner queue and create new ones for use by
/// a non-parallel fixture with parallel children.
/// </summary>
internal void Save()
{
bool isRunning = State == WorkItemQueueState.Running;
if (isRunning)
Pause();
_savedState.Push(new SavedState(this));
InitializeQueues();
if (isRunning)
Start();
}
/// <summary>
/// Restore the inner queue that was previously saved
/// </summary>
internal void Restore()
{
// TODO: Originally, the following Guard statement was used. In theory, no queues should be running
// when we are doing a restore. It appears, however, that we end the shift, pausing queues, but that
// a thread may then sneak in and restart some of them. My tests pass without the guard but I'm still
// concerned to understand what is happening and why. I'm leaving this commented out so that somebody
// else can take a look at it later on.
//Guard.OperationValid(State != WorkItemQueueState.Running, $"Attempted to restore state of {Name} while queue was running.");
var savedQueues = _savedState.Pop().InnerQueues;
// If there are any queued items, copy to the next lower level
for (int i = 0; i < PRIORITY_LEVELS; i++)
{
WorkItem work;
while (_innerQueues[i].TryDequeue(out work))
savedQueues[i].Enqueue(work);
}
_innerQueues = savedQueues;
}
#endregion
#region Internal Methods for Testing
internal string DumpContents()
{
var sb = new System.Text.StringBuilder();
sb.AppendLine($"Contents of {Name} at isolation level {_savedState.Count}");
if (IsEmpty)
sb.AppendLine(" <empty>");
else
for (int priority = 0; priority < PRIORITY_LEVELS; priority++)
{
foreach (WorkItem work in _innerQueues[priority])
sb.AppendLine($"pri-{priority}: {work.Name}");
}
int level = 0;
foreach (var state in _savedState)
{
sb.AppendLine($"Saved State {level++}");
bool isEmpty = true;
for (int priority = 0; priority < PRIORITY_LEVELS; priority++)
{
foreach (WorkItem work in state.InnerQueues[priority])
{
sb.AppendLine($"pri-{priority}: {work.Name}");
isEmpty = false;
}
}
if (isEmpty)
sb.AppendLine(" <empty>");
}
return sb.ToString();
}
#endregion
}
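// --- Editor-added hedged sketch (illustrative only, not part of NUnit) ---
// Shows the intended life cycle of a WorkItemQueue: enqueue, start, dequeue, stop.
// Obtaining a WorkItem is outside this file (NUnit builds them from tests), so one is taken
// as a parameter; WorkItem.Execute() is assumed to run the item as it does elsewhere in the
// framework.
internal static class WorkItemQueueUsageSketch
{
internal static void RunSingleItem(WorkItemQueue queue, WorkItem work)
{
queue.Enqueue(work); // queued at normal priority
queue.Start(); // move from Paused to Running so Dequeue can return items
WorkItem next = queue.Dequeue(); // spins/waits until an item is available or the queue stops
next?.Execute();
queue.Stop(); // subsequent Dequeue() calls now return null
}
}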
#if NET20 || NET35
internal static class ManualResetEventExtensions
{
public static bool Wait (this ManualResetEvent mre, int millisecondsTimeout)
{
return mre.WaitOne(millisecondsTimeout, false);
}
}
#endif
}
#endif
| |
using System;
using System.Collections.Generic;
using UnityEditorInternal;
using UnityEngine;
using UnityEditor;
using UnityEngine.Sprites;
using UnityEditor.Sprites;
namespace Anima2D
{
public class TextureEditorWindow : EditorWindow
{
public Color textureColor = Color.white;
protected class Styles
{
public readonly GUIStyle dragdot = "U2D.dragDot";
public readonly GUIStyle dragdotDimmed = "U2D.dragDotDimmed";
public readonly GUIStyle dragdotactive = "U2D.dragDotActive";
public readonly GUIStyle createRect = "U2D.createRect";
public readonly GUIStyle preToolbar = "preToolbar";
public readonly GUIStyle preButton = "preButton";
public readonly GUIStyle preLabel = "preLabel";
public readonly GUIStyle preSlider = "preSlider";
public readonly GUIStyle preSliderThumb = "preSliderThumb";
public readonly GUIStyle preBackground = "preBackground";
public readonly GUIStyle pivotdotactive = "U2D.pivotDotActive";
public readonly GUIStyle pivotdot = "U2D.pivotDot";
public readonly GUIStyle dragBorderdot = new GUIStyle();
public readonly GUIStyle dragBorderDotActive = new GUIStyle();
public readonly GUIStyle toolbar;
public readonly GUIContent alphaIcon;
public readonly GUIContent RGBIcon;
public readonly GUIStyle notice;
public readonly GUIContent smallMip;
public readonly GUIContent largeMip;
public readonly GUIContent spriteIcon;
public readonly GUIContent showBonesIcon;
Texture2D mShowBonesImage;
Texture2D showBonesImage {
get {
if(!mShowBonesImage)
{
mShowBonesImage = Resources.Load<Texture2D>("showBonesIcon");
mShowBonesImage.hideFlags = HideFlags.DontSave;
}
return mShowBonesImage;
}
}
public Styles()
{
this.toolbar = new GUIStyle(EditorStyles.inspectorDefaultMargins);
this.toolbar.margin.top = 0;
this.toolbar.margin.bottom = 0;
this.alphaIcon = EditorGUIUtility.IconContent("PreTextureAlpha");
this.RGBIcon = EditorGUIUtility.IconContent("PreTextureRGB");
this.preToolbar.border.top = 0;
this.createRect.border = new RectOffset(3, 3, 3, 3);
this.notice = new GUIStyle(GUI.skin.label);
this.notice.alignment = TextAnchor.MiddleCenter;
this.notice.normal.textColor = Color.yellow;
this.dragBorderdot.fixedHeight = 5f;
this.dragBorderdot.fixedWidth = 5f;
this.dragBorderdot.normal.background = EditorGUIUtility.whiteTexture;
this.dragBorderDotActive.fixedHeight = this.dragBorderdot.fixedHeight;
this.dragBorderDotActive.fixedWidth = this.dragBorderdot.fixedWidth;
this.dragBorderDotActive.normal.background = EditorGUIUtility.whiteTexture;
this.smallMip = EditorGUIUtility.IconContent("PreTextureMipMapLow");
this.largeMip = EditorGUIUtility.IconContent("PreTextureMipMapHigh");
this.spriteIcon = EditorGUIUtility.IconContent("Sprite Icon");
this.spriteIcon.tooltip = "Reset Sprite";
this.showBonesIcon = new GUIContent(showBonesImage);
this.showBonesIcon.tooltip = "Show Bones";
}
}
public static string s_NoSelectionWarning = "No sprite selected";
protected const float k_BorderMargin = 10f;
protected const float k_ScrollbarMargin = 16f;
protected const float k_InspectorWindowMargin = 8f;
protected const float k_InspectorWidth = 330f;
protected const float k_InspectorHeight = 148f;
protected const float k_MinZoomPercentage = 0.9f;
protected const float k_MaxZoom = 10f;
protected const float k_WheelZoomSpeed = 0.03f;
protected const float k_MouseZoomSpeed = 0.005f;
protected static Styles s_Styles;
protected Texture2D m_Texture;
protected Rect m_TextureViewRect;
protected Rect m_TextureRect;
protected bool m_ShowAlpha;
protected float m_Zoom = -1f;
protected float m_MipLevel;
protected Vector2 m_ScrollPosition = default(Vector2);
private static Material s_HandleWireMaterial;
private static Material s_HandleWireMaterial2D;
static Material handleWireMaterial
{
get
{
if (!s_HandleWireMaterial)
{
s_HandleWireMaterial = (Material)EditorGUIUtility.LoadRequired("SceneView/HandleLines.mat");
s_HandleWireMaterial2D = (Material)EditorGUIUtility.LoadRequired("SceneView/2DHandleLines.mat");
}
return (!Camera.current) ? s_HandleWireMaterial2D : s_HandleWireMaterial;
}
}
static Texture2D transparentCheckerTexture
{
get
{
if (EditorGUIUtility.isProSkin)
{
return EditorGUIUtility.LoadRequired("Previews/Textures/textureCheckerDark.png") as Texture2D;
}
return EditorGUIUtility.LoadRequired("Previews/Textures/textureChecker.png") as Texture2D;
}
}
protected Rect maxScrollRect
{
get
{
float num = (float)this.m_Texture.width * 0.5f * this.m_Zoom;
float num2 = (float)this.m_Texture.height * 0.5f * this.m_Zoom;
return new Rect(-num, -num2, this.m_TextureViewRect.width + num * 2f, this.m_TextureViewRect.height + num2 * 2f);
}
}
protected Rect maxRect
{
get
{
float num = this.m_TextureViewRect.width * 0.5f / this.GetMinZoom();
float num2 = this.m_TextureViewRect.height * 0.5f / this.GetMinZoom();
float left = -num;
float top = -num2;
float width = (float)this.m_Texture.width + num * 2f;
float height = (float)this.m_Texture.height + num2 * 2f;
return new Rect(left, top, width, height);
}
}
protected void InitStyles()
{
if (s_Styles == null)
{
s_Styles = new Styles();
}
}
protected float GetMinZoom()
{
if (this.m_Texture == null)
{
return 1f;
}
return Mathf.Min(this.m_TextureViewRect.width / (float)this.m_Texture.width, this.m_TextureViewRect.height / (float)this.m_Texture.height) * 0.9f;
}
protected virtual void HandleZoom()
{
bool flag = Event.current.alt && Event.current.button == 1;
if (flag)
{
EditorGUIUtility.AddCursorRect(this.m_TextureViewRect, MouseCursor.Zoom);
}
if (((Event.current.type == EventType.MouseUp || Event.current.type == EventType.MouseDown) && flag) || ((Event.current.type == EventType.KeyUp || Event.current.type == EventType.KeyDown) && Event.current.keyCode == KeyCode.LeftAlt))
{
base.Repaint();
}
if (Event.current.type == EventType.ScrollWheel || (Event.current.type == EventType.MouseDrag && Event.current.alt && Event.current.button == 1))
{
float zoomMultiplier = 1f - Event.current.delta.y * ((Event.current.type != EventType.ScrollWheel) ? -0.005f : 0.03f);
float wantedZoom = this.m_Zoom * zoomMultiplier;
float currentZoom = Mathf.Clamp(wantedZoom, this.GetMinZoom(), 10f);
if (currentZoom != this.m_Zoom)
{
this.m_Zoom = currentZoom;
if (wantedZoom != currentZoom)
zoomMultiplier /= wantedZoom / currentZoom;
Vector3 textureHalfSize = new Vector2(m_Texture.width, m_Texture.height) * 0.5f;
Vector3 mousePositionWorld = Handles.inverseMatrix.MultiplyPoint3x4(Event.current.mousePosition);
Vector3 delta = (mousePositionWorld - textureHalfSize) * (zoomMultiplier - 1f);
m_ScrollPosition += (Vector2)Handles.matrix.MultiplyVector(delta);
Event.current.Use();
}
}
}
protected void HandlePanning()
{
bool flag = (!Event.current.alt && Event.current.button > 0) || (Event.current.alt && Event.current.button <= 0);
if (flag && GUIUtility.hotControl == 0)
{
EditorGUIUtility.AddCursorRect(this.m_TextureViewRect, MouseCursor.Pan);
if (Event.current.type == EventType.MouseDrag)
{
this.m_ScrollPosition -= Event.current.delta;
Event.current.Use();
}
}
if (((Event.current.type == EventType.MouseUp || Event.current.type == EventType.MouseDown) && flag) || ((Event.current.type == EventType.KeyUp || Event.current.type == EventType.KeyDown) && Event.current.keyCode == KeyCode.LeftAlt))
{
base.Repaint();
}
}
public void DrawLine(Vector3 p1, Vector3 p2)
{
GL.Vertex(p1);
GL.Vertex(p2);
}
public void BeginLines(Color color)
{
handleWireMaterial.SetPass(0);
GL.PushMatrix();
GL.MultMatrix(Handles.matrix);
GL.Begin(1);
GL.Color(color);
}
public void EndLines()
{
GL.End();
GL.PopMatrix();
}
protected void DrawTexturespaceBackground()
{
float num = Mathf.Max(this.maxRect.width, this.maxRect.height);
Vector2 b = new Vector2(this.maxRect.xMin, this.maxRect.yMin);
float num2 = num * 0.5f;
float a = (!EditorGUIUtility.isProSkin) ? 0.08f : 0.15f;
float num3 = 8f;
BeginLines(new Color(0f, 0f, 0f, a));
for (float num4 = 0f; num4 <= num; num4 += num3)
{
float x = -num2 + num4 + b.x;
float y = num2 + num4 + b.y;
Vector2 p1 = new Vector2(x,y);
x = num2 + num4 + b.x;
y = -num2 + num4 + b.y;
Vector2 p2 = new Vector2(x, y);
DrawLine(p1, p2);
}
EndLines();
}
private float Log2(float x)
{
return (float)(Math.Log((double)x) / Math.Log(2.0));
}
protected void DrawTexture()
{
int num = Mathf.Max(this.m_Texture.width, 1);
float num2 = Mathf.Min(this.m_MipLevel, (float)(m_Texture.mipmapCount - 1));
//float mipMapBias = this.m_Texture.mipMapBias;
m_Texture.mipMapBias = (num2 - this.Log2((float)num / this.m_TextureRect.width));
//FilterMode filterMode = this.m_Texture.filterMode;
//m_Texture.filterMode = FilterMode.Point;
Rect r = m_TextureRect;
r.position -= m_ScrollPosition;
if (this.m_ShowAlpha)
{
EditorGUI.DrawTextureAlpha(r, this.m_Texture);
}
else
{
GUI.DrawTextureWithTexCoords(r, transparentCheckerTexture,
new Rect(r.width * -0.5f / (float)transparentCheckerTexture.width,
r.height * -0.5f / (float)transparentCheckerTexture.height,
r.width / (float)transparentCheckerTexture.width,
r.height / (float)transparentCheckerTexture.height), false);
GUI.color = textureColor;
GUI.DrawTexture(r, this.m_Texture);
}
//m_Texture.filterMode = filterMode;
//m_Texture.mipMapBias = mipMapBias;
}
protected void DrawScreenspaceBackground()
{
if (Event.current.type == EventType.Repaint)
{
s_Styles.preBackground.Draw(this.m_TextureViewRect, false, false, false, false);
}
}
protected void HandleScrollbars()
{
Rect position = new Rect(this.m_TextureViewRect.xMin, this.m_TextureViewRect.yMax, this.m_TextureViewRect.width, 16f);
this.m_ScrollPosition.x = GUI.HorizontalScrollbar(position, this.m_ScrollPosition.x, this.m_TextureViewRect.width, this.maxScrollRect.xMin, this.maxScrollRect.xMax);
Rect position2 = new Rect(this.m_TextureViewRect.xMax, this.m_TextureViewRect.yMin, 16f, this.m_TextureViewRect.height);
this.m_ScrollPosition.y = GUI.VerticalScrollbar(position2, this.m_ScrollPosition.y, this.m_TextureViewRect.height, this.maxScrollRect.yMin, this.maxScrollRect.yMax);
}
protected void SetupHandlesMatrix()
{
Vector3 pos = new Vector3(this.m_TextureRect.x - m_ScrollPosition.x, this.m_TextureRect.yMax - m_ScrollPosition.y, 0f);
Vector3 s = new Vector3(this.m_Zoom, -this.m_Zoom, 1f);
Handles.matrix = Matrix4x4.TRS(pos, Quaternion.identity, s);
}
protected void DoAlphaZoomToolbarGUI()
{
this.m_ShowAlpha = GUILayout.Toggle(this.m_ShowAlpha, (!this.m_ShowAlpha) ? s_Styles.RGBIcon : s_Styles.alphaIcon, "toolbarButton", new GUILayoutOption[0]);
this.m_Zoom = GUILayout.HorizontalSlider(this.m_Zoom, this.GetMinZoom(), 10f, s_Styles.preSlider, s_Styles.preSliderThumb, new GUILayoutOption[]
{
GUILayout.MaxWidth(64f)
});
int num = 1;
if (this.m_Texture != null)
{
num = Mathf.Max(num, m_Texture.mipmapCount);
}
EditorGUI.BeginDisabledGroup(num == 1);
GUILayout.Box(s_Styles.smallMip, s_Styles.preLabel, new GUILayoutOption[0]);
this.m_MipLevel = Mathf.Round(GUILayout.HorizontalSlider(this.m_MipLevel, (float)(num - 1), 0f, s_Styles.preSlider, s_Styles.preSliderThumb, new GUILayoutOption[]
{
GUILayout.MaxWidth(64f)
}));
GUILayout.Box(s_Styles.largeMip, s_Styles.preLabel, new GUILayoutOption[0]);
EditorGUI.EndDisabledGroup();
}
protected void DoTextureGUI()
{
if (m_Zoom < 0f)
m_Zoom = GetMinZoom();
m_TextureRect = new Rect(m_TextureViewRect.width / 2f - (float)m_Texture.width * m_Zoom / 2f,
m_TextureViewRect.height / 2f - (float)m_Texture.height * m_Zoom / 2f,
(float)m_Texture.width * m_Zoom,
(float)m_Texture.height * m_Zoom);
HandleScrollbars();
SetupHandlesMatrix();
DrawScreenspaceBackground();
GUI.BeginGroup(m_TextureViewRect);
HandleEvents();
if (Event.current.type == EventType.Repaint)
{
DrawTexturespaceBackground();
DrawTexture();
DrawGizmos();
}
DoTextureGUIExtras();
GUI.EndGroup();
}
protected virtual void HandleEvents()
{
}
protected virtual void DoTextureGUIExtras()
{
}
protected virtual void DrawGizmos()
{
}
protected void SetNewTexture(Texture2D texture)
{
if (texture != this.m_Texture)
{
this.m_Texture = texture;
this.m_Zoom = -1f;
}
}
protected virtual void DoToolbarGUI()
{
}
protected virtual void OnGUI()
{
if(m_Texture)
{
InitStyles();
EditorGUILayout.BeginHorizontal((GUIStyle) "Toolbar");
DoToolbarGUI();
EditorGUILayout.EndHorizontal();
m_TextureViewRect = new Rect(0f, 16f, base.position.width - 16f, base.position.height - 16f - 16f);
EditorGUILayout.BeginHorizontal();
GUILayout.FlexibleSpace();
DoTextureGUI();
EditorGUILayout.EndHorizontal();
}
}
}
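// --- Editor-added hedged sketch (illustrative only, not part of Anima2D) ---
// A minimal window derived from TextureEditorWindow showing how the virtual hooks are meant
// to be used: DoToolbarGUI reuses the base alpha/zoom/mip controls and SetNewTexture feeds
// the texture to preview. The menu path and the use of Selection.activeObject are
// illustrative assumptions, not Anima2D conventions.
public class ExampleTextureWindow : TextureEditorWindow
{
[MenuItem("Window/Anima2D Example/Texture Preview")] // hypothetical menu path
static void Open()
{
var window = GetWindow<ExampleTextureWindow>();
window.SetNewTexture(Selection.activeObject as Texture2D); // assumes a Texture2D asset is selected
}
protected override void DoToolbarGUI()
{
// Reuse the base toolbar controls (RGB/alpha toggle, zoom and mip sliders).
DoAlphaZoomToolbarGUI();
}
}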
}
| |
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.Net;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
using Microsoft.Extensions.Options;
using Orleans.CodeGeneration;
using Orleans.Configuration;
using Orleans.Messaging;
using Orleans.Providers;
using Orleans.Runtime;
using Orleans.Runtime.Configuration;
using Orleans.Serialization;
using Orleans.Streams;
namespace Orleans
{
internal class OutsideRuntimeClient : IRuntimeClient, IDisposable, IClusterConnectionStatusListener
{
internal static bool TestOnlyThrowExceptionDuringInit { get; set; }
private readonly ILogger logger;
private readonly ClientMessagingOptions clientMessagingOptions;
private readonly ConcurrentDictionary<CorrelationId, CallbackData> callbacks;
private InvokableObjectManager localObjects;
private ClientMessageCenter transport;
private bool listenForMessages;
private CancellationTokenSource listeningCts;
private bool firstMessageReceived;
private bool disposing;
private ClientProviderRuntime clientProviderRuntime;
internal ClientStatisticsManager ClientStatistics;
private GrainId clientId;
private readonly GrainId handshakeClientId;
private ThreadTrackingStatistic incomingMessagesThreadTimeTracking;
private readonly TimeSpan typeMapRefreshInterval;
private AsyncTaskSafeTimer typeMapRefreshTimer = null;
private static readonly TimeSpan ResetTimeout = TimeSpan.FromMinutes(1);
private const string BARS = "----------";
public IInternalGrainFactory InternalGrainFactory { get; private set; }
private MessageFactory messageFactory;
private IPAddress localAddress;
private IGatewayListProvider gatewayListProvider;
private readonly ILoggerFactory loggerFactory;
private readonly IOptions<StatisticsOptions> statisticsOptions;
private readonly ApplicationRequestsStatisticsGroup appRequestStatistics;
private readonly StageAnalysisStatisticsGroup schedulerStageStatistics;
private SharedCallbackData sharedCallbackData;
private SafeTimer callbackTimer;
public ActivationAddress CurrentActivationAddress
{
get;
private set;
}
public string CurrentActivationIdentity
{
get { return CurrentActivationAddress.ToString(); }
}
internal Task<IList<Uri>> GetGateways() =>
this.transport.GatewayManager.ListProvider.GetGateways();
public IStreamProviderRuntime CurrentStreamProviderRuntime
{
get { return clientProviderRuntime; }
}
public IGrainReferenceRuntime GrainReferenceRuntime { get; private set; }
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Reliability", "CA2000:Dispose objects before losing scope",
Justification = "MessageCenter is IDisposable but cannot call Dispose yet as it lives past the end of this method call.")]
public OutsideRuntimeClient(
ILoggerFactory loggerFactory,
IOptions<ClientMessagingOptions> clientMessagingOptions,
IOptions<TypeManagementOptions> typeManagementOptions,
IOptions<StatisticsOptions> statisticsOptions,
ApplicationRequestsStatisticsGroup appRequestStatistics,
StageAnalysisStatisticsGroup schedulerStageStatistics,
ClientStatisticsManager clientStatisticsManager)
{
this.loggerFactory = loggerFactory;
this.statisticsOptions = statisticsOptions;
this.appRequestStatistics = appRequestStatistics;
this.schedulerStageStatistics = schedulerStageStatistics;
this.ClientStatistics = clientStatisticsManager;
this.logger = loggerFactory.CreateLogger<OutsideRuntimeClient>();
this.handshakeClientId = GrainId.NewClientId();
callbacks = new ConcurrentDictionary<CorrelationId, CallbackData>();
this.clientMessagingOptions = clientMessagingOptions.Value;
this.typeMapRefreshInterval = typeManagementOptions.Value.TypeMapRefreshInterval;
}
internal void ConsumeServices(IServiceProvider services)
{
try
{
AppDomain.CurrentDomain.DomainUnload += CurrentDomain_DomainUnload;
this.ServiceProvider = services;
var connectionLostHandlers = this.ServiceProvider.GetServices<ConnectionToClusterLostHandler>();
foreach (var handler in connectionLostHandlers)
{
this.ClusterConnectionLost += handler;
}
var clientInvokeCallbacks = this.ServiceProvider.GetServices<ClientInvokeCallback>();
foreach (var handler in clientInvokeCallbacks)
{
this.ClientInvokeCallback += handler;
}
this.InternalGrainFactory = this.ServiceProvider.GetRequiredService<IInternalGrainFactory>();
this.ClientStatistics = this.ServiceProvider.GetRequiredService<ClientStatisticsManager>();
this.messageFactory = this.ServiceProvider.GetService<MessageFactory>();
var serializationManager = this.ServiceProvider.GetRequiredService<SerializationManager>();
this.localObjects = new InvokableObjectManager(
this,
serializationManager,
this.loggerFactory.CreateLogger<InvokableObjectManager>());
this.sharedCallbackData = new SharedCallbackData(
this.TryResendMessage,
msg => this.UnregisterCallback(msg.Id),
this.loggerFactory.CreateLogger<CallbackData>(),
this.clientMessagingOptions,
serializationManager,
this.appRequestStatistics);
var timerLogger = this.loggerFactory.CreateLogger<SafeTimer>();
var minTicks = Math.Min(this.clientMessagingOptions.ResponseTimeout.Ticks, TimeSpan.FromSeconds(1).Ticks);
var period = TimeSpan.FromTicks(minTicks);
this.callbackTimer = new SafeTimer(timerLogger, this.OnCallbackExpiryTick, null, period, period);
this.GrainReferenceRuntime = this.ServiceProvider.GetRequiredService<IGrainReferenceRuntime>();
BufferPool.InitGlobalBufferPool(this.clientMessagingOptions);
this.clientProviderRuntime = this.ServiceProvider.GetRequiredService<ClientProviderRuntime>();
this.localAddress = ConfigUtilities.GetLocalIPAddress(this.clientMessagingOptions.PreferredFamily, this.clientMessagingOptions.NetworkInterfaceName);
// Client init / sign-on message
logger.Info(ErrorCode.ClientInitializing, string.Format(
"{0} Initializing OutsideRuntimeClient on {1} at {2} Client Id = {3} {0}",
BARS, Dns.GetHostName(), localAddress, handshakeClientId));
string startMsg = string.Format("{0} Starting OutsideRuntimeClient with runtime Version='{1}' in AppDomain={2}",
BARS, RuntimeVersion.Current, PrintAppDomainDetails());
logger.Info(ErrorCode.ClientStarting, startMsg);
if (TestOnlyThrowExceptionDuringInit)
{
throw new InvalidOperationException("TestOnlyThrowExceptionDuringInit");
}
this.gatewayListProvider = this.ServiceProvider.GetRequiredService<IGatewayListProvider>();
var statisticsLevel = statisticsOptions.Value.CollectionLevel;
if (statisticsLevel.CollectThreadTimeTrackingStats())
{
incomingMessagesThreadTimeTracking = new ThreadTrackingStatistic("ClientReceiver", this.loggerFactory, this.statisticsOptions, this.schedulerStageStatistics);
}
}
catch (Exception exc)
{
if (logger != null) logger.Error(ErrorCode.Runtime_Error_100319, "OutsideRuntimeClient constructor failed.", exc);
ConstructorReset();
throw;
}
}
public IServiceProvider ServiceProvider { get; private set; }
private async Task StreamingInitialize()
{
var implicitSubscriberTable = await transport.GetImplicitStreamSubscriberTable(this.InternalGrainFactory);
clientProviderRuntime.StreamingInitialize(implicitSubscriberTable);
}
public async Task Start(Func<Exception, Task<bool>> retryFilter = null)
{
// Deliberately avoid capturing the current synchronization context during startup and execute on the default scheduler.
// This helps to avoid any issues (such as deadlocks) caused by executing with the client's synchronization context/scheduler.
await Task.Run(() => this.StartInternal(retryFilter)).ConfigureAwait(false);
logger.Info(ErrorCode.ProxyClient_StartDone, "{0} Started OutsideRuntimeClient with Global Client ID: {1}", BARS, CurrentActivationAddress.ToString() + ", client GUID ID: " + handshakeClientId);
}
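// --- Editor-added hedged sketch (illustrative only) ---
// Shows the shape of a retry filter that Start(...) accepts: it is awaited for every failed
// startup step, returning true retries the step and returning false rethrows the exception.
// The two-second delay is an arbitrary assumption, not an Orleans default.
private static async Task<bool> ExampleStartupRetryFilter(Exception exception)
{
if (exception is SiloUnavailableException)
{
await Task.Delay(TimeSpan.FromSeconds(2)); // back off briefly before retrying
return true; // retry the failed startup step
}
return false; // any other failure propagates to the caller of Start
}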
// used for testing to (carefully!) allow two clients in the same process
private async Task StartInternal(Func<Exception, Task<bool>> retryFilter)
{
// Initialize the gateway list provider, since information from the cluster is required to successfully
// initialize subsequent services.
var initializedGatewayProvider = new[] {false};
await ExecuteWithRetries(async () =>
{
if (!initializedGatewayProvider[0])
{
await this.gatewayListProvider.InitializeGatewayListProvider();
initializedGatewayProvider[0] = true;
}
var gateways = await this.gatewayListProvider.GetGateways();
if (gateways.Count == 0)
{
var gatewayProviderType = this.gatewayListProvider.GetType().GetParseableName();
var err = $"Could not find any gateway in {gatewayProviderType}. Orleans client cannot initialize.";
logger.Error(ErrorCode.GatewayManager_NoGateways, err);
throw new SiloUnavailableException(err);
}
},
retryFilter);
var generation = -SiloAddress.AllocateNewGeneration(); // Client generations are negative
transport = ActivatorUtilities.CreateInstance<ClientMessageCenter>(this.ServiceProvider, localAddress, generation, handshakeClientId);
transport.Start();
CurrentActivationAddress = ActivationAddress.NewActivationAddress(transport.MyAddress, handshakeClientId);
listeningCts = new CancellationTokenSource();
var ct = listeningCts.Token;
listenForMessages = true;
// Keep the message-pump thread handling simple for now: just queue a task on the thread pool.
Task.Run(
() =>
{
while (listenForMessages && !ct.IsCancellationRequested)
{
try
{
RunClientMessagePump(ct);
}
catch (Exception exc)
{
logger.Error(ErrorCode.Runtime_Error_100326, "RunClientMessagePump has thrown exception", exc);
}
}
},
ct).Ignore();
await ExecuteWithRetries(
async () => this.GrainTypeResolver = await transport.GetGrainTypeResolver(this.InternalGrainFactory),
retryFilter);
this.typeMapRefreshTimer = new AsyncTaskSafeTimer(
this.logger,
RefreshGrainTypeResolver,
null,
this.typeMapRefreshInterval,
this.typeMapRefreshInterval);
ClientStatistics.Start(transport, clientId);
await ExecuteWithRetries(StreamingInitialize, retryFilter);
async Task ExecuteWithRetries(Func<Task> task, Func<Exception, Task<bool>> shouldRetry)
{
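// Retries the task while shouldRetry approves the thrown exception; rethrows when it does not (or when no filter is supplied).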
while (true)
{
try
{
await task();
return;
}
catch (Exception exception) when (shouldRetry != null)
{
var retry = await shouldRetry(exception);
if (!retry) throw;
}
}
}
}
private async Task RefreshGrainTypeResolver(object _)
{
try
{
GrainTypeResolver = await transport.GetGrainTypeResolver(this.InternalGrainFactory);
}
catch(Exception ex)
{
this.logger.Warn(ErrorCode.TypeManager_GetClusterGrainTypeResolverError, "Refreshing the GrainTypeResolver failed. Will be retried on the next refresh interval.", ex);
}
}
private void RunClientMessagePump(CancellationToken ct)
{
incomingMessagesThreadTimeTracking?.OnStartExecution();
while (listenForMessages)
{
var message = transport.WaitMessage(Message.Categories.Application, ct);
if (message == null) // if wait was cancelled
break;
// when we receive the first message, we update the
// clientId for this client because it may have been modified to
// include the cluster name
if (!firstMessageReceived)
{
firstMessageReceived = true;
if (!handshakeClientId.Equals(message.TargetGrain))
{
clientId = message.TargetGrain;
transport.UpdateClientId(clientId);
CurrentActivationAddress = ActivationAddress.GetAddress(transport.MyAddress, clientId, CurrentActivationAddress.Activation);
}
else
{
clientId = handshakeClientId;
}
}
switch (message.Direction)
{
case Message.Directions.Response:
{
ReceiveResponse(message);
break;
}
case Message.Directions.OneWay:
case Message.Directions.Request:
{
this.localObjects.Dispatch(message);
break;
}
default:
logger.Error(ErrorCode.Runtime_Error_100327, $"Message not supported: {message}.");
break;
}
}
incomingMessagesThreadTimeTracking?.OnStopExecution();
}
public void SendResponse(Message request, Response response)
{
var message = this.messageFactory.CreateResponseMessage(request);
message.BodyObject = response;
transport.SendMessage(message);
}
/// <summary>
/// For testing only.
/// </summary>
public void Disconnect()
{
transport.Disconnect();
}
/// <summary>
/// For testing only.
/// </summary>
public void Reconnect()
{
transport.Reconnect();
}
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Reliability", "CA2000:Dispose objects before losing scope",
Justification = "CallbackData is IDisposable but instances exist beyond lifetime of this method so cannot Dispose yet.")]
public void SendRequest(GrainReference target, InvokeMethodRequest request, TaskCompletionSource<object> context, string debugContext = null, InvokeMethodOptions options = InvokeMethodOptions.None, string genericArguments = null)
{
var message = this.messageFactory.CreateMessage(request, options);
SendRequestMessage(target, message, context, debugContext, options, genericArguments);
}
private void SendRequestMessage(GrainReference target, Message message, TaskCompletionSource<object> context, string debugContext = null, InvokeMethodOptions options = InvokeMethodOptions.None, string genericArguments = null)
{
var targetGrainId = target.GrainId;
var oneWay = (options & InvokeMethodOptions.OneWay) != 0;
message.SendingGrain = CurrentActivationAddress.Grain;
message.SendingActivation = CurrentActivationAddress.Activation;
message.TargetGrain = targetGrainId;
if (!String.IsNullOrEmpty(genericArguments))
message.GenericGrainType = genericArguments;
if (targetGrainId.IsSystemTarget)
{
// If the silo isn't supplied, it will be filled in by the sender to be the gateway silo
message.TargetSilo = target.SystemTargetSilo;
if (target.SystemTargetSilo != null)
{
message.TargetActivation = ActivationId.GetSystemActivation(targetGrainId, target.SystemTargetSilo);
}
}
// Client sending messages to another client (observer). Yes, we support that.
if (target.IsObserverReference)
{
message.TargetObserverId = target.ObserverId;
}
if (debugContext != null)
{
message.DebugContext = debugContext;
}
if (message.IsExpirableMessage(this.clientMessagingOptions.DropExpiredMessages))
{
// IsExpirableMessage already excludes system target messages, so only expirable application messages get a time-to-live.
message.TimeToLive = this.clientMessagingOptions.ResponseTimeout;
}
if (!oneWay)
{
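// Two-way calls register a callback keyed by the message id so ReceiveResponse can complete the caller's TaskCompletionSource.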
var callbackData = new CallbackData(this.sharedCallbackData, context, message);
callbacks.TryAdd(message.Id, callbackData);
}
if (logger.IsEnabled(LogLevel.Trace)) logger.Trace("Send {0}", message);
transport.SendMessage(message);
}
private bool TryResendMessage(Message message)
{
if (!message.MayResend(this.clientMessagingOptions.MaxResendCount))
{
return false;
}
if (logger.IsEnabled(LogLevel.Debug)) logger.Debug("Resend {0}", message);
message.ResendCount = message.ResendCount + 1;
message.TargetHistory = message.GetTargetHistory();
if (!message.TargetGrain.IsSystemTarget)
{
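// Clear any stale placement so the resent message is re-routed; system targets keep their fixed silo address.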
message.TargetActivation = null;
message.TargetSilo = null;
message.ClearTargetAddress();
}
transport.SendMessage(message);
return true;
}
public void ReceiveResponse(Message response)
{
if (logger.IsEnabled(LogLevel.Trace)) logger.Trace("Received {0}", response);
// ignore duplicate requests
if (response.Result == Message.ResponseTypes.Rejection && response.RejectionType == Message.RejectionTypes.DuplicateRequest)
return;
CallbackData callbackData;
var found = callbacks.TryGetValue(response.Id, out callbackData);
if (found)
{
// We need to import the RequestContext here as well.
// Unfortunately, it is not enough, since CallContext.LogicalGetData will not flow "up" from task completion source into the resolved task.
// RequestContextExtensions.Import(response.RequestContextData);
callbackData.DoCallback(response);
}
else
{
logger.Warn(ErrorCode.Runtime_Error_100011, "No callback for response message: " + response);
}
}
private void UnregisterCallback(CorrelationId id)
{
CallbackData ignore;
callbacks.TryRemove(id, out ignore);
}
public void Reset(bool cleanup)
{
Utils.SafeExecute(() =>
{
if (logger != null)
{
logger.Info("OutsideRuntimeClient.Reset(): client Id " + clientId);
}
}, this.logger);
Utils.SafeExecute(() =>
{
if (typeMapRefreshTimer != null)
{
typeMapRefreshTimer.Dispose();
typeMapRefreshTimer = null;
}
}, logger, "Client.typeMapRefreshTimer.Dispose");
Utils.SafeExecute(() =>
{
if (clientProviderRuntime != null)
{
clientProviderRuntime.Reset(cleanup).WaitWithThrow(ResetTimeout);
}
}, logger, "Client.clientProviderRuntime.Reset");
Utils.SafeExecute(() =>
{
incomingMessagesThreadTimeTracking?.OnStopExecution();
}, logger, "Client.incomingMessagesThreadTimeTracking.OnStopExecution");
Utils.SafeExecute(() =>
{
if (transport != null)
{
transport.PrepareToStop();
}
}, logger, "Client.PrepareToStop-Transport");
listenForMessages = false;
Utils.SafeExecute(() =>
{
if (listeningCts != null)
{
listeningCts.Cancel();
}
}, logger, "Client.Stop-ListeningCTS");
Utils.SafeExecute(() =>
{
if (transport != null)
{
transport.Stop();
}
}, logger, "Client.Stop-Transport");
Utils.SafeExecute(() =>
{
if (ClientStatistics != null)
{
ClientStatistics.Stop();
}
}, logger, "Client.Stop-ClientStatistics");
ConstructorReset();
}
private void ConstructorReset()
{
Utils.SafeExecute(() =>
{
if (logger != null)
{
logger.Info("OutsideRuntimeClient.ConstructorReset(): client Id " + clientId);
}
});
try
{
AppDomain.CurrentDomain.DomainUnload -= CurrentDomain_DomainUnload;
}
catch (Exception) { }
try
{
if (clientProviderRuntime != null)
{
clientProviderRuntime.Reset().WaitWithThrow(ResetTimeout);
}
}
catch (Exception) { }
Utils.SafeExecute(() => this.Dispose());
}
/// <inheritdoc />
public TimeSpan GetResponseTimeout() => this.sharedCallbackData.ResponseTimeout;
/// <inheritdoc />
public void SetResponseTimeout(TimeSpan timeout) => this.sharedCallbackData.ResponseTimeout = timeout;
public GrainReference CreateObjectReference(IAddressable obj, IGrainMethodInvoker invoker)
{
if (obj is GrainReference)
throw new ArgumentException("Argument obj is already a grain reference.", nameof(obj));
if (obj is Grain)
throw new ArgumentException("Argument must not be a grain class.", nameof(obj));
GrainReference gr = GrainReference.NewObserverGrainReference(clientId, GuidId.GetNewGuidId(), this.GrainReferenceRuntime);
if (!localObjects.TryRegister(obj, gr.ObserverId, invoker))
{
throw new ArgumentException(String.Format("Failed to add new observer {0} to localObjects collection.", gr), nameof(obj));
}
return gr;
}
public void DeleteObjectReference(IAddressable obj)
{
if (!(obj is GrainReference))
throw new ArgumentException("Argument reference is not a grain reference.");
var reference = (GrainReference)obj;
if (!localObjects.TryDeregister(reference.ObserverId))
throw new ArgumentException("Reference is not associated with a local object.", "reference");
}
private void CurrentDomain_DomainUnload(object sender, EventArgs e)
{
try
{
logger.Warn(ErrorCode.ProxyClient_AppDomain_Unload,
$"Current AppDomain={PrintAppDomainDetails()} is unloading.");
}
catch (Exception)
{
// just ignore, make sure not to throw from here.
}
}
private string PrintAppDomainDetails()
{
return string.Format("<AppDomain.Id={0}, AppDomain.FriendlyName={1}>", AppDomain.CurrentDomain.Id, AppDomain.CurrentDomain.FriendlyName);
}
public void Dispose()
{
if (this.disposing) return;
this.disposing = true;
Utils.SafeExecute(() => this.callbackTimer?.Dispose());
Utils.SafeExecute(() =>
{
if (typeMapRefreshTimer != null)
{
typeMapRefreshTimer.Dispose();
typeMapRefreshTimer = null;
}
});
if (listeningCts != null)
{
Utils.SafeExecute(() => listeningCts.Dispose());
listeningCts = null;
}
Utils.SafeExecute(() => transport?.Dispose());
if (ClientStatistics != null)
{
Utils.SafeExecute(() => ClientStatistics.Dispose());
ClientStatistics = null;
}
Utils.SafeExecute(() => (this.ServiceProvider as IDisposable)?.Dispose());
this.ServiceProvider = null;
GC.SuppressFinalize(this);
}
public IGrainTypeResolver GrainTypeResolver { get; private set; }
public void BreakOutstandingMessagesToDeadSilo(SiloAddress deadSilo)
{
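// Fail every pending callback whose request was addressed to the silo that is now known to be dead.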
foreach (var callback in callbacks)
{
if (deadSilo.Equals(callback.Value.Message.TargetSilo))
{
callback.Value.OnTargetSiloFail();
}
}
}
/// <inheritdoc />
public ClientInvokeCallback ClientInvokeCallback { get; set; }
/// <inheritdoc />
public event ConnectionToClusterLostHandler ClusterConnectionLost;
/// <inheritdoc />
public void NotifyClusterConnectionLost()
{
try
{
this.ClusterConnectionLost?.Invoke(this, EventArgs.Empty);
}
catch (Exception ex)
{
this.logger.Error(ErrorCode.ClientError, "Error when sending cluster disconnection notification", ex);
}
}
private void OnCallbackExpiryTick(object state)
{
var currentStopwatchTicks = Stopwatch.GetTimestamp();
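// Sweep all registered callbacks and time out those whose expiry (measured in Stopwatch ticks) has passed.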
foreach (var pair in callbacks)
{
var callback = pair.Value;
if (callback.IsCompleted) continue;
if (callback.IsExpired(currentStopwatchTicks)) callback.OnTimeout(this.clientMessagingOptions.ResponseTimeout);
}
}
}
}
| |
//css_ref ..\..\WixSharp.dll;
//css_ref System.Core.dll;
using System;
using System.Diagnostics;
// using System.IO;
using System.Linq;
using System.Security.Principal;
using System.Windows.Forms;
using Microsoft.Deployment.WindowsInstaller;
using WixSharp;
using WixSharp.Bootstrapper;
using WixSharp.CommonTasks;
using WixSharp.Forms;
#pragma warning disable S1075 // URIs should not be hardcoded
#pragma warning disable S1118 // Utility classes should not have public constructors
// Truly a throw-away project for dev testing
public class CustomActions
{
[CustomAction]
public static ActionResult MyAction(Session session)
{
MessageBox.Show("Hello World! (CLR: v" + Environment.Version + ")", "Embedded Managed CA (" + (Is64BitProcess ? "x64" : "x86") + ")");
session.Log("Begin MyAction Hello World");
return ActionResult.Success;
}
[CustomAction]
public static ActionResult CheckIfAdmin(Session session)
{
if (!new WindowsPrincipal(WindowsIdentity.GetCurrent()).IsInRole(WindowsBuiltInRole.Administrator))
{
MessageBox.Show(session.GetMainWindow(), "You must start the msi file as admin");
var startInfo = new ProcessStartInfo();
startInfo.UseShellExecute = true;
startInfo.WorkingDirectory = Environment.CurrentDirectory;
startInfo.FileName = "msiexec.exe";
startInfo.Arguments = "/i \"" + session.Property("OriginalDatabase") + "\"";
startInfo.Verb = "runas";
Process.Start(startInfo);
return ActionResult.Failure;
}
else
{
return ActionResult.Success;
}
}
public static bool Is64BitProcess
{
get { return IntPtr.Size == 8; }
}
}
static class Script
{
static void prepare_dirs(string root)
{
for (int i = 0; i < 40; i++)
{
var dir = root.PathJoin(i.ToString());
System.IO.Directory.CreateDirectory(dir);
System.IO.File.WriteAllText(dir.PathJoin($"file_{i}.txt"), i.ToString());
}
}
static void Issue_865()
{
// Compiler.AutoGeneration.InstallDirDefaultId = "CommonAppDataFolder";
var project = new Project("My Product Name",
new Dir(@"%CommonAppDataFolder%",
new WixSharp.Dir("SubFolder1", new WixSharp.File(@"c:\temp\Dockerfile")),
new WixSharp.Dir("SubFolder2", new WixSharp.File(@"c:\temp\Dockerfile"))));
var wix = System.IO.File.ReadAllText(project.BuildWxs());
Console.WriteLine(wix);
}
static void Issue_386()
{
var project =
new ManagedProject("ElevatedSetup",
new Dir(@"%ProgramFiles%\My Company\My Product",
new WixSharp.File(@"Files\bin\MyApp.exe")));
project.ManagedUI = ManagedUI.Default;
project.AddAction(new ManagedAction(CustomActions.CheckIfAdmin,
Return.check,
When.Before,
Step.AppSearch,
Condition.NOT_Installed,
Sequence.InstallUISequence));
project.UIInitialized += (SetupEventArgs e) =>
{
if (!new WindowsPrincipal(WindowsIdentity.GetCurrent()).IsInRole(WindowsBuiltInRole.Administrator))
{
MessageBox.Show(e.Session.GetMainWindow(), "You must start the msi file as admin", e.ProductName);
e.Result = ActionResult.Failure;
var startInfo = new ProcessStartInfo();
startInfo.UseShellExecute = true;
startInfo.WorkingDirectory = Environment.CurrentDirectory;
startInfo.FileName = "msiexec.exe";
startInfo.Arguments = "/i \"" + e.MsiFile + "\"";
startInfo.Verb = "runas";
Process.Start(startInfo);
}
};
// project.PreserveTempFiles = true;
Compiler.BuildMsi(project);
}
static void Issue_825()
{
var client = new Feature("Feature_Client");
var server = new Feature("Feature_Server");
var project = new Project("Test",
new Dir("ProgramFiles64Folder",
new Dir("Test",
new Dir("Server",
new Dir("Sub",
new Files(server, @"Files\Docs\*.* "))),
new Dir(client, "Client",
new Dir("Sub",
new Files(@"Files\Help\*.* "))))));
project.Platform = Platform.x64;
project.UI = WUI.WixUI_FeatureTree;
project.PreserveTempFiles = true;
project.PreserveDbgFiles = true;
project.WixSourceGenerated += doc =>
{
doc.FindAll("Feature")
.First(x => x.HasAttribute("Id", "Complete"))
.Remove();
};
project.BuildMsiCmd();
}
static void Issue_825_a()
{
var client = new Feature("Feature_Client");
var server = new Feature("Feature_Server");
var project = new Project("Test",
new Dir("ProgramFiles64Folder",
new Dir("Test",
new Dir("Server",
new Dir(server, "Sub",
new File(server, "setup.cs"))),
new Dir("Client",
new Dir(client, "Sub",
new File(client, "test.cs"))))));
project.Platform = Platform.x64;
project.UI = WUI.WixUI_FeatureTree;
project.PreserveTempFiles = true;
project.PreserveDbgFiles = true;
project.BuildMsi();
}
static void Issue_374()
{
string inDir = @"C:\temp\wixIn\";
string outDir = @"C:\temp\wixOut\";
string file = @"C:\temp\wixIn\MyApp.exe";
file = "setup.cs";
var project = new Project("TestMsi")
{
GUID = Guid.NewGuid(),
PreserveTempFiles = true,
OutDir = outDir,
UI = WUI.WixUI_ProgressOnly,
Dirs = new[]
{
new Dir(@"temp", new Dir(@"wixIn", new WixSharp.File(file, new FileShortcut("MyShortcut", inDir))))
}
};
Compiler.BuildMsi(project);
}
static void Issue_609()
{
AutoElements.DisableAutoKeyPath = true;
var project = new Project("MyProduct",
new Dir(@"%LocalAppDataFolder%\My Company\My Product", new File("setup.cs")),
new RegValue(RegistryHive.CurrentUser, @"Software\My Company\My Product", "LICENSE_KEY", "123456"));
project.PreserveTempFiles = true;
project.BuildMsi();
}
static void Issue_377()
{
var project = new Project("someProject",
new Dir(new Id("someDirId"), "someDirPath",
new File("someFilePath"
, new FileAssociation("someExt")
{
Icon = "someFile.ico",
Advertise = true
}
)));
project.ControlPanelInfo.ProductIcon = "someProduct.ico";
Compiler.BuildMsi(project);
}
static void Issue_606()
{
var aisFeature = new Feature("AIS", "Allied Information Services")
{
Display = FeatureDisplay.expand
};
var webFeature = new Feature("Website", "Manager")
{
IsEnabled = false,
Condition = new FeatureCondition("WEBSITE_FEATURE = 1", level: 1)
};
aisFeature.Add(webFeature);
var project = new ManagedProject("AIS Manager",
// Base directory
new Dir(@"%ProgramFiles%\Allied\AIS Manager",
// ABS
new Dir(new Id("WEBSITEDIR"), webFeature, "Website",
new File(webFeature, @"setup.cs")
)
)
{
// AttributesDefinition = "Component:Win64=yes"
}
)
{
GUID = new Guid("E535C39D-5FE8-4C19-802D-8033E7A15B5C"),
UI = WUI.WixUI_FeatureTree,
PreserveTempFiles = true,
Platform = Platform.x64,
OutFileName = "AIS Manager x64"
};
// Tasks.RemoveDialogsBetween(project, NativeDialogs.WelcomeDlg, NativeDialogs.CustomizeDlg);
project.BuildMsi();
}
static void Issue_551()
{
var bundle = new Bundle("MyBundle", new PackageGroupRef("NetFx471Web"))
{
OutFileName = "MyBundle",
Version = new Version("1.0")
};
bundle.Include(WixExtension.Util);
bundle.WxsFiles.Add(@"E:\PrivateData\Galos\Projects\Support\MultiWxsBundle\MultiWxsBundle\NetFx471.wxs");
// uncomment this line for the build to succeed - this should happen automatically
// bundle.LightOptions = "NetFx471.wixobj";
bundle.OutDir = @"E:\PrivateData\Galos\Projects\Support\MultiWxsBundle\MultiWxsBundle";
bundle.BuildCmd();
}
static void Issue_440()
{
Compiler.WixLocation = @"E:\Projects\WixSharp\Support\Issue_#440\wix_error\packages\WiX.4.0.0.5512-pre\tools";
Compiler.WixSdkLocation = @"E:\Projects\WixSharp\Support\Issue_#440\wix_error\packages\WiX.4.0.0.5512-pre\tools\sdk";
var project = new ManagedProject("TestMsi")
{
GUID = Guid.NewGuid(),
PreserveTempFiles = true,
UI = WUI.WixUI_ProgressOnly,
Dirs = new[]
{
new Dir(@"temp", new Dir(@"wixIn", new WixSharp.File(@"E:\Projects\WixSharp\Source\src\WixSharp.Samples\Support\testpad\setup.cs")))
}
};
Compiler.BuildMsi(project);
}
static void Issue_378()
{
AutoElements.DisableAutoUserProfileRegistry = true;
// Compiler.LightOptions += " -sice:ICE38";
var project = new Project("My Product",
// new Dir(@"%ProgramFiles%/My Company/My Product",
new Dir(@"%LocalAppData%/My Company/My Product",
new File("setup.cs")));
// project.DefaultFeature = mainFeature;
project.PreserveTempFiles = true;
project.GUID = new Guid("6fe30b47-2577-43ad-9095-1861ba25889c");
project.BuildMsi();
}
static void Issue_298()
{
var project = new Project("MyProduct",
new Dir(@"%ProgramFiles%\My Company\My Product",
new File("setup.cs"),
new File("setup.cs")
))
{
Platform = Platform.x64,
GUID = new Guid("6fe30b47-2577-43ad-9095-1861ba25889b")
};
project.AddRegValue(new RegValue(RegistryHive.LocalMachine, @"Software\test", "foo_value", "bar") { Win64 = false });
project.AddRegValue(new RegValue(RegistryHive.LocalMachine, @"Software\test", "foo_value", "bar") { Win64 = false });
// new RegValue(Feature, RegistryHive.LocalMachine, @"SOFTWARE\Microsoft\Internet Explorer\Main\FeatureControl\FEATURE_BROWSER_EMULATION", "WebBrowserContainer", 11000) { Win64 = false },
// new RegValue(Feature, RegistryHive.LocalMachine, @"SOFTWARE\Microsoft\Internet Explorer\Main\FeatureControl\FEATURE_96DPI_PIXEL", "WebBrowserContainer", 1) { Win64 = false },
// Compiler.LightOptions += " -sice:ICE80";
project.PreserveTempFiles = true;
project.BuildMsi();
}
static void Issue_298b()
{
var project =
new Project("MyProduct",
new RegValue(RegistryHive.LocalMachine, @"Software\test", "foo_value", "bar") { Win64 = false },
new RegValue(RegistryHive.LocalMachine, @"Software\test", "foo_value", "bar") { Win64 = true });
project.PreserveTempFiles = true;
project.Platform = Platform.x64;
project.BuildMsi();
// project.GUID = new Guid("6fe30b47-2577-43ad-9095-1861ba25889b");
// // Compiler.LightOptions += " -sice:ICE80";
// project.BuildMsiCmd();
}
#pragma warning disable
static public void Main()
{
// var file = @"E:\PrivateData\Galos\Projects\WixSharp\Source\src\WixSharp.Samples\Support\testpad\test_asm.dll";
// file = @"E:\PrivateData\Galos\Projects\WixSharp\Source\src\packages\Newtonsoft.Json.12.0.3\lib\net45\Newtonsoft.Json.dll";
//
// var asm = System.Reflection.Assembly.ReflectionOnlyLoad(System.IO.File.ReadAllBytes(file));
// HiTeach_MSI.Program.Main1(); return;
// MsiInstaller.MyMsi.Build(); return;
Issue_865(); return;
Issue_825(); return;
Issue_609(); return;
Issue_551(); return;
Issue_606(); return;
Issue_377(); return;
Issue_440(); return;
Issue_386(); return;
Issue_378(); return;
Issue_374(); return;
Issue_298(); return;
// Compiler.AutoGeneration.LegacyDefaultIdAlgorithm = true;
var serverFeature = new Feature("Server");
// var completeFeature = new Feature("Complete");
// completeFeature.Add(serverFeature);
Project project = new Project("TaxPacc",
// new LaunchCondition("CUSTOM_UI=\"true\" OR REMOVE=\"ALL\"", "Please run setup.exe instead."),
new Dir(@"%ProgramFiles%\TaxPacc",
new File("setup.cs")),
new Dir(serverFeature,
@"%CommonAppDataFolder%\TaxPacc\Server",
new DirPermission("serviceaccountusername", "serviceaccountdomain", GenericPermission.All)
));
project.UI = WUI.WixUI_FeatureTree;
project.PreserveTempFiles = true;
project.BuildMsiCmd();
}
static public void Main1()
{
var project = new ManagedProject("IsUninstallTest",
new Dir(@"%ProgramFiles%\UninstallTest",
new File(@"files\setup.cs")));
project.AfterInstall += Project_AfterInstall;
project.PreserveTempFiles = true;
project.BuildWxs();
}
private static void Project_AfterInstall(SetupEventArgs e)
{
MessageBox.Show("Is Uninstalling: " + e.IsUninstalling);
if (e.IsUninstalling)
{
// e.IsUninstalling is always false if the uninstall is triggered by running the msi again
// and clicking Remove in the maintenance dialog
}
}
static public void Main2()
{
var project = new ManagedProject("MyProduct",
new Dir(@"C:\My Company\My Product",
new File("setup.cs")));
project.ManagedUI = new ManagedUI();
project.ManagedUI.InstallDialogs.Add(Dialogs.Progress)
.Add(Dialogs.Exit);
project.ManagedUI.ModifyDialogs.Add(Dialogs.Progress)
.Add(Dialogs.Exit);
project.UIInitialized += (SetupEventArgs e) =>
{
if (e.IsInstalling && !e.IsUpgrading)
{
e.Session["ALLUSERS"] = "2";
if (MessageBox.Show("Install for All?", e.ProductName, MessageBoxButtons.YesNo) == DialogResult.Yes)
e.Session["MSIINSTALLPERUSER"] = "0";
else
e.Session["MSIINSTALLPERUSER"] = "1";
}
};
project.BuildMsi();
}
static public void Main3()
{
var application = new Feature("Application") { Name = "Application", Description = "Application" };
var drivers = new Feature("Drivers") { Name = "Drivers", Description = "Drivers", AttributesDefinition = $"Display = {FeatureDisplay.expand}" };
var driver1 = new Feature("Driver 1") { Name = "Driver 1", Description = "Driver 1", IsEnabled = false };
var driver2 = new Feature("Driver 2") { Name = "Driver 2", Description = "Driver 2" };
var project =
new ManagedProject("MyProduct",
new Dir(@"%ProgramFiles%\My Company\My Product",
new File(application, @"Files\Bin\MyApp.exe"),
new Dir("Drivers",
new Dir("Driver1",
new File(driver1, @"Files\Docs\Manual.txt")),
new Dir("Driver2",
new File(driver2, @"Files\Docs\Manual.txt")))));
// project.Package.AttributesDefinition = "InstallPrivileges=elevated;AdminImage=yes;InstallScope=perMachine";
// project.UI = WUI.WixUI_InstallDir;
project.ManagedUI = new ManagedUI();
project.ManagedUI.InstallDialogs.Add(Dialogs.Welcome)
.Add(Dialogs.Features)
.Add(Dialogs.InstallDir)
.Add(Dialogs.Progress)
.Add(Dialogs.Exit);
// Removing the entry dialog
project.ManagedUI.ModifyDialogs.Add(Dialogs.MaintenanceType)
.Add(Dialogs.Features)
.Add(Dialogs.Progress)
.Add(Dialogs.Exit);
project.GUID = new Guid("6f330b47-2577-43ad-9095-1861ba25889b");
drivers.Add(driver1);
drivers.Add(driver2);
project.PreserveTempFiles = true;
project.BuildMsi();
}
}
| |
// This file is part of YamlDotNet - A .NET library for YAML.
// Copyright (c) Antoine Aubry and contributors
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
// of the Software, and to permit persons to whom the Software is furnished to do
// so, subject to the following conditions:
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using YamlDotNet.Core;
using YamlDotNet.Core.Events;
namespace YamlDotNet.RepresentationModel
{
/// <summary>
/// Represents a YAML document.
/// </summary>
[Serializable]
public class YamlDocument
{
/// <summary>
/// Gets or sets the root node.
/// </summary>
/// <value>The root node.</value>
public YamlNode RootNode { get; private set; }
/// <summary>
/// Initializes a new instance of the <see cref="YamlDocument"/> class.
/// </summary>
public YamlDocument(YamlNode rootNode)
{
RootNode = rootNode;
}
/// <summary>
/// Initializes a new instance of the <see cref="YamlDocument"/> class with a single scalar node.
/// </summary>
public YamlDocument(string rootNode)
{
RootNode = new YamlScalarNode(rootNode);
}
/// <summary>
/// Initializes a new instance of the <see cref="YamlDocument"/> class.
/// </summary>
internal YamlDocument(IParser parser)
{
var state = new DocumentLoadingState();
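// A document is a single root node read between the DocumentStart and DocumentEnd events; aliases are resolved once parsing completes.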
parser.Expect<DocumentStart>();
while (!parser.Accept<DocumentEnd>())
{
Debug.Assert(RootNode == null);
RootNode = YamlNode.ParseNode(parser, state);
if (RootNode is YamlAliasNode)
{
throw new YamlException();
}
}
state.ResolveAliases();
parser.Expect<DocumentEnd>();
}
/// <summary>
/// Visitor that assigns anchors to nodes that are referenced more than once.
/// Existing anchors are preserved as much as possible.
/// </summary>
private class AnchorAssigningVisitor : YamlVisitorBase
{
private readonly HashSet<string> existingAnchors = new HashSet<string>();
/// <summary>
/// Key: Node, Value: IsDuplicate
/// </summary>
private readonly Dictionary<YamlNode, bool> visitedNodes = new Dictionary<YamlNode, bool>();
public void AssignAnchors(YamlDocument document)
{
existingAnchors.Clear();
visitedNodes.Clear();
document.Accept(this);
var random = new Random();
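// Nodes referenced more than once need an anchor; keep an existing anchor when it is still unique, otherwise generate a fresh random numeric one.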
foreach (var visitedNode in visitedNodes)
{
if (visitedNode.Value)
{
string anchor;
// If the existing anchor is not already used, we can have it
if (!string.IsNullOrEmpty(visitedNode.Key.Anchor) && !existingAnchors.Contains(visitedNode.Key.Anchor))
{
anchor = visitedNode.Key.Anchor;
}
else
{
do
{
anchor = random.Next().ToString(CultureInfo.InvariantCulture);
} while (existingAnchors.Contains(anchor));
}
existingAnchors.Add(anchor);
visitedNode.Key.Anchor = anchor;
}
}
}
/// <summary>
/// Returns true the first time a node is found to be referenced more than once (i.e. on its second visit); otherwise false.
/// </summary>
private bool VisitNodeAndFindDuplicates(YamlNode node)
{
bool isDuplicate;
if (visitedNodes.TryGetValue(node, out isDuplicate))
{
if (!isDuplicate)
{
visitedNodes[node] = true;
}
return !isDuplicate;
}
else
{
visitedNodes.Add(node, false);
return false;
}
}
public override void Visit(YamlScalarNode scalar)
{
VisitNodeAndFindDuplicates(scalar);
}
public override void Visit(YamlMappingNode mapping)
{
if (!VisitNodeAndFindDuplicates(mapping))
base.Visit(mapping);
}
public override void Visit(YamlSequenceNode sequence)
{
if (!VisitNodeAndFindDuplicates(sequence))
base.Visit(sequence);
}
}
private void AssignAnchors()
{
var visitor = new AnchorAssigningVisitor();
visitor.AssignAnchors(this);
}
internal void Save(IEmitter emitter, bool assignAnchors = true)
{
if (assignAnchors)
{
AssignAnchors();
}
emitter.Emit(new DocumentStart());
RootNode.Save(emitter, new EmitterState());
emitter.Emit(new DocumentEnd(false));
}
/// <summary>
/// Accepts the specified visitor by calling the appropriate Visit method on it.
/// </summary>
/// <param name="visitor">
/// A <see cref="IYamlVisitor"/>.
/// </param>
public void Accept(IYamlVisitor visitor)
{
visitor.Visit(this);
}
/// <summary>
/// Gets all nodes from the document.
/// <see cref="MaximumRecursionLevelReachedException"/> is thrown if an infinite recursion is detected.
/// </summary>
public IEnumerable<YamlNode> AllNodes
{
get
{
return RootNode.AllNodes;
}
}
}
}
| |
using System;
using System.Linq;
class Capture
{
void M(int i)
{
var x = i;
Action a = () =>
{
Use(i);
i = 1;
Use(i);
x = 2;
Use(x);
var y = x;
Action b = () =>
{
Use(x);
Use(y);
};
b();
Use(y);
};
var z = 0;
Action c = () => { z = 1; };
c();
Use(i);
Use(x);
Use(z);
z = 0;
a();
Use(i);
Use(x);
Use(z);
x = 0;
M(x);
Use(x);
M2(a);
Use(x);
}
void M2(Action a)
{
Action b = () => { a = () => { }; };
b();
a();
}
void LibraryMethodDelegateUpdate(IQueryable<string> strings)
{
var i = 0;
Func<string, int> e = _ => i++;
strings.Select(e).ToArray();
Use(i);
}
void LibraryMethodDelegateRead(string[] strings)
{
var c = 'a';
var az = strings.Where(s => s.Contains(c)).ToArray();
string M(string s) { Console.WriteLine(c); return s; };
strings.Select(M).ToArray();
}
void LibraryMethodDelegateExpressionUpdate(IQueryable<string> strings)
{
var i = 0;
System.Linq.Expressions.Expression<Func<string, int>> e = _ => Inc(ref i);
strings.Select(e).ToArray();
Use(i);
}
static int Inc(ref int i) => i++;
void LibraryMethodDelegateExpressionRead(IQueryable<string> strings)
{
var b = true;
System.Linq.Expressions.Expression<Func<string, bool>> e = _ => b;
strings.Where(e).ToArray();
}
void DelegateTest()
{
int fn(D d) => d();
var y = 12;
fn(() =>
{
var x = y;
return x;
});
var z = 12; // Should *not* get an SSA definition, but currently does because it is considered live via the lambda
fn(() =>
{
z = 0;
return z;
});
}
delegate int D();
void NestedFunctionsTest()
{
var a = 12;
void M1()
{
var x = a;
Use(x);
};
M1();
var b = 12; // Should *not* get an SSA definition, but currently does because it is considered live via the lambda
void M2()
{
b = 0;
Use(b);
};
M2();
var c = 12; // Should *not* get an SSA definition, but does because the update via the call to `M3` is not considered certain
void M3()
{
c = 0;
Use(c);
};
M3();
Use(c);
var d = 12; // Should *not* get an SSA definition, but does because the update via the call to `M4` is not considered certain
void M4()
{
d = 0;
};
M4();
Use(d);
var e = 12;
void M5()
{
Use(e);
e = 0; // Should *not* get an SSA definition (`e` is never read)
}
var f = 12;
Use(f);
void M6()
{
f = 0; // Should *not* get an SSA definition (`f` is not read after `M6` is called), but currently does because it is considered live via the call to `M6`
}
M6();
var g = 12; // Should *not* get an SSA definition (`M7` is never called)
void M7()
{
Use(g);
};
var h = 12; // Should *not* get an SSA definition
void M8()
{
h = 0;
void M9()
{
h = 0;
}
M9();
Use(h);
}
void M10()
{
var i = 0; // Should *not* get an SSA definition, but does because of simplified implicit-read analysis
void M11()
{
Use(i);
}
M10(); // Not an implicit read of `i` as a new copy is created, but not detected by simplified analysis
i = 1;
M11();
}
}
void EventsTest()
{
void EventFromSource()
{
int i = 0;
MyEventHandler eh = () => Use(i);
this.Click += eh;
Click();
i = 0; // Should *not* get an SSA definition (`Click` is not called after addition)
MyEventHandler eh2 = () => Use(i);
this.Click += eh2;
}
void EventFromLibrary()
{
var i = 0; // Should *not* get an SSA definition, but does because of the imprecise implicit read below
using (var p = new System.Diagnostics.Process())
{
EventHandler exited = (object s, EventArgs e) => Use(i);
p.Exited += exited; // Imprecise implicit read of `i`; an actual read would happen in a call to e.g. `p.Start()` or `p.OnExited()`
}
}
}
public delegate void MyEventHandler();
public event MyEventHandler Click;
public static void Use<T>(T u) { }
}
class C
{
void M1()
{
int i = 0;
void M2() => System.Console.WriteLine(i);
i = 1;
M2();
void M3() { i = 2; };
M3();
System.Console.WriteLine(i);
}
void M2()
{
int i = 0;
void CaptureWrite()
{
i = 1;
}
void CaptureAndRef(ref int j)
{
CaptureWrite();
j = 2;
}
CaptureAndRef(ref i); // explicit definition only (no call definition)
System.Console.WriteLine(i);
}
}
| |
using UnityEngine;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using Assets.PostProcessing.Runtime;
using Assets.PostProcessing.Runtime.Models;
using Assets.PostProcessing.Runtime.Utils;
namespace UnityEditor.PostProcessing
{
//[CanEditMultipleObjects]
[CustomEditor(typeof(PostProcessingProfile))]
public class PostProcessingInspector : Editor
{
static GUIContent s_PreviewTitle = new GUIContent("Monitors");
PostProcessingProfile m_ConcreteTarget
{
get { return target as PostProcessingProfile; }
}
int m_CurrentMonitorID
{
get { return m_ConcreteTarget.monitors.currentMonitorID; }
set { m_ConcreteTarget.monitors.currentMonitorID = value; }
}
List<PostProcessingMonitor> m_Monitors;
GUIContent[] m_MonitorNames;
Dictionary<PostProcessingModelEditor, PostProcessingModel> m_CustomEditors = new Dictionary<PostProcessingModelEditor, PostProcessingModel>();
public bool IsInteractivePreviewOpened { get; private set; }
void OnEnable()
{
if (target == null)
return;
// Aggregate custom post-fx editors
var assembly = Assembly.GetAssembly(typeof(PostProcessingInspector));
var editorTypes = assembly.GetTypes()
.Where(x => x.IsDefined(typeof(PostProcessingModelEditorAttribute), false));
var customEditors = new Dictionary<Type, PostProcessingModelEditor>();
foreach (var editor in editorTypes)
{
var attr = (PostProcessingModelEditorAttribute)editor.GetCustomAttributes(typeof(PostProcessingModelEditorAttribute), false)[0];
var effectType = attr.type;
var alwaysEnabled = attr.alwaysEnabled;
var editorInst = (PostProcessingModelEditor)Activator.CreateInstance(editor);
editorInst.alwaysEnabled = alwaysEnabled;
editorInst.profile = target as PostProcessingProfile;
editorInst.inspector = this;
customEditors.Add(effectType, editorInst);
}
// ... and corresponding models
var baseType = target.GetType();
var property = serializedObject.GetIterator();
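// Walk the profile's serialized properties and bind each post-processing model to the custom editor registered for its concrete type.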
while (property.Next(true))
{
if (!property.hasChildren)
continue;
var type = baseType;
var srcObject = ReflectionUtils.GetFieldValueFromPath(serializedObject.targetObject, ref type, property.propertyPath);
if (srcObject == null)
continue;
PostProcessingModelEditor editor;
if (customEditors.TryGetValue(type, out editor))
{
var effect = (PostProcessingModel)srcObject;
if (editor.alwaysEnabled)
effect.enabled = editor.alwaysEnabled;
m_CustomEditors.Add(editor, effect);
editor.target = effect;
editor.serializedProperty = property.Copy();
editor.OnPreEnable();
}
}
// Prepare monitors
m_Monitors = new List<PostProcessingMonitor>();
var monitors = new List<PostProcessingMonitor>
{
new HistogramMonitor(),
new WaveformMonitor(),
new ParadeMonitor(),
new VectorscopeMonitor()
};
var monitorNames = new List<GUIContent>();
foreach (var monitor in monitors)
{
if (monitor.IsSupported())
{
monitor.Init(m_ConcreteTarget.monitors, this);
m_Monitors.Add(monitor);
monitorNames.Add(monitor.GetMonitorTitle());
}
}
m_MonitorNames = monitorNames.ToArray();
if (m_Monitors.Count > 0)
m_ConcreteTarget.monitors.onFrameEndEditorOnly = OnFrameEnd;
}
void OnDisable()
{
if (m_CustomEditors != null)
{
foreach (var editor in m_CustomEditors.Keys)
editor.OnDisable();
m_CustomEditors.Clear();
}
if (m_Monitors != null)
{
foreach (var monitor in m_Monitors)
monitor.Dispose();
m_Monitors.Clear();
}
if (m_ConcreteTarget != null)
m_ConcreteTarget.monitors.onFrameEndEditorOnly = null;
}
void OnFrameEnd(RenderTexture source)
{
if (!IsInteractivePreviewOpened)
return;
if (m_CurrentMonitorID < m_Monitors.Count)
m_Monitors[m_CurrentMonitorID].OnFrameData(source);
IsInteractivePreviewOpened = false;
}
public override void OnInspectorGUI()
{
serializedObject.Update();
// Handles undo/redo events first (before they get used by the editors' widgets)
var e = Event.current;
if (e.type == EventType.ValidateCommand && e.commandName == "UndoRedoPerformed")
{
foreach (var editor in m_CustomEditors)
editor.Value.OnValidate();
}
if (!m_ConcreteTarget.debugViews.IsModeActive(BuiltinDebugViewsModel.Mode.None))
EditorGUILayout.HelpBox("A debug view is currently enabled. Changes made to an effect might not be visible.", MessageType.Info);
foreach (var editor in m_CustomEditors)
{
EditorGUI.BeginChangeCheck();
editor.Key.OnGUI();
if (EditorGUI.EndChangeCheck())
editor.Value.OnValidate();
}
serializedObject.ApplyModifiedProperties();
}
public override GUIContent GetPreviewTitle()
{
return s_PreviewTitle;
}
public override bool HasPreviewGUI()
{
return GraphicsUtils.supportsDX11 && m_Monitors.Count > 0;
}
public override void OnPreviewSettings()
{
using (new EditorGUILayout.HorizontalScope())
{
if (m_CurrentMonitorID < m_Monitors.Count)
m_Monitors[m_CurrentMonitorID].OnMonitorSettings();
GUILayout.Space(5);
m_CurrentMonitorID = EditorGUILayout.Popup(m_CurrentMonitorID, m_MonitorNames, FxStyles.preDropdown, GUILayout.MaxWidth(100f));
}
}
public override void OnInteractivePreviewGUI(Rect r, GUIStyle background)
{
IsInteractivePreviewOpened = true;
if (m_CurrentMonitorID < m_Monitors.Count)
m_Monitors[m_CurrentMonitorID].OnMonitorGUI(r);
}
}
}
| |
using System;
using System.IO;
using System.Net;
using NServiceKit.Common.Web;
using NServiceKit.Service;
using NServiceKit.ServiceClient.Web;
using NServiceKit.ServiceHost;
using NServiceKit.Text;
using NServiceKit.WebHost.Endpoints.Support.Mocks;
using NServiceKit.WebHost.Endpoints.Tests.Mocks;
namespace NServiceKit.WebHost.Endpoints.Tests.Support
{
/// <summary>A direct service client.</summary>
public class DirectServiceClient : IServiceClient, IRestClient
{
ServiceManager ServiceManager { get; set; }
readonly HttpRequestMock httpReq = new HttpRequestMock();
readonly HttpResponseMock httpRes = new HttpResponseMock();
/// <summary>Initializes a new instance of the NServiceKit.WebHost.Endpoints.Tests.Support.DirectServiceClient class.</summary>
///
/// <param name="serviceManager">Manager for service.</param>
public DirectServiceClient(ServiceManager serviceManager)
{
this.ServiceManager = serviceManager;
}
/// <summary>Sends an one way.</summary>
///
/// <param name="request">The request.</param>
public void SendOneWay(object request)
{
ServiceManager.Execute(request);
}
/// <summary>Sends an one way.</summary>
///
/// <param name="relativeOrAbsoluteUrl">URL of the relative or absolute.</param>
/// <param name="request"> The request.</param>
public void SendOneWay(string relativeOrAbsoluteUrl, object request)
{
ServiceManager.Execute(request);
}
private bool ApplyRequestFilters<TResponse>(object request)
{
if (EndpointHost.ApplyRequestFilters(httpReq, httpRes, request))
{
ThrowIfError<TResponse>(httpRes);
return true;
}
return false;
}
private void ThrowIfError<TResponse>(HttpResponseMock httpRes)
{
if (httpRes.StatusCode >= 400)
{
var webEx = new WebServiceException("WebServiceException, StatusCode: " + httpRes.StatusCode) {
StatusCode = httpRes.StatusCode,
StatusDescription = httpRes.StatusDescription,
};
try
{
var deserializer = EndpointHost.AppHost.ContentTypeFilters.GetStreamDeserializer(httpReq.ResponseContentType);
webEx.ResponseDto = deserializer(typeof(TResponse), httpRes.OutputStream);
}
catch (Exception ex)
{
Console.WriteLine(ex);
}
throw webEx;
}
}
private bool ApplyResponseFilters<TResponse>(object response)
{
if (EndpointHost.ApplyResponseFilters(httpReq, httpRes, response))
{
ThrowIfError<TResponse>(httpRes);
return true;
}
return false;
}
/// <summary>Send this message.</summary>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="request">The request.</param>
///
/// <returns>A TResponse.</returns>
public TResponse Send<TResponse>(object request)
{
httpReq.HttpMethod = HttpMethods.Post;
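// A request filter may handle the request itself; if so, skip service execution and return the default response (error statuses written by filters are rethrown as WebServiceException).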
if (ApplyRequestFilters<TResponse>(request)) return default(TResponse);
var response = ServiceManager.ServiceController.Execute(request,
new HttpRequestContext(httpReq, httpRes, request, EndpointAttributes.HttpPost));
if (ApplyResponseFilters<TResponse>(response)) return (TResponse)response;
return (TResponse)response;
}
/// <summary>Send this message.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="request">The request.</param>
///
/// <returns>A TResponse.</returns>
public TResponse Send<TResponse>(IReturn<TResponse> request)
{
throw new NotImplementedException();
}
/// <summary>Send this message.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <param name="request">The request to get.</param>
public void Send(IReturnVoid request)
{
throw new NotImplementedException();
}
/// <summary>Gets.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="request">The request.</param>
///
/// <returns>A TResponse.</returns>
public TResponse Get<TResponse>(IReturn<TResponse> request)
{
throw new NotImplementedException();
}
/// <summary>Gets the given request.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <param name="request">The request to get.</param>
public void Get(IReturnVoid request)
{
throw new NotImplementedException();
}
/// <summary>Gets.</summary>
///
/// <exception cref="ArgumentException">Thrown when one or more arguments have unsupported or illegal values.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="relativeOrAbsoluteUrl">URL of the relative or absolute.</param>
///
/// <returns>A TResponse.</returns>
public TResponse Get<TResponse>(string relativeOrAbsoluteUrl)
{
httpReq.HttpMethod = HttpMethods.Get;
var requestTypeName = typeof(TResponse).Namespace + "." + relativeOrAbsoluteUrl;
var requestType = typeof (TResponse).Assembly.GetType(requestTypeName);
if (requestType == null)
throw new ArgumentException("Type not found: " + requestTypeName);
var request = requestType.CreateInstance();
if (ApplyRequestFilters<TResponse>(request)) return default(TResponse);
var response = ServiceManager.ServiceController.Execute(request,
new HttpRequestContext(httpReq, httpRes, request, EndpointAttributes.HttpGet));
if (ApplyResponseFilters<TResponse>(response)) return (TResponse)response;
return (TResponse)response;
}
/// <summary>Deletes the given relativeOrAbsoluteUrl.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="request">The request.</param>
///
/// <returns>A TResponse.</returns>
public TResponse Delete<TResponse>(IReturn<TResponse> request)
{
throw new NotImplementedException();
}
/// <summary>Deletes the given request.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <param name="request">The request to delete.</param>
public void Delete(IReturnVoid request)
{
throw new NotImplementedException();
}
/// <summary>Deletes the given relativeOrAbsoluteUrl.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="relativeOrAbsoluteUrl">URL of the relative or absolute.</param>
///
/// <returns>A TResponse.</returns>
public TResponse Delete<TResponse>(string relativeOrAbsoluteUrl)
{
throw new NotImplementedException();
}
/// <summary>Post this message.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="request">The request.</param>
///
/// <returns>A TResponse.</returns>
public TResponse Post<TResponse>(IReturn<TResponse> request)
{
throw new NotImplementedException();
}
/// <summary>Post this message.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <param name="request">The request.</param>
public void Post(IReturnVoid request)
{
throw new NotImplementedException();
}
/// <summary>Post this message.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="relativeOrAbsoluteUrl">URL of the relative or absolute.</param>
/// <param name="request"> The request.</param>
///
/// <returns>A TResponse.</returns>
public TResponse Post<TResponse>(string relativeOrAbsoluteUrl, object request)
{
throw new NotImplementedException();
}
/// <summary>Puts.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="request">The request.</param>
///
/// <returns>A TResponse.</returns>
public TResponse Put<TResponse>(IReturn<TResponse> request)
{
throw new NotImplementedException();
}
/// <summary>Puts the given request.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <param name="request">The request to put.</param>
public void Put(IReturnVoid request)
{
throw new NotImplementedException();
}
/// <summary>Puts.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="relativeOrAbsoluteUrl">URL of the relative or absolute.</param>
/// <param name="request"> The request.</param>
///
/// <returns>A TResponse.</returns>
public TResponse Put<TResponse>(string relativeOrAbsoluteUrl, object request)
{
throw new NotImplementedException();
}
/// <summary>Patches.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="request">The request.</param>
///
/// <returns>A TResponse.</returns>
public TResponse Patch<TResponse>(IReturn<TResponse> request)
{
throw new NotImplementedException();
}
/// <summary>Patches the given request.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <param name="request">The request.</param>
public void Patch(IReturnVoid request)
{
throw new NotImplementedException();
}
/// <summary>Patches.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="relativeOrAbsoluteUrl">URL of the relative or absolute.</param>
/// <param name="request"> The request.</param>
///
/// <returns>A TResponse.</returns>
public TResponse Patch<TResponse>(string relativeOrAbsoluteUrl, object request)
{
throw new NotImplementedException();
}
/// <summary>Posts a file.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="relativeOrAbsoluteUrl">URL of the relative or absolute.</param>
/// <param name="fileToUpload"> The file to upload.</param>
/// <param name="mimeType"> Type of the mime.</param>
///
/// <returns>A TResponse.</returns>
public TResponse PostFile<TResponse>(string relativeOrAbsoluteUrl, FileInfo fileToUpload, string mimeType)
{
throw new NotImplementedException();
}
/// <summary>Custom method.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <param name="httpVerb">The HTTP verb.</param>
/// <param name="request"> The request.</param>
public void CustomMethod(string httpVerb, IReturnVoid request)
{
throw new NotImplementedException();
}
/// <summary>Custom method.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="httpVerb">The HTTP verb.</param>
/// <param name="request"> The request.</param>
///
/// <returns>A TResponse.</returns>
public TResponse CustomMethod<TResponse>(string httpVerb, IReturn<TResponse> request)
{
throw new NotImplementedException();
}
/// <summary>Heads the given request.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <param name="request">The request.</param>
///
/// <returns>A HttpWebResponse.</returns>
public HttpWebResponse Head(IReturn request)
{
throw new NotImplementedException();
}
/// <summary>Heads.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <param name="relativeOrAbsoluteUrl">URL of the relative or absolute.</param>
///
/// <returns>A HttpWebResponse.</returns>
public HttpWebResponse Head(string relativeOrAbsoluteUrl)
{
throw new NotImplementedException();
}
/// <summary>Posts a file.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="relativeOrAbsoluteUrl">URL of the relative or absolute.</param>
/// <param name="fileToUpload"> The file to upload.</param>
/// <param name="fileInfo"> Information describing the file.</param>
/// <param name="mimeType"> Type of the mime.</param>
///
/// <returns>A TResponse.</returns>
public TResponse PostFile<TResponse>(string relativeOrAbsoluteUrl, Stream fileToUpload, string fileInfo, string mimeType)
{
throw new NotImplementedException();
}
/// <summary>Sends the asynchronous.</summary>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="request"> The request.</param>
/// <param name="onSuccess">The on success.</param>
/// <param name="onError"> The on error.</param>
public void SendAsync<TResponse>(object request, Action<TResponse> onSuccess, Action<TResponse, Exception> onError)
{
var response = default(TResponse);
try
{
try
{
if (ApplyRequestFilters<TResponse>(request))
{
onSuccess(default(TResponse));
return;
}
}
catch (Exception ex)
{
onError(default(TResponse), ex);
return;
}
response = this.Send<TResponse>(request);
try
{
if (ApplyResponseFilters<TResponse>(response))
{
onSuccess(response);
return;
}
}
catch (Exception ex)
{
onError(response, ex);
return;
}
onSuccess(response);
}
catch (Exception ex)
{
if (onError != null)
{
onError(response, ex);
return;
}
Console.WriteLine("Error: " + ex.Message);
}
}
/// <summary>Sets the credentials.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <param name="userName">Name of the user.</param>
/// <param name="password">The password.</param>
public void SetCredentials(string userName, string password)
{
throw new NotImplementedException();
}
/// <summary>Gets the asynchronous.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="request"> The request.</param>
/// <param name="onSuccess">The on success.</param>
/// <param name="onError"> The on error.</param>
public void GetAsync<TResponse>(IReturn<TResponse> request, Action<TResponse> onSuccess, Action<TResponse, Exception> onError)
{
throw new NotImplementedException();
}
/// <summary>Gets the asynchronous.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="relativeOrAbsoluteUrl">URL of the relative or absolute.</param>
/// <param name="onSuccess"> The on success.</param>
/// <param name="onError"> The on error.</param>
public void GetAsync<TResponse>(string relativeOrAbsoluteUrl, Action<TResponse> onSuccess, Action<TResponse, Exception> onError)
{
throw new NotImplementedException();
}
/// <summary>Deletes the asynchronous.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="relativeOrAbsoluteUrl">URL of the relative or absolute.</param>
/// <param name="onSuccess"> The on success.</param>
/// <param name="onError"> The on error.</param>
public void DeleteAsync<TResponse>(string relativeOrAbsoluteUrl, Action<TResponse> onSuccess, Action<TResponse, Exception> onError)
{
throw new NotImplementedException();
}
/// <summary>Deletes the asynchronous.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="request"> The request.</param>
/// <param name="onSuccess">The on success.</param>
/// <param name="onError"> The on error.</param>
public void DeleteAsync<TResponse>(IReturn<TResponse> request, Action<TResponse> onSuccess, Action<TResponse, Exception> onError)
{
throw new NotImplementedException();
}
/// <summary>Posts the asynchronous.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="request"> The request.</param>
/// <param name="onSuccess">The on success.</param>
/// <param name="onError"> The on error.</param>
public void PostAsync<TResponse>(IReturn<TResponse> request, Action<TResponse> onSuccess, Action<TResponse, Exception> onError)
{
throw new NotImplementedException();
}
/// <summary>Posts the asynchronous.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="relativeOrAbsoluteUrl">URL of the relative or absolute.</param>
/// <param name="request"> The request.</param>
/// <param name="onSuccess"> The on success.</param>
/// <param name="onError"> The on error.</param>
public void PostAsync<TResponse>(string relativeOrAbsoluteUrl, object request, Action<TResponse> onSuccess, Action<TResponse, Exception> onError)
{
throw new NotImplementedException();
}
/// <summary>Sends a PUT request asynchronously.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="request">  The request DTO.</param>
/// <param name="onSuccess">Callback invoked when the request succeeds.</param>
/// <param name="onError">  Callback invoked when the request fails.</param>
public void PutAsync<TResponse>(IReturn<TResponse> request, Action<TResponse> onSuccess, Action<TResponse, Exception> onError)
{
throw new NotImplementedException();
}
/// <summary>Sends a PUT request with the given body to the given URL asynchronously.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="relativeOrAbsoluteUrl">The relative or absolute URL of the resource.</param>
/// <param name="request">              The request body.</param>
/// <param name="onSuccess">            Callback invoked when the request succeeds.</param>
/// <param name="onError">              Callback invoked when the request fails.</param>
public void PutAsync<TResponse>(string relativeOrAbsoluteUrl, object request, Action<TResponse> onSuccess, Action<TResponse, Exception> onError)
{
throw new NotImplementedException();
}
/// <summary>Sends a request asynchronously using a custom HTTP verb.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="httpVerb"> The HTTP verb to use.</param>
/// <param name="request">  The request DTO.</param>
/// <param name="onSuccess">Callback invoked when the request succeeds.</param>
/// <param name="onError">  Callback invoked when the request fails.</param>
public void CustomMethodAsync<TResponse>(string httpVerb, IReturn<TResponse> request, Action<TResponse> onSuccess, Action<TResponse, Exception> onError)
{
throw new NotImplementedException();
}
/// <summary>Cancels any pending asynchronous request.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
public void CancelAsync()
{
throw new NotImplementedException();
}
/// <summary>Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.</summary>
public void Dispose() { }
/// <summary>Posts a file along with a request DTO.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="relativeOrAbsoluteUrl">URL of the relative or absolute.</param>
/// <param name="fileToUpload"> The file to upload.</param>
/// <param name="request"> The request.</param>
///
/// <returns>The deserialized response.</returns>
public TResponse PostFileWithRequest<TResponse>(string relativeOrAbsoluteUrl, FileInfo fileToUpload, object request)
{
throw new NotImplementedException();
}
/// <summary>Posts a file stream along with a request DTO.</summary>
///
/// <exception cref="NotImplementedException">Thrown when the requested operation is unimplemented.</exception>
///
/// <typeparam name="TResponse">Type of the response.</typeparam>
/// <param name="relativeOrAbsoluteUrl">URL of the relative or absolute.</param>
/// <param name="fileToUpload"> The file to upload.</param>
/// <param name="fileName"> Filename of the file.</param>
/// <param name="request"> The request.</param>
///
/// <returns>The deserialized response.</returns>
public TResponse PostFileWithRequest<TResponse>(string relativeOrAbsoluteUrl, Stream fileToUpload, string fileName, object request)
{
throw new NotImplementedException();
}
}
}
| |
// ReSharper disable All
using System.Collections.Generic;
using System.Data;
using System.Dynamic;
using System.Linq;
using Frapid.Configuration;
using Frapid.DataAccess;
using Frapid.DataAccess.Models;
using Frapid.DbPolicy;
using Frapid.Framework.Extensions;
using Npgsql;
using Frapid.NPoco;
using Serilog;
namespace Frapid.Config.DataAccess
{
/// <summary>
/// Provides simplified data access features to perform SCRUD operations on the database table "config.kanbans".
/// </summary>
public class Kanban : DbAccess, IKanbanRepository
{
/// <summary>
/// The schema of this table. Returns literal "config".
/// </summary>
public override string _ObjectNamespace => "config";
/// <summary>
/// The schema unqualified name of this table. Returns literal "kanbans".
/// </summary>
public override string _ObjectName => "kanbans";
/// <summary>
/// Login id of application user accessing this table.
/// </summary>
public long _LoginId { get; set; }
/// <summary>
/// User id of application user accessing this table.
/// </summary>
public int _UserId { get; set; }
/// <summary>
/// The name of the database against which queries are executed.
/// </summary>
public string _Catalog { get; set; }
/// <summary>
/// Performs SQL count on the table "config.kanbans".
/// </summary>
/// <returns>Returns the number of rows of the table "config.kanbans".</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public long Count()
{
if (string.IsNullOrWhiteSpace(this._Catalog))
{
return 0;
}
if (!this.SkipValidation)
{
if (!this.Validated)
{
this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
}
if (!this.HasAccess)
{
Log.Information("Access to count entity \"Kanban\" was denied to the user with Login ID {LoginId}", this._LoginId);
throw new UnauthorizedException("Access is denied.");
}
}
const string sql = "SELECT COUNT(*) FROM config.kanbans;";
return Factory.Scalar<long>(this._Catalog, sql);
}
/// <summary>
/// Executes a select query on the table "config.kanbans" to return all instances of the "Kanban" class.
/// </summary>
/// <returns>Returns non-live, non-mapped instances of the "Kanban" class.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public IEnumerable<Frapid.Config.Entities.Kanban> GetAll()
{
if (string.IsNullOrWhiteSpace(this._Catalog))
{
return null;
}
if (!this.SkipValidation)
{
if (!this.Validated)
{
this.Validate(AccessTypeEnum.ExportData, this._LoginId, this._Catalog, false);
}
if (!this.HasAccess)
{
Log.Information("Access to the export entity \"Kanban\" was denied to the user with Login ID {LoginId}", this._LoginId);
throw new UnauthorizedException("Access is denied.");
}
}
const string sql = "SELECT * FROM config.kanbans ORDER BY kanban_id;";
return Factory.Get<Frapid.Config.Entities.Kanban>(this._Catalog, sql);
}
/// <summary>
/// Executes a select query on the table "config.kanbans" to return all instances of the "Kanban" class to export.
/// </summary>
/// <returns>Returns non-live, non-mapped instances of the "Kanban" class.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public IEnumerable<dynamic> Export()
{
if (string.IsNullOrWhiteSpace(this._Catalog))
{
return null;
}
if (!this.SkipValidation)
{
if (!this.Validated)
{
this.Validate(AccessTypeEnum.ExportData, this._LoginId, this._Catalog, false);
}
if (!this.HasAccess)
{
Log.Information("Access to the export entity \"Kanban\" was denied to the user with Login ID {LoginId}", this._LoginId);
throw new UnauthorizedException("Access is denied.");
}
}
const string sql = "SELECT * FROM config.kanbans ORDER BY kanban_id;";
return Factory.Get<dynamic>(this._Catalog, sql);
}
/// <summary>
/// Executes a select query on the table "config.kanbans" with a where filter on the column "kanban_id" to return a single instance of the "Kanban" class.
/// </summary>
/// <param name="kanbanId">The column "kanban_id" parameter used on where filter.</param>
/// <returns>Returns a non-live, non-mapped instance of "Kanban" class mapped to the database row.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public Frapid.Config.Entities.Kanban Get(long kanbanId)
{
if (string.IsNullOrWhiteSpace(this._Catalog))
{
return null;
}
if (!this.SkipValidation)
{
if (!this.Validated)
{
this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
}
if (!this.HasAccess)
{
Log.Information("Access to the get entity \"Kanban\" filtered by \"KanbanId\" with value {KanbanId} was denied to the user with Login ID {_LoginId}", kanbanId, this._LoginId);
throw new UnauthorizedException("Access is denied.");
}
}
const string sql = "SELECT * FROM config.kanbans WHERE kanban_id=@0;";
return Factory.Get<Frapid.Config.Entities.Kanban>(this._Catalog, sql, kanbanId).FirstOrDefault();
}
/// <summary>
/// Gets the first record of the table "config.kanbans".
/// </summary>
/// <returns>Returns a non-live, non-mapped instance of "Kanban" class mapped to the database row.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public Frapid.Config.Entities.Kanban GetFirst()
{
if (string.IsNullOrWhiteSpace(this._Catalog))
{
return null;
}
if (!this.SkipValidation)
{
if (!this.Validated)
{
this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
}
if (!this.HasAccess)
{
Log.Information("Access to the get the first record of entity \"Kanban\" was denied to the user with Login ID {_LoginId}", this._LoginId);
throw new UnauthorizedException("Access is denied.");
}
}
const string sql = "SELECT * FROM config.kanbans ORDER BY kanban_id LIMIT 1;";
return Factory.Get<Frapid.Config.Entities.Kanban>(this._Catalog, sql).FirstOrDefault();
}
/// <summary>
/// Gets the previous record of the table "config.kanbans" sorted by kanbanId.
/// </summary>
/// <param name="kanbanId">The column "kanban_id" parameter used to find the previous record.</param>
/// <returns>Returns a non-live, non-mapped instance of "Kanban" class mapped to the database row.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public Frapid.Config.Entities.Kanban GetPrevious(long kanbanId)
{
if (string.IsNullOrWhiteSpace(this._Catalog))
{
return null;
}
if (!this.SkipValidation)
{
if (!this.Validated)
{
this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
}
if (!this.HasAccess)
{
Log.Information("Access to the get the previous entity of \"Kanban\" by \"KanbanId\" with value {KanbanId} was denied to the user with Login ID {_LoginId}", kanbanId, this._LoginId);
throw new UnauthorizedException("Access is denied.");
}
}
const string sql = "SELECT * FROM config.kanbans WHERE kanban_id < @0 ORDER BY kanban_id DESC LIMIT 1;";
return Factory.Get<Frapid.Config.Entities.Kanban>(this._Catalog, sql, kanbanId).FirstOrDefault();
}
/// <summary>
/// Gets the next record of the table "config.kanbans" sorted by kanbanId.
/// </summary>
/// <param name="kanbanId">The column "kanban_id" parameter used to find the next record.</param>
/// <returns>Returns a non-live, non-mapped instance of "Kanban" class mapped to the database row.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public Frapid.Config.Entities.Kanban GetNext(long kanbanId)
{
if (string.IsNullOrWhiteSpace(this._Catalog))
{
return null;
}
if (!this.SkipValidation)
{
if (!this.Validated)
{
this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
}
if (!this.HasAccess)
{
Log.Information("Access to the get the next entity of \"Kanban\" by \"KanbanId\" with value {KanbanId} was denied to the user with Login ID {_LoginId}", kanbanId, this._LoginId);
throw new UnauthorizedException("Access is denied.");
}
}
const string sql = "SELECT * FROM config.kanbans WHERE kanban_id > @0 ORDER BY kanban_id LIMIT 1;";
return Factory.Get<Frapid.Config.Entities.Kanban>(this._Catalog, sql, kanbanId).FirstOrDefault();
}
/// <summary>
/// Gets the last record of the table "config.kanbans".
/// </summary>
/// <returns>Returns a non-live, non-mapped instance of "Kanban" class mapped to the database row.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public Frapid.Config.Entities.Kanban GetLast()
{
if (string.IsNullOrWhiteSpace(this._Catalog))
{
return null;
}
if (!this.SkipValidation)
{
if (!this.Validated)
{
this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
}
if (!this.HasAccess)
{
Log.Information("Access to the get the last record of entity \"Kanban\" was denied to the user with Login ID {_LoginId}", this._LoginId);
throw new UnauthorizedException("Access is denied.");
}
}
const string sql = "SELECT * FROM config.kanbans ORDER BY kanban_id DESC LIMIT 1;";
return Factory.Get<Frapid.Config.Entities.Kanban>(this._Catalog, sql).FirstOrDefault();
}
/// <summary>
/// Executes a select query on the table "config.kanbans" with a where filter on the column "kanban_id" to return multiple instances of the "Kanban" class.
/// </summary>
/// <param name="kanbanIds">Array of "kanban_id" values used in the where filter.</param>
/// <returns>Returns a non-live, non-mapped collection of "Kanban" instances mapped to the database rows.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public IEnumerable<Frapid.Config.Entities.Kanban> Get(long[] kanbanIds)
{
if (string.IsNullOrWhiteSpace(this._Catalog))
{
return null;
}
if (!this.SkipValidation)
{
if (!this.Validated)
{
this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
}
if (!this.HasAccess)
{
Log.Information("Access to entity \"Kanban\" was denied to the user with Login ID {LoginId}. kanbanIds: {kanbanIds}.", this._LoginId, kanbanIds);
throw new UnauthorizedException("Access is denied.");
}
}
const string sql = "SELECT * FROM config.kanbans WHERE kanban_id IN (@0);";
return Factory.Get<Frapid.Config.Entities.Kanban>(this._Catalog, sql, kanbanIds);
}
/// <summary>
/// Custom fields are user defined form elements for config.kanbans.
/// </summary>
/// <returns>Returns an enumerable custom field collection for the table config.kanbans</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public IEnumerable<Frapid.DataAccess.Models.CustomField> GetCustomFields(string resourceId)
{
if (string.IsNullOrWhiteSpace(this._Catalog))
{
return null;
}
if (!this.SkipValidation)
{
if (!this.Validated)
{
this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
}
if (!this.HasAccess)
{
Log.Information("Access to get custom fields for entity \"Kanban\" was denied to the user with Login ID {LoginId}", this._LoginId);
throw new UnauthorizedException("Access is denied.");
}
}
string sql;
if (string.IsNullOrWhiteSpace(resourceId))
{
sql = "SELECT * FROM config.custom_field_definition_view WHERE table_name='config.kanbans' ORDER BY field_order;";
return Factory.Get<Frapid.DataAccess.Models.CustomField>(this._Catalog, sql);
}
sql = "SELECT * from config.get_custom_field_definition('config.kanbans'::text, @0::text) ORDER BY field_order;";
return Factory.Get<Frapid.DataAccess.Models.CustomField>(this._Catalog, sql, resourceId);
}
/// <summary>
/// Display fields provide a minimal name/value context for data binding the row collection of config.kanbans.
/// </summary>
/// <returns>Returns an enumerable name and value collection for the table config.kanbans.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public IEnumerable<Frapid.DataAccess.Models.DisplayField> GetDisplayFields()
{
List<Frapid.DataAccess.Models.DisplayField> displayFields = new List<Frapid.DataAccess.Models.DisplayField>();
if (string.IsNullOrWhiteSpace(this._Catalog))
{
return displayFields;
}
if (!this.SkipValidation)
{
if (!this.Validated)
{
this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
}
if (!this.HasAccess)
{
Log.Information("Access to get display field for entity \"Kanban\" was denied to the user with Login ID {LoginId}", this._LoginId);
throw new UnauthorizedException("Access is denied.");
}
}
const string sql = "SELECT kanban_id AS key, kanban_name as value FROM config.kanbans;";
using (NpgsqlCommand command = new NpgsqlCommand(sql))
{
using (DataTable table = DbOperation.GetDataTable(this._Catalog, command))
{
if (table?.Rows == null || table.Rows.Count == 0)
{
return displayFields;
}
foreach (DataRow row in table.Rows)
{
if (row != null)
{
DisplayField displayField = new DisplayField
{
Key = row["key"].ToString(),
Value = row["value"].ToString()
};
displayFields.Add(displayField);
}
}
}
}
return displayFields;
}
/// <summary>
/// Inserts or updates the instance of Kanban class on the database table "config.kanbans".
/// </summary>
/// <param name="kanban">The instance of "Kanban" class to insert or update.</param>
/// <param name="customFields">The custom field collection.</param>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public object AddOrEdit(dynamic kanban, List<Frapid.DataAccess.Models.CustomField> customFields)
{
if (string.IsNullOrWhiteSpace(this._Catalog))
{
return null;
}
kanban.audit_user_id = this._UserId;
kanban.audit_ts = System.DateTime.UtcNow;
object primaryKeyValue = kanban.kanban_id;
if (Cast.To<long>(primaryKeyValue) > 0)
{
this.Update(kanban, Cast.To<long>(primaryKeyValue));
}
else
{
primaryKeyValue = this.Add(kanban);
}
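// Refresh the custom field values: clear the existing entries tied to this form, then insert the submitted values below.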
string sql = "DELETE FROM config.custom_fields WHERE custom_field_setup_id IN(" +
"SELECT custom_field_setup_id " +
"FROM config.custom_field_setup " +
"WHERE form_name=config.get_custom_field_form_name('config.kanbans')" +
");";
Factory.NonQuery(this._Catalog, sql);
if (customFields == null)
{
return primaryKeyValue;
}
foreach (var field in customFields)
{
sql = "INSERT INTO config.custom_fields(custom_field_setup_id, resource_id, value) " +
"SELECT config.get_custom_field_setup_id_by_table_name('config.kanbans', @0::character varying(100)), " +
"@1, @2;";
Factory.NonQuery(this._Catalog, sql, field.FieldName, primaryKeyValue, field.Value);
}
return primaryKeyValue;
}
/// <summary>
/// Inserts the instance of Kanban class on the database table "config.kanbans".
/// </summary>
/// <param name="kanban">The instance of "Kanban" class to insert.</param>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public object Add(dynamic kanban)
{
if (string.IsNullOrWhiteSpace(this._Catalog))
{
return null;
}
if (!this.SkipValidation)
{
if (!this.Validated)
{
this.Validate(AccessTypeEnum.Create, this._LoginId, this._Catalog, false);
}
if (!this.HasAccess)
{
Log.Information("Access to add entity \"Kanban\" was denied to the user with Login ID {LoginId}. {Kanban}", this._LoginId, kanban);
throw new UnauthorizedException("Access is denied.");
}
}
return Factory.Insert(this._Catalog, kanban, "config.kanbans", "kanban_id");
}
/// <summary>
/// Inserts or updates multiple instances of the Kanban class on the database table "config.kanbans".
/// </summary>
/// <param name="kanbans">List of "Kanban" instances to import.</param>
/// <returns>Returns the list of primary key values of the inserted or updated records.</returns>
public List<object> BulkImport(List<ExpandoObject> kanbans)
{
if (!this.SkipValidation)
{
if (!this.Validated)
{
this.Validate(AccessTypeEnum.ImportData, this._LoginId, this._Catalog, false);
}
if (!this.HasAccess)
{
Log.Information("Access to import entity \"Kanban\" was denied to the user with Login ID {LoginId}. {kanbans}", this._LoginId, kanbans);
throw new UnauthorizedException("Access is denied.");
}
}
var result = new List<object>();
int line = 0;
try
{
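// The whole import runs inside a single transaction; "line" tracks the current record for error reporting.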
using (Database db = new Database(ConnectionString.GetConnectionString(this._Catalog), Factory.ProviderName))
{
using (ITransaction transaction = db.GetTransaction())
{
foreach (dynamic kanban in kanbans)
{
line++;
kanban.audit_user_id = this._UserId;
kanban.audit_ts = System.DateTime.UtcNow;
object primaryKeyValue = kanban.kanban_id;
if (Cast.To<long>(primaryKeyValue) > 0)
{
result.Add(kanban.kanban_id);
db.Update("config.kanbans", "kanban_id", kanban, kanban.kanban_id);
}
else
{
result.Add(db.Insert("config.kanbans", "kanban_id", kanban));
}
}
transaction.Complete();
}
return result;
}
}
catch (NpgsqlException ex)
{
string errorMessage = $"Error on line {line} ";
if (ex.Code.StartsWith("P"))
{
errorMessage += Factory.GetDbErrorResource(ex);
throw new DataAccessException(errorMessage, ex);
}
errorMessage += ex.Message;
throw new DataAccessException(errorMessage, ex);
}
catch (System.Exception ex)
{
string errorMessage = $"Error on line {line} ";
throw new DataAccessException(errorMessage, ex);
}
}
/// <summary>
/// Updates the row of the table "config.kanbans" with an instance of "Kanban" class against the primary key value.
/// </summary>
/// <param name="kanban">The instance of "Kanban" class to update.</param>
/// <param name="kanbanId">The value of the column "kanban_id" which will be updated.</param>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public void Update(dynamic kanban, long kanbanId)
{
if (string.IsNullOrWhiteSpace(this._Catalog))
{
return;
}
if (!this.SkipValidation)
{
if (!this.Validated)
{
this.Validate(AccessTypeEnum.Edit, this._LoginId, this._Catalog, false);
}
if (!this.HasAccess)
{
Log.Information("Access to edit entity \"Kanban\" with Primary Key {PrimaryKey} was denied to the user with Login ID {LoginId}. {Kanban}", kanbanId, this._LoginId, kanban);
throw new UnauthorizedException("Access is denied.");
}
}
Factory.Update(this._Catalog, kanban, kanbanId, "config.kanbans", "kanban_id");
}
/// <summary>
/// Deletes the row of the table "config.kanbans" against the primary key value.
/// </summary>
/// <param name="kanbanId">The value of the column "kanban_id" which will be deleted.</param>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public void Delete(long kanbanId)
{
if (string.IsNullOrWhiteSpace(this._Catalog))
{
return;
}
if (!this.SkipValidation)
{
if (!this.Validated)
{
this.Validate(AccessTypeEnum.Delete, this._LoginId, this._Catalog, false);
}
if (!this.HasAccess)
{
Log.Information("Access to delete entity \"Kanban\" with Primary Key {PrimaryKey} was denied to the user with Login ID {LoginId}.", kanbanId, this._LoginId);
throw new UnauthorizedException("Access is denied.");
}
}
const string sql = "DELETE FROM config.kanbans WHERE kanban_id=@0;";
Factory.NonQuery(this._Catalog, sql, kanbanId);
}
/// <summary>
/// Performs a select statement on table "config.kanbans" producing a paginated result of 10 rows.
/// </summary>
/// <returns>Returns the first page of the collection of the "Kanban" class.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public IEnumerable<Frapid.Config.Entities.Kanban> GetPaginatedResult()
{
if (string.IsNullOrWhiteSpace(this._Catalog))
{
return null;
}
if (!this.SkipValidation)
{
if (!this.Validated)
{
this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
}
if (!this.HasAccess)
{
Log.Information("Access to the first page of the entity \"Kanban\" was denied to the user with Login ID {LoginId}.", this._LoginId);
throw new UnauthorizedException("Access is denied.");
}
}
const string sql = "SELECT * FROM config.kanbans ORDER BY kanban_id LIMIT 10 OFFSET 0;";
return Factory.Get<Frapid.Config.Entities.Kanban>(this._Catalog, sql);
}
/// <summary>
/// Performs a select statement on table "config.kanbans" producing a paginated result of 10 rows per page.
/// </summary>
/// <param name="pageNumber">Enter the page number to produce the paginated result.</param>
/// <returns>Returns a collection of the "Kanban" class.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public IEnumerable<Frapid.Config.Entities.Kanban> GetPaginatedResult(long pageNumber)
{
if (string.IsNullOrWhiteSpace(this._Catalog))
{
return null;
}
if (!this.SkipValidation)
{
if (!this.Validated)
{
this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
}
if (!this.HasAccess)
{
Log.Information("Access to Page #{Page} of the entity \"Kanban\" was denied to the user with Login ID {LoginId}.", pageNumber, this._LoginId);
throw new UnauthorizedException("Access is denied.");
}
}
long offset = (pageNumber - 1) * 10;
const string sql = "SELECT * FROM config.kanbans ORDER BY kanban_id LIMIT 10 OFFSET @0;";
return Factory.Get<Frapid.Config.Entities.Kanban>(this._Catalog, sql, offset);
}
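/// <summary>
/// Gets the filter conditions stored for the named filter defined on the table "config.kanbans".
/// </summary>
/// <param name="catalog">The name of the database.</param>
/// <param name="filterName">The named filter.</param>
/// <returns>Returns the list of filter conditions matching the supplied filter name.</returns>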
public List<Frapid.DataAccess.Models.Filter> GetFilters(string catalog, string filterName)
{
const string sql = "SELECT * FROM config.filters WHERE object_name='config.kanbans' AND lower(filter_name)=lower(@0);";
return Factory.Get<Frapid.DataAccess.Models.Filter>(catalog, sql, filterName).ToList();
}
/// <summary>
/// Performs a filtered count on table "config.kanbans".
/// </summary>
/// <param name="filters">The list of filter conditions.</param>
/// <returns>Returns number of rows of "Kanban" class using the filter.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public long CountWhere(List<Frapid.DataAccess.Models.Filter> filters)
{
if (string.IsNullOrWhiteSpace(this._Catalog))
{
return 0;
}
if (!this.SkipValidation)
{
if (!this.Validated)
{
this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
}
if (!this.HasAccess)
{
Log.Information("Access to count entity \"Kanban\" was denied to the user with Login ID {LoginId}. Filters: {Filters}.", this._LoginId, filters);
throw new UnauthorizedException("Access is denied.");
}
}
Sql sql = Sql.Builder.Append("SELECT COUNT(*) FROM config.kanbans WHERE 1 = 1");
Frapid.DataAccess.FilterManager.AddFilters(ref sql, new Frapid.Config.Entities.Kanban(), filters);
return Factory.Scalar<long>(this._Catalog, sql);
}
/// <summary>
/// Performs a filtered select statement on table "config.kanbans" producing a paginated result of 10 rows per page.
/// </summary>
/// <param name="pageNumber">Enter the page number to produce the paginated result. If you provide a negative number, the result will not be paginated.</param>
/// <param name="filters">The list of filter conditions.</param>
/// <returns>Returns a collection of the "Kanban" class.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public IEnumerable<Frapid.Config.Entities.Kanban> GetWhere(long pageNumber, List<Frapid.DataAccess.Models.Filter> filters)
{
if (string.IsNullOrWhiteSpace(this._Catalog))
{
return null;
}
if (!this.SkipValidation)
{
if (!this.Validated)
{
this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
}
if (!this.HasAccess)
{
Log.Information("Access to Page #{Page} of the filtered entity \"Kanban\" was denied to the user with Login ID {LoginId}. Filters: {Filters}.", pageNumber, this._LoginId, filters);
throw new UnauthorizedException("Access is denied.");
}
}
long offset = (pageNumber - 1) * 10;
Sql sql = Sql.Builder.Append("SELECT * FROM config.kanbans WHERE 1 = 1");
Frapid.DataAccess.FilterManager.AddFilters(ref sql, new Frapid.Config.Entities.Kanban(), filters);
sql.OrderBy("kanban_id");
if (pageNumber > 0)
{
sql.Append("LIMIT @0", 10);
sql.Append("OFFSET @0", offset);
}
return Factory.Get<Frapid.Config.Entities.Kanban>(this._Catalog, sql);
}
/// <summary>
/// Performs a filtered count on table "config.kanbans".
/// </summary>
/// <param name="filterName">The named filter.</param>
/// <returns>Returns number of rows of "Kanban" class using the filter.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public long CountFiltered(string filterName)
{
if (string.IsNullOrWhiteSpace(this._Catalog))
{
return 0;
}
if (!this.SkipValidation)
{
if (!this.Validated)
{
this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
}
if (!this.HasAccess)
{
Log.Information("Access to count entity \"Kanban\" was denied to the user with Login ID {LoginId}. Filter: {Filter}.", this._LoginId, filterName);
throw new UnauthorizedException("Access is denied.");
}
}
List<Frapid.DataAccess.Models.Filter> filters = this.GetFilters(this._Catalog, filterName);
Sql sql = Sql.Builder.Append("SELECT COUNT(*) FROM config.kanbans WHERE 1 = 1");
Frapid.DataAccess.FilterManager.AddFilters(ref sql, new Frapid.Config.Entities.Kanban(), filters);
return Factory.Scalar<long>(this._Catalog, sql);
}
/// <summary>
/// Performs a filtered select statement on table "config.kanbans" producing a paginated result of 10 rows per page.
/// </summary>
/// <param name="pageNumber">Enter the page number to produce the paginated result. If you provide a negative number, the result will not be paginated.</param>
/// <param name="filterName">The named filter.</param>
/// <returns>Returns a collection of the "Kanban" class.</returns>
/// <exception cref="UnauthorizedException">Thrown when the application user does not have sufficient privilege to perform this action.</exception>
public IEnumerable<Frapid.Config.Entities.Kanban> GetFiltered(long pageNumber, string filterName)
{
if (string.IsNullOrWhiteSpace(this._Catalog))
{
return null;
}
if (!this.SkipValidation)
{
if (!this.Validated)
{
this.Validate(AccessTypeEnum.Read, this._LoginId, this._Catalog, false);
}
if (!this.HasAccess)
{
Log.Information("Access to Page #{Page} of the filtered entity \"Kanban\" was denied to the user with Login ID {LoginId}. Filter: {Filter}.", pageNumber, this._LoginId, filterName);
throw new UnauthorizedException("Access is denied.");
}
}
List<Frapid.DataAccess.Models.Filter> filters = this.GetFilters(this._Catalog, filterName);
long offset = (pageNumber - 1) * 10;
Sql sql = Sql.Builder.Append("SELECT * FROM config.kanbans WHERE 1 = 1");
Frapid.DataAccess.FilterManager.AddFilters(ref sql, new Frapid.Config.Entities.Kanban(), filters);
sql.OrderBy("kanban_id");
if (pageNumber > 0)
{
sql.Append("LIMIT @0", 10);
sql.Append("OFFSET @0", offset);
}
return Factory.Get<Frapid.Config.Entities.Kanban>(this._Catalog, sql);
}
}
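// Illustrative usage sketch (not part of the generated Frapid code): it shows how this repository is
// typically driven by a caller that already knows the tenant catalog and the current login/user ids.
// The class and method names below (KanbanRepositoryUsageExample, GetFirstPage, Save) are hypothetical.
internal static class KanbanRepositoryUsageExample
{
public static IEnumerable<Frapid.Config.Entities.Kanban> GetFirstPage(string catalog, long loginId, int userId)
{
// Configure the repository for the current tenant and user before calling any data method.
var kanbans = new Kanban
{
_Catalog = catalog,
_LoginId = loginId,
_UserId = userId
};
// Access is validated inside the call; an UnauthorizedException is thrown when the user lacks the privilege.
return kanbans.GetPaginatedResult();
}
public static object Save(string catalog, long loginId, int userId, dynamic kanban, List<Frapid.DataAccess.Models.CustomField> customFields)
{
var kanbans = new Kanban
{
_Catalog = catalog,
_LoginId = loginId,
_UserId = userId
};
// AddOrEdit inserts when "kanban_id" is zero and updates otherwise, returning the primary key value.
return kanbans.AddOrEdit(kanban, customFields);
}
}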
}
| |
//-----------------------------------------------------------------------------
// Copyright (c) 2012 GarageGames, LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
//--------------------------------------------------------------------------
// Sounds
//--------------------------------------------------------------------------
datablock SFXProfile(LurkerFireSound)
{
filename = "data/FPSGameplay/sound/weapons/wpn_lurker_fire";
description = AudioClose3D;
preload = true;
};
datablock SFXProfile(LurkerReloadSound)
{
filename = "data/FPSGameplay/sound/weapons/wpn_lurker_reload";
description = AudioClose3D;
preload = true;
};
datablock SFXProfile(LurkerIdleSound)
{
filename = "data/FPSGameplay/sound/weapons/wpn_lurker_idle";
description = AudioClose3D;
preload = true;
};
datablock SFXProfile(LurkerSwitchinSound)
{
filename = "data/FPSGameplay/sound/weapons/wpn_lurker_switchin";
description = AudioClose3D;
preload = true;
};
datablock SFXProfile(LurkerGrenadeFireSound)
{
filename = "data/FPSGameplay/sound/weapons/wpn_lurker_grenadelaunch";
description = AudioClose3D;
preload = true;
};
datablock SFXPlayList(LurkerFireSoundList)
{
// Use a looped description so the list playback will loop.
description = AudioClose3D;
track[ 0 ] = LurkerFireSound;
};
/*datablock SFXProfile(BulletImpactSound)
{
filename = "data/FPSGameplay/sound/weapons/SCARFIRE";
description = AudioClose3D;
preload = true;
};*/
// ----------------------------------------------------------------------------
// Particles
// ----------------------------------------------------------------------------
datablock ParticleData(GunFireSmoke)
{
textureName = "data/FPSGameplay/art/particles/smoke";
dragCoefficient = 0;
gravityCoefficient = "-1";
windCoefficient = 0;
inheritedVelFactor = 0.0;
constantAcceleration = 0.0;
lifetimeMS = 500;
lifetimeVarianceMS = 200;
spinRandomMin = -180.0;
spinRandomMax = 180.0;
useInvAlpha = true;
colors[0] = "0.795276 0.795276 0.795276 0.692913";
colors[1] = "0.866142 0.866142 0.866142 0.346457";
colors[2] = "0.897638 0.834646 0.795276 0";
sizes[0] = "0.399805";
sizes[1] = "1.19941";
sizes[2] = "1.69993";
times[0] = 0.0;
times[1] = "0.498039";
times[2] = 1.0;
animTexName = "data/FPSGameplay/art/particles/smoke";
};
datablock ParticleEmitterData(GunFireSmokeEmitter)
{
ejectionPeriodMS = 20;
periodVarianceMS = 10;
ejectionVelocity = "0";
velocityVariance = "0";
thetaMin = "0";
thetaMax = "0";
lifetimeMS = 250;
particles = "GunFireSmoke";
blendStyle = "NORMAL";
softParticles = "0";
originalName = "GunFireSmokeEmitter";
alignParticles = "0";
orientParticles = "0";
};
datablock ParticleData(BulletDirtDust)
{
textureName = "data/FPSGameplay/art/particles/impact";
dragCoefficient = "1";
gravityCoefficient = "-0.100122";
windCoefficient = 0;
inheritedVelFactor = 0.0;
constantAcceleration = "-0.83";
lifetimeMS = 800;
lifetimeVarianceMS = 300;
spinRandomMin = -180.0;
spinRandomMax = 180.0;
useInvAlpha = true;
colors[0] = "0.496063 0.393701 0.299213 0.692913";
colors[1] = "0.692913 0.614173 0.535433 0.346457";
colors[2] = "0.897638 0.84252 0.795276 0";
sizes[0] = "0.997986";
sizes[1] = "2";
sizes[2] = "2.5";
times[0] = 0.0;
times[1] = "0.498039";
times[2] = 1.0;
animTexName = "data/FPSGameplay/art/particles/impact";
};
datablock ParticleEmitterData(BulletDirtDustEmitter)
{
ejectionPeriodMS = 20;
periodVarianceMS = 10;
ejectionVelocity = "1";
velocityVariance = 1.0;
thetaMin = 0.0;
thetaMax = 180.0;
lifetimeMS = 250;
particles = "BulletDirtDust";
blendStyle = "NORMAL";
};
//-----------------------------------------------------------------------------
// Explosion
//-----------------------------------------------------------------------------
datablock ExplosionData(BulletDirtExplosion)
{
soundProfile = BulletImpactSound;
lifeTimeMS = 65;
// Volume particles
particleEmitter = BulletDirtDustEmitter;
particleDensity = 4;
particleRadius = 0.3;
// Point emission
emitter[0] = BulletDirtSprayEmitter;
emitter[1] = BulletDirtSprayEmitter;
emitter[2] = BulletDirtRocksEmitter;
};
//--------------------------------------------------------------------------
// Shell ejected during reload.
//-----------------------------------------------------------------------------
datablock DebrisData(BulletShell)
{
shapeFile = "data/FPSGameplay/art/shapes/weapons/shared/RifleShell.DAE";
lifetime = 6.0;
minSpinSpeed = 300.0;
maxSpinSpeed = 400.0;
elasticity = 0.65;
friction = 0.05;
numBounces = 5;
staticOnMaxBounce = true;
snapOnMaxBounce = false;
ignoreWater = true;
fade = true;
};
//-----------------------------------------------------------------------------
// Projectile Object
//-----------------------------------------------------------------------------
datablock LightDescription( BulletProjectileLightDesc )
{
color = "0.0 0.5 0.7";
range = 3.0;
};
datablock ProjectileData( BulletProjectile )
{
projectileShapeName = "";
directDamage = 5;
radiusDamage = 0;
damageRadius = 0.5;
areaImpulse = 0.5;
impactForce = 1;
explosion = BulletDirtExplosion;
decal = BulletHoleDecal;
muzzleVelocity = 120;
velInheritFactor = 1;
armingDelay = 0;
lifetime = 992;
fadeDelay = 1472;
bounceElasticity = 0;
bounceFriction = 0;
isBallistic = false;
gravityMod = 1;
};
function BulletProjectile::onCollision(%this,%obj,%col,%fade,%pos,%normal)
{
// Apply damage to any GameBase-derived object the projectile hits.
if ( %col.getType() & $TypeMasks::GameBaseObjectType )
%col.damage(%obj,%pos,%this.directDamage,"BulletProjectile");
}
//-----------------------------------------------------------------------------
// Ammo Item
//-----------------------------------------------------------------------------
datablock ItemData(LurkerClip)
{
// Mission editor category
category = "AmmoClip";
// Add the Ammo namespace as a parent. The ammo namespace provides
// common ammo related functions and hooks into the inventory system.
className = "AmmoClip";
// Basic Item properties
shapeFile = "data/FPSGameplay/art/shapes/weapons/Lurker/TP_Lurker.DAE";
mass = 1;
elasticity = 0.2;
friction = 0.6;
// Dynamic properties defined by the scripts
pickUpName = "Lurker clip";
count = 1;
maxInventory = 10;
};
datablock ItemData(LurkerAmmo)
{
// Mission editor category
category = "Ammo";
// Add the Ammo namespace as a parent. The ammo namespace provides
// common ammo related functions and hooks into the inventory system.
className = "Ammo";
// Basic Item properties
shapeFile = "data/FPSGameplay/art/shapes/weapons/Lurker/TP_Lurker.DAE";
mass = 1;
elasticity = 0.2;
friction = 0.6;
// Dynamic properties defined by the scripts
pickUpName = "Lurker ammo";
maxInventory = 30;
clip = LurkerClip;
};
//--------------------------------------------------------------------------
// Weapon Item. This is the item that exists in the world, i.e. when it's
// been dropped, thrown or is acting as re-spawnable item. When the weapon
// is mounted onto a shape, the LurkerWeaponImage is used.
//-----------------------------------------------------------------------------
datablock ItemData(Lurker)
{
// Mission editor category
category = "Weapon";
// Hook into Item Weapon class hierarchy. The weapon namespace
// provides common weapon handling functions in addition to hooks
// into the inventory system.
className = "Weapon";
// Basic Item properties
shapeFile = "data/FPSGameplay/art/shapes/weapons/Lurker/TP_Lurker.DAE";
mass = 1;
elasticity = 0.2;
friction = 0.6;
emap = true;
// Dynamic properties defined by the scripts
PreviewImage = 'lurker.png';
pickUpName = "Lurker rifle";
description = "Lurker";
image = LurkerWeaponImage;
reticle = "crossHair";
};
datablock ShapeBaseImageData(LurkerWeaponImage)
{
// Basic Item properties
shapeFile = "data/FPSGameplay/art/shapes/weapons/Lurker/TP_Lurker.DAE";
shapeFileFP = "data/FPSGameplay/art/shapes/weapons/Lurker/FP_Lurker.DAE";
emap = true;
imageAnimPrefix = "Rifle";
imageAnimPrefixFP = "Rifle";
// Specify mount point & offset for 3rd person, and eye offset
// for first person rendering.
mountPoint = 0;
firstPerson = true;
useEyeNode = true;
animateOnServer = true;
// When firing from a point offset from the eye, muzzle correction
// adjusts the muzzle vector to point at the eye's line-of-sight point.
// Muzzle correction is left enabled for this weapon.
correctMuzzleVector = true;
// Add the WeaponImage namespace as a parent, WeaponImage namespace
// provides some hooks into the inventory system.
class = "WeaponImage";
className = "WeaponImage";
// Projectiles and Ammo.
item = Lurker;
ammo = LurkerAmmo;
clip = LurkerClip;
projectile = BulletProjectile;
projectileType = Projectile;
projectileSpread = "0.005";
altProjectile = GrenadeLauncherProjectile;
altProjectileSpread = "0.02";
casing = BulletShell;
shellExitDir = "1.0 0.3 1.0";
shellExitOffset = "0.15 -0.56 -0.1";
shellExitVariance = 15.0;
shellVelocity = 3.0;
// Weapon lights up while firing
lightType = "WeaponFireLight";
lightColor = "0.992126 0.968504 0.708661 1";
lightRadius = "4";
lightDuration = "100";
lightBrightness = 2;
// Shake camera while firing.
shakeCamera = false;
camShakeFreq = "0 0 0";
camShakeAmp = "0 0 0";
// Images have a state system which controls how the animations
// are run, which sounds are played, script callbacks, etc. This
// state system is downloaded to the client so that clients can
// predict state changes and animate accordingly. The following
// system supports basic ready->fire->reload transitions as
// well as a no-ammo->dryfire idle state.
useRemainderDT = true;
// Initial start up state
stateName[0] = "Preactivate";
stateTransitionOnLoaded[0] = "Activate";
stateTransitionOnNoAmmo[0] = "NoAmmo";
// Activating the gun. Called when the weapon is first
// mounted and there is ammo.
stateName[1] = "Activate";
stateTransitionGeneric0In[1] = "SprintEnter";
stateTransitionOnTimeout[1] = "Ready";
stateTimeoutValue[1] = 0.5;
stateSequence[1] = "switch_in";
stateSound[1] = LurkerSwitchinSound;
// Ready to fire, just waiting for the trigger
stateName[2] = "Ready";
stateTransitionGeneric0In[2] = "SprintEnter";
stateTransitionOnMotion[2] = "ReadyMotion";
stateTransitionOnTimeout[2] = "ReadyFidget";
stateTimeoutValue[2] = 10;
stateWaitForTimeout[2] = false;
stateScaleAnimation[2] = false;
stateScaleAnimationFP[2] = false;
stateTransitionOnNoAmmo[2] = "NoAmmo";
stateTransitionOnTriggerDown[2] = "Fire";
stateSequence[2] = "idle";
// Same as Ready state but plays a fidget sequence
stateName[3] = "ReadyFidget";
stateTransitionGeneric0In[3] = "SprintEnter";
stateTransitionOnMotion[3] = "ReadyMotion";
stateTransitionOnTimeout[3] = "Ready";
stateTimeoutValue[3] = 6;
stateWaitForTimeout[3] = false;
stateTransitionOnNoAmmo[3] = "NoAmmo";
stateTransitionOnTriggerDown[3] = "Fire";
stateSequence[3] = "idle_fidget1";
stateSound[3] = LurkerIdleSound;
// Ready to fire with player moving
stateName[4] = "ReadyMotion";
stateTransitionGeneric0In[4] = "SprintEnter";
stateTransitionOnNoMotion[4] = "Ready";
stateWaitForTimeout[4] = false;
stateScaleAnimation[4] = false;
stateScaleAnimationFP[4] = false;
stateSequenceTransitionIn[4] = true;
stateSequenceTransitionOut[4] = true;
stateTransitionOnNoAmmo[4] = "NoAmmo";
stateTransitionOnTriggerDown[4] = "Fire";
stateSequence[4] = "run";
// Fire the weapon. Calls the fire script which does
// the actual work.
stateName[5] = "Fire";
stateTransitionGeneric0In[5] = "SprintEnter";
stateTransitionOnTimeout[5] = "NewRound";
stateTimeoutValue[5] = 0.15;
stateFire[5] = true;
stateRecoil[5] = "";
stateAllowImageChange[5] = false;
stateSequence[5] = "fire";
stateScaleAnimation[5] = false;
stateSequenceNeverTransition[5] = true;
stateSequenceRandomFlash[5] = true; // use muzzle flash sequence
stateScript[5] = "onFire";
stateSound[5] = LurkerFireSoundList;
stateEmitter[5] = GunFireSmokeEmitter;
stateEmitterTime[5] = 0.025;
// Put another round into the chamber if one is available
stateName[6] = "NewRound";
stateTransitionGeneric0In[6] = "SprintEnter";
stateTransitionOnNoAmmo[6] = "NoAmmo";
stateTransitionOnTimeout[6] = "Ready";
stateWaitForTimeout[6] = "0";
stateTimeoutValue[6] = 0.05;
stateAllowImageChange[6] = false;
stateEjectShell[6] = true;
// No ammo in the weapon, just idle until something
// shows up. Play the dry fire sound if the trigger is
// pulled.
stateName[7] = "NoAmmo";
stateTransitionGeneric0In[7] = "SprintEnter";
stateTransitionOnMotion[7] = "NoAmmoMotion";
stateTransitionOnAmmo[7] = "ReloadClip";
stateTimeoutValue[7] = 0.1; // Slight pause to allow script to run when trigger is still held down from Fire state
stateScript[7] = "onClipEmpty";
stateSequence[7] = "idle";
stateScaleAnimation[7] = false;
stateScaleAnimationFP[7] = false;
stateTransitionOnTriggerDown[7] = "DryFire";
stateName[8] = "NoAmmoMotion";
stateTransitionGeneric0In[8] = "SprintEnter";
stateTransitionOnNoMotion[8] = "NoAmmo";
stateWaitForTimeout[8] = false;
stateScaleAnimation[8] = false;
stateScaleAnimationFP[8] = false;
stateSequenceTransitionIn[8] = true;
stateSequenceTransitionOut[8] = true;
stateTransitionOnTriggerDown[8] = "DryFire";
stateTransitionOnAmmo[8] = "ReloadClip";
stateSequence[8] = "run";
// No ammo dry fire
stateName[9] = "DryFire";
stateTransitionGeneric0In[9] = "SprintEnter";
stateTransitionOnAmmo[9] = "ReloadClip";
stateWaitForTimeout[9] = "0";
stateTimeoutValue[9] = 0.7;
stateTransitionOnTimeout[9] = "NoAmmo";
stateScript[9] = "onDryFire";
stateSound[9] = MachineGunDryFire;
// Play the reload clip animation
stateName[10] = "ReloadClip";
stateTransitionGeneric0In[10] = "SprintEnter";
stateTransitionOnTimeout[10] = "Ready";
stateWaitForTimeout[10] = true;
stateTimeoutValue[10] = 3.0;
stateReload[10] = true;
stateSequence[10] = "reload";
stateShapeSequence[10] = "Reload";
stateScaleShapeSequence[10] = true;
stateSound[10] = LurkerReloadSound;
// Start Sprinting
stateName[11] = "SprintEnter";
stateTransitionGeneric0Out[11] = "SprintExit";
stateTransitionOnTimeout[11] = "Sprinting";
stateWaitForTimeout[11] = false;
stateTimeoutValue[11] = 0.5;
stateWaitForTimeout[11] = false;
stateScaleAnimation[11] = false;
stateScaleAnimationFP[11] = false;
stateSequenceTransitionIn[11] = true;
stateSequenceTransitionOut[11] = true;
stateAllowImageChange[11] = false;
stateSequence[11] = "sprint";
// Sprinting
stateName[12] = "Sprinting";
stateTransitionGeneric0Out[12] = "SprintExit";
stateWaitForTimeout[12] = false;
stateScaleAnimation[12] = false;
stateScaleAnimationFP[12] = false;
stateSequenceTransitionIn[12] = true;
stateSequenceTransitionOut[12] = true;
stateAllowImageChange[12] = false;
stateSequence[12] = "sprint";
// Stop Sprinting
stateName[13] = "SprintExit";
stateTransitionGeneric0In[13] = "SprintEnter";
stateTransitionOnTimeout[13] = "Ready";
stateWaitForTimeout[13] = false;
stateTimeoutValue[13] = 0.5;
stateSequenceTransitionIn[13] = true;
stateSequenceTransitionOut[13] = true;
stateAllowImageChange[13] = false;
stateSequence[13] = "sprint";
};
//--------------------------------------------------------------------------
// Lurker Grenade Launcher
//--------------------------------------------------------------------------
datablock ItemData(LurkerGrenadeAmmo)
{
// Mission editor category
category = "Ammo";
// Add the Ammo namespace as a parent. The ammo namespace provides
// common ammo related functions and hooks into the inventory system.
className = "Ammo";
// Basic Item properties
shapeFile = "data/FPSGameplay/art/shapes/weapons/Lurker/TP_Lurker.DAE";
mass = 1;
elasticity = 0.2;
friction = 0.6;
// Dynamic properties defined by the scripts
pickUpName = "Lurker grenade ammo";
maxInventory = 20;
};
datablock ItemData(LurkerGrenadeLauncher)
{
// Mission editor category
category = "Weapon";
// Hook into Item Weapon class hierarchy. The weapon namespace
// provides common weapon handling functions in addition to hooks
// into the inventory system.
className = "Weapon";
// Basic Item properties
shapeFile = "data/FPSGameplay/art/shapes/weapons/Lurker/TP_Lurker.DAE";
mass = 1;
elasticity = 0.2;
friction = 0.6;
emap = true;
// Dynamic properties defined by the scripts
PreviewImage = 'lurker.png';
pickUpName = "a Lurker grenade launcher";
description = "Lurker Grenade Launcher";
image = LurkerGrenadeLauncherImage;
reticle = "crossHair";
};
datablock ShapeBaseImageData(LurkerGrenadeLauncherImage)
{
// Basic Item properties
shapeFile = "data/FPSGameplay/art/shapes/weapons/Lurker/TP_Lurker.DAE";
shapeFileFP = "data/FPSGameplay/art/shapes/weapons/Lurker/FP_Lurker.DAE";
emap = true;
imageAnimPrefix = "Rifle";
imageAnimPrefixFP = "Rifle";
// Specify mount point & offset for 3rd person, and eye offset
// for first person rendering.
mountPoint = 0;
firstPerson = true;
useEyeNode = true;
animateOnServer = true;
// When firing from a point offset from the eye, muzzle correction
// adjusts the muzzle vector to point at the eye's line-of-sight point.
// Muzzle correction is left enabled for this weapon.
correctMuzzleVector = true;
// Add the WeaponImage namespace as a parent, WeaponImage namespace
// provides some hooks into the inventory system.
class = "WeaponImage";
className = "WeaponImage";
// Projectiles and Ammo.
item = LurkerGrenadeLauncher;
ammo = LurkerGrenadeAmmo;
projectile = GrenadeLauncherProjectile;
projectileType = Projectile;
projectileSpread = "0.02";
// Weapon lights up while firing
lightType = "WeaponFireLight";
lightColor = "0.992126 0.968504 0.708661 1";
lightRadius = "4";
lightDuration = "100";
lightBrightness = 2;
// Shake camera while firing.
shakeCamera = false;
camShakeFreq = "0 0 0";
camShakeAmp = "0 0 0";
// Images have a state system which controls how the animations
// are run, which sounds are played, script callbacks, etc. This
// state system is downloaded to the client so that clients can
// predict state changes and animate accordingly. The following
// system supports basic ready->fire->reload transitions as
// well as a no-ammo->dryfire idle state.
useRemainderDT = true;
// Initial start up state
stateName[0] = "Preactivate";
stateTransitionOnLoaded[0] = "Activate";
stateTransitionOnNoAmmo[0] = "NoAmmo";
// Activating the gun. Called when the weapon is first
// mounted and there is ammo.
stateName[1] = "Activate";
stateTransitionGeneric0In[1] = "SprintEnter";
stateTransitionOnTimeout[1] = "Ready";
stateTimeoutValue[1] = 0.5;
stateSequence[1] = "switch_in";
stateSound[1] = LurkerSwitchinSound;
// Ready to fire, just waiting for the trigger
stateName[2] = "Ready";
stateTransitionGeneric0In[2] = "SprintEnter";
stateTransitionOnMotion[2] = "ReadyMotion";
stateTransitionOnTimeout[2] = "ReadyFidget";
stateTimeoutValue[2] = 10;
stateWaitForTimeout[2] = false;
stateScaleAnimation[2] = false;
stateScaleAnimationFP[2] = false;
stateTransitionOnNoAmmo[2] = "NoAmmo";
stateTransitionOnTriggerDown[2] = "Fire";
stateSequence[2] = "idle";
// Same as Ready state but plays a fidget sequence
stateName[3] = "ReadyFidget";
stateTransitionGeneric0In[3] = "SprintEnter";
stateTransitionOnMotion[3] = "ReadyMotion";
stateTransitionOnTimeout[3] = "Ready";
stateTimeoutValue[3] = 6;
stateWaitForTimeout[3] = false;
stateTransitionOnNoAmmo[3] = "NoAmmo";
stateTransitionOnTriggerDown[3] = "Fire";
stateSequence[3] = "idle_fidget1";
stateSound[3] = LurkerIdleSound;
// Ready to fire with player moving
stateName[4] = "ReadyMotion";
stateTransitionGeneric0In[4] = "SprintEnter";
stateTransitionOnNoMotion[4] = "Ready";
stateWaitForTimeout[4] = false;
stateScaleAnimation[4] = false;
stateScaleAnimationFP[4] = false;
stateSequenceTransitionIn[4] = true;
stateSequenceTransitionOut[4] = true;
stateTransitionOnNoAmmo[4] = "NoAmmo";
stateTransitionOnTriggerDown[4] = "Fire";
stateSequence[4] = "run";
// Fire the weapon. Calls the fire script which does
// the actual work.
stateName[5] = "Fire";
stateTransitionGeneric0In[5] = "SprintEnter";
stateTransitionOnTimeout[5] = "NewRound";
stateTimeoutValue[5] = 1.2;
stateFire[5] = true;
stateRecoil[5] = "";
stateAllowImageChange[5] = false;
stateSequence[5] = "fire_alt";
stateScaleAnimation[5] = true;
stateSequenceNeverTransition[5] = true;
stateSequenceRandomFlash[5] = true; // use muzzle flash sequence
stateScript[5] = "onFire";
stateSound[5] = LurkerGrenadeFireSound;
stateEmitter[5] = GunFireSmokeEmitter;
stateEmitterTime[5] = 0.025;
stateEjectShell[5] = true;
// Put another round into the chamber
stateName[6] = "NewRound";
stateTransitionGeneric0In[6] = "SprintEnter";
stateTransitionOnNoAmmo[6] = "NoAmmo";
stateTransitionOnTimeout[6] = "Ready";
stateWaitForTimeout[6] = "0";
stateTimeoutValue[6] = 0.05;
stateAllowImageChange[6] = false;
// No ammo in the weapon, just idle until something
// shows up. Play the dry fire sound if the trigger is
// pulled.
stateName[7] = "NoAmmo";
stateTransitionGeneric0In[7] = "SprintEnter";
stateTransitionOnMotion[7] = "NoAmmoReadyMotion";
stateTransitionOnAmmo[7] = "ReloadClip";
stateTimeoutValue[7] = 0.1; // Slight pause to allow script to run when trigger is still held down from Fire state
stateScript[7] = "onClipEmpty";
stateSequence[7] = "idle";
stateScaleAnimation[7] = false;
stateScaleAnimationFP[7] = false;
stateTransitionOnTriggerDown[7] = "DryFire";
stateName[8] = "NoAmmoReadyMotion";
stateTransitionGeneric0In[8] = "SprintEnter";
stateTransitionOnNoMotion[8] = "NoAmmo";
stateWaitForTimeout[8] = false;
stateScaleAnimation[8] = false;
stateScaleAnimationFP[8] = false;
stateSequenceTransitionIn[8] = true;
stateSequenceTransitionOut[8] = true;
stateTransitionOnAmmo[8] = "ReloadClip";
stateTransitionOnTriggerDown[8] = "DryFire";
stateSequence[8] = "run";
// No ammo dry fire
stateName[9] = "DryFire";
stateTransitionGeneric0In[9] = "SprintEnter";
stateTimeoutValue[9] = 1.0;
stateTransitionOnTimeout[9] = "NoAmmo";
stateScript[9] = "onDryFire";
// Play the reload clip animation
stateName[10] = "ReloadClip";
stateTransitionGeneric0In[10] = "SprintEnter";
stateTransitionOnTimeout[10] = "Ready";
stateWaitForTimeout[10] = true;
stateTimeoutValue[10] = 3.0;
stateReload[10] = true;
stateSequence[10] = "reload";
stateShapeSequence[10] = "Reload";
stateScaleShapeSequence[10] = true;
// Start Sprinting
stateName[11] = "SprintEnter";
stateTransitionGeneric0Out[11] = "SprintExit";
stateTransitionOnTimeout[11] = "Sprinting";
stateWaitForTimeout[11] = false;
stateTimeoutValue[11] = 0.5;
stateWaitForTimeout[11] = false;
stateScaleAnimation[11] = false;
stateScaleAnimationFP[11] = false;
stateSequenceTransitionIn[11] = true;
stateSequenceTransitionOut[11] = true;
stateAllowImageChange[11] = false;
stateSequence[11] = "sprint";
// Sprinting
stateName[12] = "Sprinting";
stateTransitionGeneric0Out[12] = "SprintExit";
stateWaitForTimeout[12] = false;
stateScaleAnimation[12] = false;
stateScaleAnimationFP[12] = false;
stateSequenceTransitionIn[12] = true;
stateSequenceTransitionOut[12] = true;
stateAllowImageChange[12] = false;
stateSequence[12] = "sprint";
// Stop Sprinting
stateName[13] = "SprintExit";
stateTransitionGeneric0In[13] = "SprintEnter";
stateTransitionOnTimeout[13] = "Ready";
stateWaitForTimeout[13] = false;
stateTimeoutValue[13] = 0.5;
stateSequenceTransitionIn[13] = true;
stateSequenceTransitionOut[13] = true;
stateAllowImageChange[13] = false;
stateSequence[13] = "sprint";
};
| |
#pragma warning disable 169
// ReSharper disable InconsistentNaming
namespace NEventStore.Persistence.AcceptanceTests
{
using System;
using System.Collections.Generic;
using System.Linq;
using FluentAssertions;
using NEventStore.Diagnostics;
using NEventStore.Persistence.AcceptanceTests.BDD;
using NEventStore.Persistence.InMemory;
using Xunit;
public class when_a_commit_header_has_a_name_that_contains_a_period : PersistenceEngineConcern
{
private ICommit _persisted;
private string _streamId;
public when_a_commit_header_has_a_name_that_contains_a_period(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
_streamId = Guid.NewGuid().ToString();
var attempt = new CommitAttempt(_streamId,
2,
Guid.NewGuid(),
1,
DateTime.Now,
new Dictionary<string, object> { { "key.1", "value" } },
new List<EventMessage> { new EventMessage { Body = new ExtensionMethods.SomeDomainEvent { SomeProperty = "Test" } } });
Persistence.Commit(attempt);
}
protected override void Because()
{
_persisted = Persistence.GetFrom(_streamId, 0, int.MaxValue).First();
}
[Fact]
public void should_correctly_deserialize_headers()
{
_persisted.Headers.Keys.Should().Contain("key.1");
}
}
public class when_a_commit_is_successfully_persisted : PersistenceEngineConcern
{
private CommitAttempt Attempt
{
get
{ return Fixture.Variables[nameof(Attempt)] as CommitAttempt; }
set
{ Fixture.Variables[nameof(Attempt)] = value; }
}
private DateTime Now
{
get
{ return (DateTime)Fixture.Variables[nameof(Now)]; }
set
{ Fixture.Variables[nameof(Now)] = value; }
}
private ICommit Persisted
{
get
{ return Fixture.Variables[nameof(Persisted)] as ICommit; }
set
{ Fixture.Variables[nameof(Persisted)] = value; }
}
private string StreamId
{
get
{ return Fixture.Variables[nameof(StreamId)] as string; }
set
{ Fixture.Variables[nameof(StreamId)] = value; }
}
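// Note: these specs keep their state in Fixture.Variables instead of instance fields, presumably
// because xUnit creates a new instance of the test class for every [Fact], so values produced in
// Context()/Because() have to live in the shared TestFixture to be visible to each assertion.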
public when_a_commit_is_successfully_persisted(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
Now = SystemTime.UtcNow.AddYears(1);
StreamId = Guid.NewGuid().ToString();
Attempt = StreamId.BuildAttempt(Now);
Persistence.Commit(Attempt);
}
protected override void Because()
{ Persisted = Persistence.GetFrom(StreamId, 0, int.MaxValue).First(); }
[Fact]
public void should_correctly_persist_the_stream_identifier()
{ Persisted.StreamId.Should().Be(Attempt.StreamId); }
[Fact]
public void should_correctly_persist_the_stream_stream_revision()
{ Persisted.StreamRevision.Should().Be(Attempt.StreamRevision); }
[Fact]
public void should_correctly_persist_the_commit_identifier()
{ Persisted.CommitId.Should().Be(Attempt.CommitId); }
[Fact]
public void should_correctly_persist_the_commit_sequence()
{ Persisted.CommitSequence.Should().Be(Attempt.CommitSequence); }
// persistence engines have varying levels of precision with respect to time.
[Fact]
public void should_correctly_persist_the_commit_stamp()
{
var difference = Persisted.CommitStamp.Subtract(Now);
difference.Days.Should().Be(0);
difference.Hours.Should().Be(0);
difference.Minutes.Should().Be(0);
difference.Should().BeLessOrEqualTo(TimeSpan.FromSeconds(1));
}
[Fact]
public void should_correctly_persist_the_headers()
{ Persisted.Headers.Count.Should().Be(Attempt.Headers.Count); }
[Fact]
public void should_correctly_persist_the_events()
{ Persisted.Events.Count.Should().Be(Attempt.Events.Count); }
[Fact]
public void should_add_the_commit_to_the_set_of_undispatched_commits()
{ Persistence.GetUndispatchedCommits().FirstOrDefault(x => x.CommitId == Attempt.CommitId).Should().NotBeNull(); }
[Fact]
public void should_cause_the_stream_to_be_found_in_the_list_of_streams_to_snapshot()
{ Persistence.GetStreamsToSnapshot(1).FirstOrDefault(x => x.StreamId == StreamId).Should().NotBeNull(); }
}
public class when_reading_from_a_given_revision : PersistenceEngineConcern
{
private const int LoadFromCommitContainingRevision = 3;
private const int UpToCommitWithContainingRevision = 5;
private ICommit[] Committed
{
get
{ return Fixture.Variables["committed"] as ICommit[]; }
set
{ Fixture.Variables["committed"] = value; }
}
private ICommit Oldest
{
get
{ return Fixture.Variables["oldest"] as ICommit; }
set
{ Fixture.Variables["oldest"] = value; }
}
private ICommit Oldest2
{
get
{ return Fixture.Variables["oldest2"] as ICommit; }
set
{ Fixture.Variables["oldest2"] = value; }
}
private ICommit Oldest3
{
get
{ return Fixture.Variables["oldest3"] as ICommit; }
set
{ Fixture.Variables["oldest3"] = value; }
}
private string StreamId
{
get
{ return Fixture.Variables["streamId"] as string; }
set
{ Fixture.Variables["streamId"] = value; }
}
public when_reading_from_a_given_revision(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
Oldest = Persistence.CommitSingle(); // 2 events, revision 1-2
Oldest2 = Persistence.CommitNext(Oldest); // 2 events, revision 3-4
Oldest3 = Persistence.CommitNext(Oldest2); // 2 events, revision 5-6
Persistence.CommitNext(Oldest3); // 2 events, revision 7-8
StreamId = Oldest.StreamId;
}
protected override void Because()
{
Committed = Persistence.GetFrom(StreamId, LoadFromCommitContainingRevision, UpToCommitWithContainingRevision).ToArray();
}
[Fact]
public void should_start_from_the_commit_which_contains_the_min_stream_revision_specified()
{
Committed.First().CommitId.Should().Be(Oldest2.CommitId); // contains revision 3
}
[Fact]
public void should_read_up_to_the_commit_which_contains_the_max_stream_revision_specified()
{
Committed.Last().CommitId.Should().Be(Oldest3.CommitId); // contains revision 5
}
}
public class when_reading_from_a_given_revision_to_commit_revision : PersistenceEngineConcern
{
private const int LoadFromCommitContainingRevision = 3;
private const int UpToCommitWithContainingRevision = 6;
private ICommit[] Committed
{
get
{ return Fixture.Variables["committed"] as ICommit[]; }
set
{ Fixture.Variables["committed"] = value; }
}
private ICommit Oldest
{
get
{ return Fixture.Variables["oldest"] as ICommit; }
set
{ Fixture.Variables["oldest"] = value; }
}
private ICommit Oldest2
{
get
{ return Fixture.Variables["oldest2"] as ICommit; }
set
{ Fixture.Variables["oldest2"] = value; }
}
private ICommit Oldest3
{
get
{ return Fixture.Variables["oldest3"] as ICommit; }
set
{ Fixture.Variables["oldest3"] = value; }
}
private string StreamId
{
get
{ return Fixture.Variables["streamId"] as string; }
set
{ Fixture.Variables["streamId"] = value; }
}
public when_reading_from_a_given_revision_to_commit_revision(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
Oldest = Persistence.CommitSingle(); // 2 events, revision 1-2
Oldest2 = Persistence.CommitNext(Oldest); // 2 events, revision 3-4
Oldest3 = Persistence.CommitNext(Oldest2); // 2 events, revision 5-6
Persistence.CommitNext(Oldest3); // 2 events, revision 7-8
StreamId = Oldest.StreamId;
}
protected override void Because()
{
Committed = Persistence.GetFrom(StreamId, LoadFromCommitContainingRevision, UpToCommitWithContainingRevision).ToArray();
}
[Fact]
public void should_start_from_the_commit_which_contains_the_min_stream_revision_specified()
{
Committed.First().CommitId.Should().Be(Oldest2.CommitId); // contains revision 3
}
[Fact]
public void should_read_up_to_the_commit_which_contains_the_max_stream_revision_specified()
{
Committed.Last().CommitId.Should().Be(Oldest3.CommitId); // contains revision 6
}
}
public class when_committing_a_stream_with_the_same_revision : PersistenceEngineConcern
{
private CommitAttempt _attemptWithSameRevision;
private Exception _thrown;
public when_committing_a_stream_with_the_same_revision(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
ICommit commit = Persistence.CommitSingle();
_attemptWithSameRevision = commit.StreamId.BuildAttempt();
}
protected override void Because()
{
_thrown = Catch.Exception(() => Persistence.Commit(_attemptWithSameRevision));
}
[Fact]
public void should_throw_a_ConcurrencyException()
{
_thrown.Should().BeOfType<ConcurrencyException>();
}
}
//TODO:This test looks exactly like the one above. What are we trying to prove?
public class when_committing_a_stream_with_the_same_sequence : PersistenceEngineConcern
{
private CommitAttempt _attempt1, _attempt2;
private Exception _thrown;
public when_committing_a_stream_with_the_same_sequence(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
string streamId = Guid.NewGuid().ToString();
_attempt1 = streamId.BuildAttempt();
_attempt2 = streamId.BuildAttempt(); //TODO mutate a bit
Persistence.Commit(_attempt1);
}
protected override void Because()
{
_thrown = Catch.Exception(() => Persistence.Commit(_attempt2));
}
[Fact]
public void should_throw_a_ConcurrencyException()
{
_thrown.Should().BeOfType<ConcurrencyException>();
}
}
//TODO:This test looks exactly like the one above. What are we trying to prove?
public class when_attempting_to_overwrite_a_committed_sequence : PersistenceEngineConcern
{
private CommitAttempt _failedAttempt;
private Exception _thrown;
public when_attempting_to_overwrite_a_committed_sequence(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
string streamId = Guid.NewGuid().ToString();
CommitAttempt successfulAttempt = streamId.BuildAttempt();
Persistence.Commit(successfulAttempt);
_failedAttempt = streamId.BuildAttempt();
}
protected override void Because()
{
_thrown = Catch.Exception(() => Persistence.Commit(_failedAttempt));
}
[Fact]
public void should_throw_a_ConcurrencyException()
{
_thrown.Should().BeOfType<ConcurrencyException>();
}
}
public class when_attempting_to_persist_a_commit_twice : PersistenceEngineConcern
{
private CommitAttempt _attemptTwice;
private Exception _thrown;
public when_attempting_to_persist_a_commit_twice(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
var commit = Persistence.CommitSingle();
_attemptTwice = new CommitAttempt(
commit.BucketId,
commit.StreamId,
commit.StreamRevision,
commit.CommitId,
commit.CommitSequence,
commit.CommitStamp,
commit.Headers,
commit.Events);
}
protected override void Because()
{
_thrown = Catch.Exception(() => Persistence.Commit(_attemptTwice));
}
[Fact]
public void should_throw_a_DuplicateCommitException()
{
_thrown.Should().BeOfType<DuplicateCommitException>();
}
}
public class when_a_commit_has_been_marked_as_dispatched : PersistenceEngineConcern
{
private ICommit _commit;
public when_a_commit_has_been_marked_as_dispatched(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
_commit = Persistence.CommitSingle();
}
protected override void Because()
{
Persistence.MarkCommitAsDispatched(_commit);
}
[Fact]
public void should_no_longer_be_found_in_the_set_of_undispatched_commits()
{
Persistence.GetUndispatchedCommits().FirstOrDefault(x => x.CommitId == _commit.CommitId).Should().BeNull();
}
}
public class when_committing_more_events_than_the_configured_page_size : PersistenceEngineConcern
{
private string StreamId
{
get
{ return Fixture.Variables["streamId"] as string; }
set
{ Fixture.Variables["streamId"] = value; }
}
private ICommit[] Loaded
{
get
{ return Fixture.Variables["loaded"] as ICommit[]; }
set
{ Fixture.Variables["loaded"] = value; }
}
private CommitAttempt[] Committed
{
get
{ return Fixture.Variables["committed"] as CommitAttempt[]; }
set
{ Fixture.Variables["committed"] = value; }
}
public when_committing_more_events_than_the_configured_page_size(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
StreamId = Guid.NewGuid().ToString();
Committed = Persistence.CommitMany(ConfiguredPageSizeForTesting + 2, StreamId).ToArray();
}
protected override void Because()
{
Loaded = Persistence.GetFrom(StreamId, 0, int.MaxValue).ToArray();
}
[Fact]
public void should_load_the_same_number_of_commits_which_have_been_persisted()
{
Loaded.Length.Should().Be(Committed.Length);
}
[Fact]
public void should_load_the_same_commits_which_have_been_persisted()
{
Committed
.All(commit => Loaded.SingleOrDefault(loaded => loaded.CommitId == commit.CommitId) != null)
.Should().BeTrue();
}
}
public class when_saving_a_snapshot : PersistenceEngineConcern
{
private string StreamId
{
get
{ return Fixture.Variables["streamId"] as string; }
set
{ Fixture.Variables["streamId"] = value; }
}
private bool Added
{
get
{ return (bool) Fixture.Variables["added"]; }
set
{ Fixture.Variables["added"] = value; }
}
private Snapshot Snapshot
{
get
{ return Fixture.Variables["snapshot"] as Snapshot; }
set
{ Fixture.Variables["snapshot"] = value; }
}
public when_saving_a_snapshot(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
StreamId = Guid.NewGuid().ToString();
Snapshot = new Snapshot(StreamId, 1, "Snapshot");
Persistence.CommitSingle(StreamId);
}
protected override void Because()
{
Added = Persistence.AddSnapshot(Snapshot);
}
[Fact]
public void should_indicate_the_snapshot_was_added()
{
Added.Should().BeTrue();
}
[Fact]
public void should_be_able_to_retrieve_the_snapshot()
{
Persistence.GetSnapshot(StreamId, Snapshot.StreamRevision).Should().NotBeNull();
}
}
public class when_retrieving_a_snapshot : PersistenceEngineConcern
{
public ISnapshot Correct
{
get
{ return Fixture.Variables["correct"] as ISnapshot; }
set
{ Fixture.Variables["correct"] = value; }
}
public ISnapshot Snapshot
{
get
{ return Fixture.Variables["snapshot"] as ISnapshot; }
set
{ Fixture.Variables["snapshot"] = value; }
}
public ISnapshot TooFarForward
{
get
{ return Fixture.Variables["tooFarForward"] as ISnapshot; }
set
{ Fixture.Variables["tooFarForward"] = value; }
}
public string StreamId
{
get
{ return Fixture.Variables["streamId"] as string; }
set
{ Fixture.Variables["streamId"] = value; }
}
public when_retrieving_a_snapshot(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
StreamId = Guid.NewGuid().ToString();
ICommit commit1 = Persistence.CommitSingle(StreamId); // rev 1-2
ICommit commit2 = Persistence.CommitNext(commit1); // rev 3-4
Persistence.CommitNext(commit2); // rev 5-6
Persistence.AddSnapshot(new Snapshot(StreamId, 1, string.Empty)); //Too far back
Persistence.AddSnapshot(Correct = new Snapshot(StreamId, 3, "Snapshot"));
Persistence.AddSnapshot(TooFarForward = new Snapshot(StreamId, 5, string.Empty));
}
protected override void Because()
{
Snapshot = Persistence.GetSnapshot(StreamId, TooFarForward.StreamRevision - 1);
}
[Fact]
public void should_load_the_most_recent_prior_snapshot()
{
Snapshot.StreamRevision.Should().Be(Correct.StreamRevision);
}
[Fact]
public void should_have_the_correct_snapshot_payload()
{
Snapshot.Payload.Should().Be(Correct.Payload);
}
}
public class when_a_snapshot_has_been_added_to_the_most_recent_commit_of_a_stream : PersistenceEngineConcern
{
private const string SnapshotData = "snapshot";
private ICommit _newest;
private ICommit _oldest, _oldest2;
private string _streamId;
public when_a_snapshot_has_been_added_to_the_most_recent_commit_of_a_stream(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
_streamId = Guid.NewGuid().ToString();
_oldest = Persistence.CommitSingle(_streamId);
_oldest2 = Persistence.CommitNext(_oldest);
_newest = Persistence.CommitNext(_oldest2);
}
protected override void Because()
{
Persistence.AddSnapshot(new Snapshot(_streamId, _newest.StreamRevision, SnapshotData));
}
[Fact]
public void should_no_longer_find_the_stream_in_the_set_of_streams_to_be_snapshot()
{
Persistence.GetStreamsToSnapshot(1).Any(x => x.StreamId == _streamId).Should().BeFalse();
}
}
public class when_adding_a_commit_after_a_snapshot : PersistenceEngineConcern
{
private const string SnapshotData = "snapshot";
private const int WithinThreshold = 2;
private const int OverThreshold = 3;
private string StreamId
{
get
{ return Fixture.Variables["streamId"] as string; }
set
{ Fixture.Variables["streamId"] = value; }
}
private ICommit Oldest
{
get
{ return Fixture.Variables["oldest"] as ICommit; }
set
{ Fixture.Variables["oldest"] = value; }
}
private ICommit Oldest2
{
get
{ return Fixture.Variables["oldest2"] as ICommit; }
set
{ Fixture.Variables["oldest2"] = value; }
}
public when_adding_a_commit_after_a_snapshot(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
StreamId = Guid.NewGuid().ToString();
Oldest = Persistence.CommitSingle(StreamId);
Oldest2 = Persistence.CommitNext(Oldest);
Persistence.AddSnapshot(new Snapshot(StreamId, Oldest2.StreamRevision, SnapshotData));
}
protected override void Because()
{
Persistence.Commit(Oldest2.BuildNextAttempt());
}
// Because Raven and Mongo update the stream head asynchronously, this test will occasionally fail
[Fact]
public void should_find_the_stream_in_the_set_of_streams_to_be_snapshot_when_within_the_threshold()
{
Persistence.GetStreamsToSnapshot(WithinThreshold).FirstOrDefault(x => x.StreamId == StreamId).Should().NotBeNull();
}
[Fact]
public void should_not_find_the_stream_in_the_set_of_streams_to_be_snapshot_when_over_the_threshold()
{
Persistence.GetStreamsToSnapshot(OverThreshold).Any(x => x.StreamId == StreamId).Should().BeFalse();
}
}
public class when_reading_all_commits_from_a_particular_point_in_time : PersistenceEngineConcern
{
private ICommit[] _committed;
private CommitAttempt _first;
private DateTime _now;
private ICommit _second;
private string _streamId;
private ICommit _third;
public when_reading_all_commits_from_a_particular_point_in_time(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
_streamId = Guid.NewGuid().ToString();
_now = SystemTime.UtcNow.AddYears(1);
_first = _streamId.BuildAttempt(_now.AddSeconds(1));
Persistence.Commit(_first);
_second = Persistence.CommitNext(_first);
_third = Persistence.CommitNext(_second);
Persistence.CommitNext(_third);
}
protected override void Because()
{
_committed = Persistence.GetFrom(_now).ToArray();
}
[Fact]
public void should_return_all_commits_on_or_after_the_point_in_time_specified()
{
_committed.Length.Should().Be(4);
}
}
public class when_paging_over_all_commits_from_a_particular_point_in_time : PersistenceEngineConcern
{
private DateTime Start
{
get
{ return (DateTime)Fixture.Variables["start"]; }
set
{ Fixture.Variables["start"] = value; }
}
private CommitAttempt[] Committed
{
get
{ return Fixture.Variables["committed"] as CommitAttempt[]; }
set
{ Fixture.Variables["committed"] = value; }
}
private ICommit[] Loaded
{
get
{ return Fixture.Variables["loaded"] as ICommit[]; }
set
{ Fixture.Variables["loaded"] = value; }
}
public when_paging_over_all_commits_from_a_particular_point_in_time(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
Start = SystemTime.UtcNow;
// Due to loss of precision in various storage engines, we back up one second to ensure
// we include all commits from the 'start'.
Start = Start.AddSeconds(-1);
Committed = Persistence.CommitMany(ConfiguredPageSizeForTesting + 2).ToArray();
}
protected override void Because()
{
Loaded = Persistence.GetFrom(Start).ToArray();
}
[Fact]
public void should_load_the_same_number_of_commits_which_have_been_persisted()
{
Loaded.Length.Should().Be(Committed.Length);
}
[Fact]
public void should_load_the_same_commits_which_have_been_persisted()
{
Committed
.All(commit => Loaded.SingleOrDefault(loaded => loaded.CommitId == commit.CommitId) != null)
.Should().BeTrue();
}
}
public class when_paging_over_all_commits_from_a_particular_checkpoint : PersistenceEngineConcern
{
private const int checkPoint = 2;
public when_paging_over_all_commits_from_a_particular_checkpoint(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
var committed = Persistence.CommitMany(ConfiguredPageSizeForTesting + 1).Select(c => c.CommitId).ToList();
Fixture.Variables.Add("committed", committed);
}
protected override void Because()
{
var loaded = Persistence.GetFrom(checkPoint.ToString()).Select(c => c.CommitId).ToList();
Fixture.Variables.Add("loaded", loaded);
}
[Fact]
public void should_load_the_same_number_of_commits_which_have_been_persisted_starting_from_the_checkpoint()
{
var loaded = Fixture.Variables["loaded"] as List<Guid>;
var committed = Fixture.Variables["committed"] as List<Guid>;
loaded.Count.Should().Be(committed.Count() - checkPoint);
}
[Fact]
public void should_load_only_the_commits_starting_from_the_checkpoint()
{
var loaded = Fixture.Variables["loaded"] as List<Guid>;
var committed = Fixture.Variables["committed"] as List<Guid>;
committed.Skip(checkPoint).All(x => loaded.Contains(x)).Should().BeTrue(); // all commits should be found in loaded collection
}
}
public class when_reading_all_commits_from_the_year_1_AD : PersistenceEngineConcern
{
private Exception _thrown;
public when_reading_all_commits_from_the_year_1_AD(TestFixture fixture)
: base(fixture)
{ }
protected override void Because()
{
// ReSharper disable once ReturnValueOfPureMethodIsNotUsed
_thrown = Catch.Exception(() => Persistence.GetFrom(DateTime.MinValue).FirstOrDefault());
}
[Fact]
public void should_NOT_throw_an_exception()
{
_thrown.Should().BeNull();
}
}
public class when_purging_all_commits : PersistenceEngineConcern
{
public when_purging_all_commits(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
Persistence.CommitSingle();
}
protected override void Because()
{
Persistence.Purge();
}
[Fact]
public void should_not_find_any_commits_stored()
{
Persistence.GetFrom(DateTime.MinValue).Count().Should().Be(0);
}
[Fact]
public void should_not_find_any_streams_to_snapshot()
{
Persistence.GetStreamsToSnapshot(0).Count().Should().Be(0);
}
[Fact]
public void should_not_find_any_undispatched_commits()
{
Persistence.GetUndispatchedCommits().Count().Should().Be(0);
}
}
public class when_invoking_after_disposal : PersistenceEngineConcern
{
private Exception _thrown;
public when_invoking_after_disposal(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
Persistence.Dispose();
}
protected override void Because()
{
_thrown = Catch.Exception(() => Persistence.CommitSingle());
}
[Fact]
public void should_throw_an_ObjectDisposedException()
{
_thrown.Should().BeOfType<ObjectDisposedException>();
}
}
public class when_committing_a_stream_with_the_same_id_as_a_stream_in_another_bucket : PersistenceEngineConcern
{
const string _bucketAId = "a";
const string _bucketBId = "b";
private string StreamId
{
get
{ return Fixture.Variables["streamId"] as string; }
set
{ Fixture.Variables["streamId"] = value; }
}
private CommitAttempt AttemptForBucketB
{
get
{ return Fixture.Variables["attemptForBucketB"] as CommitAttempt; }
set
{ Fixture.Variables["attemptForBucketB"] = value; }
}
private Exception Thrown
{
get
{ return Fixture.Variables["thrown"] as Exception; }
set
{ Fixture.Variables["thrown"] = value; }
}
private DateTime AttemptACommitStamp
{
get
{ return (DateTime) Fixture.Variables["attemptACommitStamp"]; }
set
{ Fixture.Variables["attemptACommitStamp"] = value; }
}
public when_committing_a_stream_with_the_same_id_as_a_stream_in_another_bucket(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
StreamId = Guid.NewGuid().ToString();
DateTime now = SystemTime.UtcNow;
Persistence.Commit(StreamId.BuildAttempt(now, _bucketAId));
AttemptACommitStamp = Persistence.GetFrom(_bucketAId, StreamId, 0, int.MaxValue).First().CommitStamp;
AttemptForBucketB = StreamId.BuildAttempt(now.Subtract(TimeSpan.FromDays(1)), _bucketBId);
}
protected override void Because()
{
Thrown = Catch.Exception(() => Persistence.Commit(AttemptForBucketB));
}
[Fact]
public void should_succeed()
{
Thrown.Should().BeNull();
}
[Fact]
public void should_persist_to_the_correct_bucket()
{
ICommit[] stream = Persistence.GetFrom(_bucketBId, StreamId, 0, int.MaxValue).ToArray();
stream.Should().NotBeNull();
stream.Count().Should().Be(1);
}
[Fact]
public void should_not_affect_the_stream_from_the_other_bucket()
{
ICommit[] stream = Persistence.GetFrom(_bucketAId, StreamId, 0, int.MaxValue).ToArray();
stream.Should().NotBeNull();
stream.Count().Should().Be(1);
stream.First().CommitStamp.Should().Be(AttemptACommitStamp);
}
}
public class when_saving_a_snapshot_for_a_stream_with_the_same_id_as_a_stream_in_another_bucket : PersistenceEngineConcern
{
const string _bucketAId = "a";
const string _bucketBId = "b";
string _streamId;
private static Snapshot _snapshot;
public when_saving_a_snapshot_for_a_stream_with_the_same_id_as_a_stream_in_another_bucket(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
_streamId = Guid.NewGuid().ToString();
_snapshot = new Snapshot(_bucketBId, _streamId, 1, "Snapshot");
Persistence.Commit(_streamId.BuildAttempt(bucketId: _bucketAId));
Persistence.Commit(_streamId.BuildAttempt(bucketId: _bucketBId));
}
protected override void Because()
{
Persistence.AddSnapshot(_snapshot);
}
[Fact]
public void should_affect_snapshots_from_another_bucket()
{
Persistence.GetSnapshot(_bucketAId, _streamId, _snapshot.StreamRevision).Should().BeNull();
}
}
public class when_reading_all_commits_from_a_particular_point_in_time_and_there_are_streams_in_multiple_buckets : PersistenceEngineConcern
{
const string _bucketAId = "a";
const string _bucketBId = "b";
private static DateTime _now;
private static ICommit[] _returnedCommits;
private CommitAttempt _commitToBucketB;
public when_reading_all_commits_from_a_particular_point_in_time_and_there_are_streams_in_multiple_buckets(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
_now = SystemTime.UtcNow.AddYears(1);
var commitToBucketA = Guid.NewGuid().ToString().BuildAttempt(_now.AddSeconds(1), _bucketAId);
Persistence.Commit(commitToBucketA);
Persistence.Commit(commitToBucketA = commitToBucketA.BuildNextAttempt());
Persistence.Commit(commitToBucketA = commitToBucketA.BuildNextAttempt());
Persistence.Commit(commitToBucketA.BuildNextAttempt());
_commitToBucketB = Guid.NewGuid().ToString().BuildAttempt(_now.AddSeconds(1), _bucketBId);
Persistence.Commit(_commitToBucketB);
}
protected override void Because()
{
_returnedCommits = Persistence.GetFrom(_bucketAId, _now).ToArray();
}
[Fact]
public void should_not_return_commits_from_other_buckets()
{
_returnedCommits.Any(c => c.CommitId.Equals(_commitToBucketB.CommitId)).Should().BeFalse();
}
}
public class when_getting_all_commits_since_checkpoint_and_there_are_streams_in_multiple_buckets : PersistenceEngineConcern
{
private ICommit[] Commits
{
get
{ return Fixture.Variables[nameof(Commits)] as ICommit[]; }
set
{ Fixture.Variables[nameof(Commits)] = value; }
}
public when_getting_all_commits_since_checkpoint_and_there_are_streams_in_multiple_buckets(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
const string bucketAId = "a";
const string bucketBId = "b";
Persistence.Commit(Guid.NewGuid().ToString().BuildAttempt(bucketId: bucketAId));
Persistence.Commit(Guid.NewGuid().ToString().BuildAttempt(bucketId: bucketBId));
Persistence.Commit(Guid.NewGuid().ToString().BuildAttempt(bucketId: bucketAId));
}
protected override void Because()
{
Commits = Persistence.GetFromStart().ToArray();
}
[Fact]
public void should_not_be_empty()
{
Commits.Should().NotBeNull();
}
[Fact]
public void should_be_in_order_by_checkpoint()
{
ICheckpoint checkpoint = Persistence.GetCheckpoint();
foreach (var commit in Commits)
{
ICheckpoint commitCheckpoint = Persistence.GetCheckpoint(commit.CheckpointToken);
commitCheckpoint.Should().BeGreaterThan(checkpoint);
checkpoint = Persistence.GetCheckpoint(commit.CheckpointToken);
}
}
}
public class when_purging_all_commits_and_there_are_streams_in_multiple_buckets : PersistenceEngineConcern
{
const string _bucketAId = "a";
const string _bucketBId = "b";
string _streamId;
public when_purging_all_commits_and_there_are_streams_in_multiple_buckets(TestFixture fixture)
: base(fixture)
{ }
protected override void Context()
{
_streamId = Guid.NewGuid().ToString();
Persistence.Commit(_streamId.BuildAttempt(bucketId: _bucketAId));
Persistence.Commit(_streamId.BuildAttempt(bucketId: _bucketBId));
}
protected override void Because()
{
Persistence.Purge();
}
[Fact]
public void should_purge_all_commits_stored_in_bucket_a()
{
Persistence.GetFrom(_bucketAId, DateTime.MinValue).Count().Should().Be(0);
}
[Fact]
public void should_purge_all_commits_stored_in_bucket_b()
{
Persistence.GetFrom(_bucketBId, DateTime.MinValue).Count().Should().Be(0);
}
[Fact]
public void should_purge_all_streams_to_snapshot_in_bucket_a()
{
Persistence.GetStreamsToSnapshot(_bucketAId, 0).Count().Should().Be(0);
}
[Fact]
public void should_purge_all_streams_to_snapshot_in_bucket_b()
{
Persistence.GetStreamsToSnapshot(_bucketBId, 0).Count().Should().Be(0);
}
[Fact]
public void should_purge_all_undispatched_commits()
{
Persistence.GetUndispatchedCommits().Count().Should().Be(0);
}
}
public class when_gettingfromcheckpoint_amount_of_commits_exceeds_pagesize : PersistenceEngineConcern
{
private ICommit[] _commits;
private int _moreThanPageSize;
public when_gettingfromcheckpoint_amount_of_commits_exceeds_pagesize(TestFixture fixture)
: base(fixture)
{ }
protected override void Because()
{
_moreThanPageSize = ConfiguredPageSizeForTesting + 1;
var eventStore = new OptimisticEventStore(Persistence, null);
// TODO: Not sure how to set the actual pagesize to the const defined above
for (int i = 0; i < _moreThanPageSize; i++)
{
using (IEventStream stream = eventStore.OpenStream(Guid.NewGuid()))
{
stream.Add(new EventMessage { Body = i });
stream.CommitChanges(Guid.NewGuid());
}
}
ICommit[] commits = Persistence.GetFrom(DateTime.MinValue).ToArray();
_commits = Persistence.GetFrom().ToArray();
}
[Fact]
public void Should_have_expected_number_of_commits()
{
_commits.Length.Should().Be(_moreThanPageSize);
}
}
public class when_a_payload_is_large : PersistenceEngineConcern
{
public when_a_payload_is_large(TestFixture fixture)
: base(fixture)
{ }
[Fact]
public void can_commit()
{
const int bodyLength = 100000;
var attempt = new CommitAttempt(
Bucket.Default,
Guid.NewGuid().ToString(),
1,
Guid.NewGuid(),
1,
DateTime.UtcNow,
new Dictionary<string, object>(),
new List<EventMessage> { new EventMessage { Body = new string('a', bodyLength) } });
Persistence.Commit(attempt);
ICommit commits = Persistence.GetFrom().Single();
commits.Events.Single().Body.ToString().Length.Should().Be(bodyLength);
}
}
public abstract class PersistenceEngineConcern : SpecificationBase<TestFixture>
{
public PersistenceEngineConcern(TestFixture fixture)
: base(fixture)
{ }
protected IPersistStreams Persistence
{
get
{
if (!Fixture.Variables.TryGetValue(nameof(Persistence), out var persistence))
{
persistence = new PerformanceCounterPersistenceEngine(new InMemoryPersistenceEngine(), "tests");
((IPersistStreams)persistence).Initialize();
Persistence = persistence as IPersistStreams;
}
return persistence as IPersistStreams;
}
set
{ Fixture.Variables[nameof(Persistence)] = value; }
}
protected int ConfiguredPageSizeForTesting
{
get { return 2; }
}
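// ConfiguredPageSizeForTesting is deliberately tiny so that the paging specs above
// (e.g. when_committing_more_events_than_the_configured_page_size) can force multiple
// pages with only ConfiguredPageSizeForTesting + 1 or + 2 commits.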
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.Collections;
using System.Collections.Concurrent;
using System.Diagnostics;
using System.Runtime.InteropServices;
using System.IO.Ports;
using System.Threading;
using System.Threading.Tasks;
namespace System.IO.Ports
{
internal sealed partial class SerialStream : Stream
{
private const int TimeoutResolution = 30;
// time [ms] the loop has to be idle before it stops
private const int IOLoopIdleTimeout = 2000;
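// Lifecycle implied by this constant: the background IOLoop task (started lazily by
// EnsureIOLoopRunning) keeps polling while there are queued reads/writes or a DataReceived
// subscriber; once it has been idle for IOLoopIdleTimeout ms it clears _ioLoop and exits,
// and the next ReadAsync/WriteAsync call or DataReceived subscription starts a fresh loop.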
private bool _ioLoopFinished = false;
private SafeSerialDeviceHandle _handle = null;
private int _baudRate;
private StopBits _stopBits;
private Parity _parity;
private int _dataBits = 8;
private bool _rtsEnable = false;
private int _readTimeout = 0;
private int _writeTimeout = 0;
private byte[] _tempBuf = new byte[1];
private Task _ioLoop;
private object _ioLoopLock = new object();
private ConcurrentQueue<SerialStreamIORequest> _readQueue = new ConcurrentQueue<SerialStreamIORequest>();
private ConcurrentQueue<SerialStreamIORequest> _writeQueue = new ConcurrentQueue<SerialStreamIORequest>();
private long _totalBytesRead = 0;
private long TotalBytesAvailable => _totalBytesRead + BytesToRead;
private long _lastTotalBytesAvailable;
// called when one character is received.
private SerialDataReceivedEventHandler _dataReceived;
internal event SerialDataReceivedEventHandler DataReceived
{
add
{
bool wasNull = _dataReceived == null;
_dataReceived += value;
if (wasNull)
{
EnsureIOLoopRunning();
}
}
remove
{
_dataReceived -= value;
}
}
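// Illustrative subscriber (hypothetical names, not part of this file) showing how attaching a
// handler lazily spins up the polling loop; SerialPort wires this event up internally:
//
//   stream.DataReceived += (s, e) =>
//   {
//       if (e.EventType == SerialData.Chars) { /* drain stream.BytesToRead bytes */ }
//   };
//
// The first subscription sees wasNull == true and calls EnsureIOLoopRunning(), so the
// background poll only runs while someone is listening or I/O is pending.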
// ----SECTION: inherited properties from Stream class ------------*
// These six properties are required for SerialStream to inherit from the abstract Stream class.
// Note four of them are always true or false, and two of them throw exceptions, so these
// are not usefully queried by applications which know they have a SerialStream, etc...
public override int ReadTimeout
{
get { return _readTimeout; }
set
{
if (value < 0 && value != SerialPort.InfiniteTimeout)
throw new ArgumentOutOfRangeException(nameof(ReadTimeout), SR.ArgumentOutOfRange_Timeout);
if (_handle == null) {
InternalResources.FileNotOpen();
}
_readTimeout = value;
}
}
public override int WriteTimeout
{
get { return _writeTimeout; }
set
{
if (value < 0 && value != SerialPort.InfiniteTimeout)
throw new ArgumentOutOfRangeException(nameof(WriteTimeout), SR.ArgumentOutOfRange_Timeout);
if (_handle == null) {
InternalResources.FileNotOpen();
}
_writeTimeout = value;
}
}
private static void CheckBaudRate(int baudRate)
{
if (baudRate <= 0)
{
throw new ArgumentOutOfRangeException(nameof(BaudRate), SR.ArgumentOutOfRange_NeedPosNum);
}
}
internal int BaudRate
{
set
{
if (value != _baudRate)
{
CheckBaudRate(value);
if (Interop.Termios.TermiosSetSpeed(_handle, value) < 0)
{
throw GetLastIOError();
}
_baudRate = value;
}
}
get
{
return Interop.Termios.TermiosGetSpeed(_handle);
}
}
public bool BreakState
{
get { return _inBreak; }
set
{
if (value)
{
// Unlike Windows, there is no infinite break, and a positive duration is platform dependent.
// As a best guess, send a break with the default duration.
Interop.Termios.TermiosSendBreak(_handle, 0);
}
_inBreak = value;
}
}
internal int BytesToWrite
{
get { return Interop.Termios.TermiosGetAvailableBytes(_handle, Interop.Termios.Queue.SendQueue); }
}
internal int BytesToRead
{
get { return Interop.Termios.TermiosGetAvailableBytes(_handle, Interop.Termios.Queue.ReceiveQueue); }
}
internal bool CDHolding
{
get
{
int status = Interop.Termios.TermiosGetSignal(_handle, Interop.Termios.Signals.SignalDcd);
if (status < 0)
{
throw GetLastIOError();
}
return status == 1;
}
}
internal bool CtsHolding
{
get
{
int status = Interop.Termios.TermiosGetSignal(_handle, Interop.Termios.Signals.SignalCts);
if (status < 0)
{
throw GetLastIOError();
}
return status == 1;
}
}
internal bool DsrHolding
{
get
{
int status = Interop.Termios.TermiosGetSignal(_handle, Interop.Termios.Signals.SignalDsr);
if (status < 0)
{
throw GetLastIOError();
}
return status == 1;
}
}
internal bool DtrEnable
{
get
{
int status = Interop.Termios.TermiosGetSignal(_handle, Interop.Termios.Signals.SignalDtr);
if (status < 0)
{
throw GetLastIOError();
}
return status == 1;
}
set
{
if (Interop.Termios.TermiosSetSignal(_handle, Interop.Termios.Signals.SignalDtr, value ? 1 : 0) != 0)
{
throw GetLastIOError();
}
}
}
private bool RtsEnabledNative()
{
int status = Interop.Termios.TermiosGetSignal(_handle, Interop.Termios.Signals.SignalRts);
if (status < 0)
{
throw GetLastIOError();
}
return status == 1;
}
internal bool RtsEnable
{
get
{
if ((_handshake == Handshake.RequestToSend || _handshake == Handshake.RequestToSendXOnXOff))
throw new InvalidOperationException(SR.CantSetRtsWithHandshaking);
return RtsEnabledNative();
}
set
{
if ((_handshake == Handshake.RequestToSend || _handshake == Handshake.RequestToSendXOnXOff))
throw new InvalidOperationException(SR.CantSetRtsWithHandshaking);
if (Interop.Termios.TermiosSetSignal(_handle, Interop.Termios.Signals.SignalRts, value ? 1 : 0) != 0)
{
throw GetLastIOError();
}
}
}
internal Handshake Handshake
{
set
{
Debug.Assert(!(value < Handshake.None || value > Handshake.RequestToSendXOnXOff),
"An invalid value was passed to Handshake");
if (value != _handshake)
{
if (Interop.Termios.TermiosReset(_handle, _baudRate, _dataBits, _stopBits, _parity, value) != 0)
{
throw new ArgumentException();
}
_handshake = value;
}
}
}
internal int DataBits
{
set
{
Debug.Assert(!(value < MinDataBits || value > MaxDataBits), "An invalid value was passed to DataBits");
if (value != _dataBits)
{
if (Interop.Termios.TermiosReset(_handle, _baudRate, value, _stopBits, _parity, _handshake) != 0)
{
throw new ArgumentException();
}
_dataBits = value;
}
}
}
internal Parity Parity
{
set
{
Debug.Assert(!(value < Parity.None || value > Parity.Space), "An invalid value was passed to Parity");
if (value != _parity)
{
if (Interop.Termios.TermiosReset(_handle, _baudRate, _dataBits, _stopBits, value, _handshake) != 0)
{
throw new ArgumentException();
}
_parity = value;
}
}
}
internal StopBits StopBits
{
set
{
Debug.Assert(!(value < StopBits.One || value > StopBits.OnePointFive), "An invalid value was passed to StopBits");
if (value != _stopBits)
{
if (Interop.Termios.TermiosReset(_handle, _baudRate, _dataBits, value, _parity, _handshake) != 0)
{
throw new ArgumentException();
}
_stopBits = value;
}
}
}
internal bool DiscardNull
{
set
{
// Ignore.
}
}
internal byte ParityReplace
{
set
{
// Ignore.
}
}
internal void DiscardInBuffer()
{
if (_handle == null) InternalResources.FileNotOpen();
// This may or may not work depending on hardware.
Interop.Termios.TermiosDiscard(_handle, Interop.Termios.Queue.ReceiveQueue);
}
internal void DiscardOutBuffer()
{
if (_handle == null) InternalResources.FileNotOpen();
// This may or may not work depending on hardware.
Interop.Termios.TermiosDiscard(_handle, Interop.Termios.Queue.SendQueue);
}
internal void SetBufferSizes(int readBufferSize, int writeBufferSize)
{
if (_handle == null) InternalResources.FileNotOpen();
// Ignore for now.
}
internal bool IsOpen => _handle != null;
// Flush dumps the contents of the serial driver's internal read and write buffers.
// We actually expose the functionality for each, but fulfilling Stream's contract
// requires a Flush() method. Fails if handle closed.
// Note: Serial driver's write buffer is *already* attempting to write it, so we can only wait until it finishes.
public override void Flush()
{
if (_handle == null) InternalResources.FileNotOpen();
Interop.Termios.TermiosDiscard(_handle, Interop.Termios.Queue.AllQueues);
}
internal int ReadByte(int timeout)
{
Read(_tempBuf, 0, 1, timeout);
return _tempBuf[0];
}
public override int Read(byte[] array, int offset, int count)
{
return Read(array, offset, count, ReadTimeout);
}
internal unsafe int Read(byte[] array, int offset, int count, int timeout)
{
using (CancellationTokenSource cts = GetCancellationTokenSourceFromTimeout(timeout))
{
Task<int> t = ReadAsync(array, offset, count, cts?.Token ?? CancellationToken.None);
try
{
return t.GetAwaiter().GetResult();
}
catch (OperationCanceledException)
{
throw new TimeoutException();
}
}
}
public override int EndRead(IAsyncResult asyncResult)
=> EndReadWrite(asyncResult);
public override Task<int> ReadAsync(byte[] array, int offset, int count, CancellationToken cancellationToken)
{
CheckReadWriteArguments(array, offset, count);
if (count == 0)
return Task<int>.FromResult(0); // return immediately if no bytes requested; no need for overhead.
Memory<byte> buffer = new Memory<byte>(array, offset, count);
SerialStreamIORequest result = new SerialStreamIORequest(cancellationToken, buffer);
_readQueue.Enqueue(result);
EnsureIOLoopRunning();
return result.Task;
}
public override Task WriteAsync(byte[] array, int offset, int count, CancellationToken cancellationToken)
{
CheckWriteArguments(array, offset, count);
if (count == 0)
return Task.CompletedTask; // return immediately if no bytes to write; no need for overhead.
Memory<byte> buffer = new Memory<byte>(array, offset, count);
SerialStreamIORequest result = new SerialStreamIORequest(cancellationToken, buffer);
_writeQueue.Enqueue(result);
EnsureIOLoopRunning();
return result.Task;
}
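// Both async paths above share the same producer/consumer shape: the request is wrapped in a
// SerialStreamIORequest (a TaskCompletionSource over the caller's buffer), enqueued on
// _readQueue/_writeQueue, and EnsureIOLoopRunning() guarantees the single IOLoop task is alive
// to drain those queues via ProcessRead/ProcessWrite once poll() reports the descriptor ready.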
public override IAsyncResult BeginRead(byte[] array, int offset, int numBytes, AsyncCallback userCallback, object stateObject)
{
return TaskToApm.Begin(ReadAsync(array, offset, numBytes), userCallback, stateObject);
}
// Will wait `timeout` milliseconds or until reading or writing is possible.
// Throws if neither a read nor a write is requested.
// Returns the poll events that occurred.
private Interop.Sys.PollEvents PollEvents(int timeout, bool pollReadEvents, bool pollWriteEvents, out Interop.ErrorInfo? error)
{
if (!pollReadEvents && !pollWriteEvents)
{
Debug.Fail("This should not happen");
throw new Exception();
}
Interop.Sys.PollEvents eventsToPoll = Interop.Sys.PollEvents.POLLERR;
if (pollReadEvents)
{
eventsToPoll |= Interop.Sys.PollEvents.POLLIN;
}
if (pollWriteEvents)
{
eventsToPoll |= Interop.Sys.PollEvents.POLLOUT;
}
Interop.Sys.PollEvents events = Interop.Sys.PollEvents.POLLNONE;
Interop.Error ret = Interop.Sys.Poll(
_handle,
eventsToPoll,
timeout,
out events);
error = ret != Interop.Error.SUCCESS ? Interop.Sys.GetLastErrorInfo() : (Interop.ErrorInfo?)null;
return events;
}
internal void Write(byte[] array, int offset, int count, int timeout)
{
using (CancellationTokenSource cts = GetCancellationTokenSourceFromTimeout(timeout))
{
Task t = WriteAsync(array, offset, count, cts?.Token ?? CancellationToken.None);
try
{
t.GetAwaiter().GetResult();
}
catch (OperationCanceledException)
{
throw new TimeoutException();
}
}
}
public override IAsyncResult BeginWrite(byte[] array, int offset, int count, AsyncCallback userCallback, object stateObject)
{
return TaskToApm.Begin(WriteAsync(array, offset, count), userCallback, stateObject);
}
public override void EndWrite(IAsyncResult asyncResult)
=> EndReadWrite(asyncResult);
private int EndReadWrite(IAsyncResult asyncResult)
{
try
{
return TaskToApm.End<int>(asyncResult);
}
catch (OperationCanceledException)
{
throw new TimeoutException();
}
}
// this method is used by SerialPort upon SerialStream's creation
internal SerialStream(string portName, int baudRate, Parity parity, int dataBits, StopBits stopBits, int readTimeout, int writeTimeout, Handshake handshake,
bool dtrEnable, bool rtsEnable, bool discardNull, byte parityReplace)
{
if (portName == null)
{
throw new ArgumentNullException(nameof(portName));
}
CheckBaudRate(baudRate);
// Error checking done in SerialPort.
SafeSerialDeviceHandle tempHandle = SafeSerialDeviceHandle.Open(portName);
try
{
_handle = tempHandle;
// set properties of the stream that exist as members in SerialStream
_portName = portName;
_handshake = handshake;
_parity = parity;
_readTimeout = readTimeout;
_writeTimeout = writeTimeout;
_baudRate = baudRate;
_stopBits = stopBits;
_dataBits = dataBits;
if (Interop.Termios.TermiosReset(_handle, _baudRate, _dataBits, _stopBits, _parity, _handshake) != 0)
{
throw new ArgumentException();
}
DtrEnable = dtrEnable;
BaudRate = baudRate;
// now set this.RtsEnable to the specified value.
// Handshake takes precedence, this will be a nop if
// handshake is either RequestToSend or RequestToSendXOnXOff
if ((handshake != Handshake.RequestToSend && handshake != Handshake.RequestToSendXOnXOff))
{
// query and cache the initial RtsEnable value
// so that set_RtsEnable can do the (value != rtsEnable) optimization
_rtsEnable = RtsEnabledNative();
RtsEnable = rtsEnable;
}
}
catch
{
// if there are any exceptions after the call to CreateFile, we need to be sure to close the
// handle before we let them continue up.
tempHandle.Dispose();
_handle = null;
throw;
}
_processReadDelegate = ProcessRead;
_processWriteDelegate = ProcessWrite;
_lastTotalBytesAvailable = TotalBytesAvailable;
}
private void EnsureIOLoopRunning()
{
lock (_ioLoopLock)
{
if (_ioLoop == null)
{
Debug.Assert(_handle != null);
_ioLoop = Task.Factory.StartNew(
IOLoop,
CancellationToken.None,
TaskCreationOptions.LongRunning,
TaskScheduler.Default);
}
}
}
private void FinishPendingIORequests(Interop.ErrorInfo? error = null)
{
while (_readQueue.TryDequeue(out SerialStreamIORequest r))
{
r.Complete(error.HasValue ?
Interop.GetIOException(error.Value) :
InternalResources.FileNotOpenException());
}
while (_writeQueue.TryDequeue(out SerialStreamIORequest r))
{
r.Complete(error.HasValue ?
Interop.GetIOException(error.Value) :
InternalResources.FileNotOpenException());
}
}
protected override void Dispose(bool disposing)
{
_ioLoopFinished = true;
if (disposing)
{
_ioLoop?.GetAwaiter().GetResult();
_ioLoop = null;
FinishPendingIORequests();
if (_handle != null)
{
_handle.Dispose();
_handle = null;
}
}
base.Dispose(disposing);
}
// RaiseDataReceivedChars and RaiseDataReceivedEof could be one function
// but are currently split to avoid allocation related to context
private void RaiseDataReceivedChars()
{
if (_dataReceived != null)
{
ThreadPool.QueueUserWorkItem(s => {
var thisRef = (SerialStream)s;
SerialDataReceivedEventHandler dataReceived = thisRef._dataReceived;
if (dataReceived != null)
{
dataReceived(thisRef, new SerialDataReceivedEventArgs(SerialData.Chars));
}
}, this);
}
}
private void RaiseDataReceivedEof()
{
if (_dataReceived != null)
{
ThreadPool.QueueUserWorkItem(s => {
var thisRef = (SerialStream)s;
SerialDataReceivedEventHandler dataReceived = thisRef._dataReceived;
if (dataReceived != null)
{
dataReceived(thisRef, new SerialDataReceivedEventArgs(SerialData.Eof));
}
}, this);
}
}
// should return a non-negative integer meaning the number of bytes read/written (0 for errors)
private delegate int RequestProcessor(SerialStreamIORequest r);
private RequestProcessor _processReadDelegate;
private RequestProcessor _processWriteDelegate;
private unsafe int ProcessRead(SerialStreamIORequest r)
{
Span<byte> buff = r.Buffer.Span;
fixed (byte* bufPtr = buff)
{
// assumes dequeue-ing happens on a single thread
int numBytes = Interop.Sys.Read(_handle, bufPtr, buff.Length);
if (numBytes < 0)
{
Interop.ErrorInfo lastError = Interop.Sys.GetLastErrorInfo();
// ignore EWOULDBLOCK since we handle timeout elsewhere
if (lastError.Error != Interop.Error.EWOULDBLOCK)
{
r.Complete(Interop.GetIOException(lastError));
}
}
else if (numBytes > 0)
{
r.Complete(numBytes);
return numBytes;
}
else // numBytes == 0
{
RaiseDataReceivedEof();
}
}
return 0;
}
private unsafe int ProcessWrite(SerialStreamIORequest r)
{
ReadOnlySpan<byte> buff = r.Buffer.Span;
fixed (byte* bufPtr = buff)
{
// assumes dequeue-ing happens on a single thread
int numBytes = Interop.Sys.Write(_handle, bufPtr, buff.Length);
if (numBytes <= 0)
{
Interop.ErrorInfo lastError = Interop.Sys.GetLastErrorInfo();
// ignore EWOULDBLOCK since we handle timeout elsewhere
// numBytes == 0 means that there might be an error
if (lastError.Error != Interop.Error.SUCCESS && lastError.Error != Interop.Error.EWOULDBLOCK)
{
r.Complete(Interop.GetIOException(lastError));
}
}
else
{
r.ProcessBytes(numBytes);
if (r.Buffer.Length == 0)
{
r.Complete();
}
return numBytes;
}
}
return 0;
}
// returns number of bytes read/written
private static int DoIORequest(ConcurrentQueue<SerialStreamIORequest> q, RequestProcessor op)
{
// assumes dequeue-ing happens on a single thread
while (q.TryPeek(out SerialStreamIORequest r))
{
if (r.IsCompleted)
{
q.TryDequeue(out _);
// take another item since we haven't processed anything
continue;
}
int ret = op(r);
Debug.Assert(ret >= 0);
if (r.IsCompleted)
{
q.TryDequeue(out _);
}
return ret;
}
return 0;
}
private unsafe void IOLoop()
{
bool eofReceived = false;
// we do not care about bytes we got before - only about changes;
// the loop has just started, which means we just received a request
bool lastIsIdle = false;
int ticksWhenIdleStarted = 0;
while (IsOpen && !eofReceived && !_ioLoopFinished)
{
bool hasPendingReads = !_readQueue.IsEmpty;
bool hasPendingWrites = !_writeQueue.IsEmpty;
bool hasPendingIO = hasPendingReads || hasPendingWrites;
bool isIdle = _dataReceived == null && !hasPendingIO;
if (!hasPendingIO)
{
if (isIdle)
{
if (!lastIsIdle)
{
// we've just started idling
ticksWhenIdleStarted = Environment.TickCount;
}
else if (Environment.TickCount - ticksWhenIdleStarted > IOLoopIdleTimeout)
{
// we are already idling for a while
// let's stop the loop until there is some work to do
lock (_ioLoopLock)
{
// double check we are done under lock
if (_dataReceived == null && _readQueue.IsEmpty && _writeQueue.IsEmpty)
{
_ioLoop = null;
break;
}
else
{
// to make sure timer restarts
lastIsIdle = false;
continue;
}
}
}
}
Thread.Sleep(1);
}
else
{
Interop.Sys.PollEvents events = PollEvents(1,
pollReadEvents: hasPendingReads,
pollWriteEvents: hasPendingWrites,
out Interop.ErrorInfo? error);
if (error.HasValue)
{
FinishPendingIORequests(error);
break;
}
if (events.HasFlag(Interop.Sys.PollEvents.POLLNVAL) ||
events.HasFlag(Interop.Sys.PollEvents.POLLERR))
{
// bad descriptor or some other error we can't handle
FinishPendingIORequests();
break;
}
if (events.HasFlag(Interop.Sys.PollEvents.POLLIN))
{
int bytesRead = DoIORequest(_readQueue, _processReadDelegate);
_totalBytesRead += bytesRead;
}
if (events.HasFlag(Interop.Sys.PollEvents.POLLOUT))
{
DoIORequest(_writeQueue, _processWriteDelegate);
}
}
// check if there is any new data (either already read or in the driver input)
// this event is private and handled inside of SerialPort
// which then throttles it with the threshold
long totalBytesAvailable = TotalBytesAvailable;
if (totalBytesAvailable > _lastTotalBytesAvailable)
{
_lastTotalBytesAvailable = totalBytesAvailable;
RaiseDataReceivedChars();
}
lastIsIdle = isIdle;
}
}
private static CancellationTokenSource GetCancellationTokenSourceFromTimeout(int timeoutMs)
{
return timeoutMs == SerialPort.InfiniteTimeout ?
null :
new CancellationTokenSource(Math.Max(timeoutMs, TimeoutResolution));
}
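// Timeouts are modeled as cancellation: a finite timeout becomes a CancellationTokenSource that
// cancels the queued request, and the synchronous Read/Write wrappers above translate the
// resulting OperationCanceledException into the TimeoutException that SerialPort callers expect.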
private static Exception GetLastIOError()
{
return Interop.GetIOException(Interop.Sys.GetLastErrorInfo());
}
private class SerialStreamIORequest : TaskCompletionSource<int>
{
public Memory<byte> Buffer { get; private set; }
public bool IsCompleted => Task.IsCompleted;
private CancellationToken _cancellationToken;
public SerialStreamIORequest(CancellationToken ct, Memory<byte> buffer)
: base(TaskCreationOptions.RunContinuationsAsynchronously)
{
_cancellationToken = ct;
ct.Register(s => ((TaskCompletionSource<int>)s).TrySetCanceled(), this);
Buffer = buffer;
}
internal void Complete()
{
Debug.Assert(Buffer.Length == 0);
TrySetResult(Buffer.Length);
}
internal void Complete(int numBytes)
{
TrySetResult(numBytes);
}
internal void Complete(Exception exception)
{
TrySetException(exception);
}
internal void ProcessBytes(int numBytes)
{
Buffer = Buffer.Slice(numBytes);
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//
// This is used internally to create best fit behavior as per the original windows best fit behavior.
//
using System;
using System.Text;
using System.Threading;
using System.Diagnostics;
namespace System.Text
{
internal sealed class InternalDecoderBestFitFallback : DecoderFallback
{
// Our variables
internal BaseCodePageEncoding encoding;
internal char[]? arrayBestFit = null;
internal char cReplacement = '?';
internal InternalDecoderBestFitFallback(BaseCodePageEncoding _encoding)
{
// Need to load our replacement characters table.
encoding = _encoding;
}
public override DecoderFallbackBuffer CreateFallbackBuffer() =>
new InternalDecoderBestFitFallbackBuffer(this);
// Maximum number of characters that this instance of this fallback could return
public override int MaxCharCount => 1;
public override bool Equals(object? value) =>
value is InternalDecoderBestFitFallback that && encoding.CodePage == that.encoding.CodePage;
public override int GetHashCode() => encoding.CodePage;
}
internal sealed class InternalDecoderBestFitFallbackBuffer : DecoderFallbackBuffer
{
// Our variables
internal char cBestFit = '\0';
internal int iCount = -1;
internal int iSize;
private readonly InternalDecoderBestFitFallback _oFallback;
// Private object for locking instead of locking on a public type for SQL reliability work.
private static object? s_InternalSyncObject;
private static object InternalSyncObject
{
get
{
if (s_InternalSyncObject == null)
{
object o = new object();
Interlocked.CompareExchange<object?>(ref s_InternalSyncObject, o, null);
}
return s_InternalSyncObject;
}
}
// Constructor
public InternalDecoderBestFitFallbackBuffer(InternalDecoderBestFitFallback fallback)
{
_oFallback = fallback;
if (_oFallback.arrayBestFit == null)
{
// Lock so we don't confuse ourselves.
lock (InternalSyncObject)
{
// Double check before we do it again.
if (_oFallback.arrayBestFit == null)
_oFallback.arrayBestFit = fallback.encoding.GetBestFitBytesToUnicodeData();
}
}
}
// Fallback methods
public override bool Fallback(byte[] bytesUnknown, int index)
{
// We expect no previous fallback in our buffer
Debug.Assert(iCount < 1, "[DecoderReplacementFallbackBuffer.Fallback] Calling fallback without a previously empty buffer");
cBestFit = TryBestFit(bytesUnknown);
if (cBestFit == '\0')
cBestFit = _oFallback.cReplacement;
iCount = iSize = 1;
return true;
}
// Default version is overridden in DecoderReplacementFallback.cs
public override char GetNextChar()
{
// We want it to get < 0 because == 0 means that the current/last character is a fallback
// and we need to detect recursion. We could have a flag but we already have this counter.
iCount--;
// Do we have anything left? 0 is now last fallback char, negative is nothing left
if (iCount < 0)
return '\0';
// Need to get it out of the buffer.
// Make sure it didn't wrap from the fast count-- path
if (iCount == int.MaxValue)
{
iCount = -1;
return '\0';
}
// Return the best fit character
return cBestFit;
}
public override bool MovePrevious()
{
// Exception fallback doesn't have anywhere to back up to.
if (iCount >= 0)
iCount++;
// Return true if we could do it.
return (iCount >= 0 && iCount <= iSize);
}
// How many characters left to output?
public override int Remaining
{
get
{
return (iCount > 0) ? iCount : 0;
}
}
// Clear the buffer
public override unsafe void Reset()
{
iCount = -1;
}
// This version just counts the fallback and doesn't actually copy anything.
internal unsafe int InternalFallback(byte[] bytes, byte* pBytes)
// Right now this has both bytes and bytes[], since we might have extra bytes, hence the
// array, and we might need the index, hence the byte*
{
// return our replacement string length (always 1 for InternalDecoderBestFitFallback, either
// a best fit char or '?')
return 1;
}
// private helper methods
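// TryBestFit below relies on arrayBestFit being interleaved [byteValue, bestFitChar] pairs sorted
// by byteValue, e.g. (hypothetical values) { (char)0x82, ',', (char)0x8B, '<', ... }; the binary
// search therefore steps in units of 2 and masks the index with 0xFFFE to stay on a key position,
// returning the element at index + 1 as the replacement character.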
private char TryBestFit(byte[] bytesCheck)
{
// Need to figure out our best fit character, low is beginning of array, high is 1 AFTER end of array
int lowBound = 0;
int highBound = _oFallback.arrayBestFit!.Length;
int index;
char cCheck;
// Check trivial case first (no best fit)
if (highBound == 0)
return '\0';
// If our array is too small or too big we can't check
if (bytesCheck.Length == 0 || bytesCheck.Length > 2)
return '\0';
if (bytesCheck.Length == 1)
cCheck = unchecked((char)bytesCheck[0]);
else
cCheck = unchecked((char)((bytesCheck[0] << 8) + bytesCheck[1]));
// Check trivial out of range case
if (cCheck < _oFallback.arrayBestFit[0] || cCheck > _oFallback.arrayBestFit[highBound - 2])
return '\0';
// Binary search the array
int iDiff;
while ((iDiff = (highBound - lowBound)) > 6)
{
// Look in the middle, which is complicated by the fact that we have 2 #s for each pair,
// so we don't want index to be odd because it must be word aligned.
// Also note that index can never == highBound (because diff is rounded down)
index = ((iDiff / 2) + lowBound) & 0xFFFE;
char cTest = _oFallback.arrayBestFit[index];
if (cTest == cCheck)
{
// We found it
Debug.Assert(index + 1 < _oFallback.arrayBestFit.Length,
"[InternalDecoderBestFitFallbackBuffer.TryBestFit]Expected replacement character at end of array");
return _oFallback.arrayBestFit[index + 1];
}
else if (cTest < cCheck)
{
// We weren't high enough
lowBound = index;
}
else
{
// We weren't low enough
highBound = index;
}
}
for (index = lowBound; index < highBound; index += 2)
{
if (_oFallback.arrayBestFit[index] == cCheck)
{
// We found it
Debug.Assert(index + 1 < _oFallback.arrayBestFit.Length,
"[InternalDecoderBestFitFallbackBuffer.TryBestFit]Expected replacement character at end of array");
return _oFallback.arrayBestFit[index + 1];
}
}
// Char wasn't in our table
return '\0';
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace System {
using System;
using System.Threading;
using System.Globalization;
using System.Runtime.InteropServices;
using System.Runtime.CompilerServices;
using System.Runtime.Serialization;
using System.Security.Permissions;
using System.Diagnostics.Contracts;
// DateTimeOffset is a value type that consists of a DateTime and a time zone offset,
// i.e. how far away the time is from GMT. The DateTime is stored whole, and the offset
// is stored as an Int16 internally to save space, but presented as a TimeSpan.
//
// The range is constrained so that both the represented clock time and the represented
// UTC time fit within the boundaries of MaxValue. This gives it the same range as DateTime
// for actual UTC times, and a slightly constrained range on one end when an offset is
// present.
//
// This class should be substitutable for date time in most cases; so most operations
// effectively work on the clock time. However, the underlying UTC time is what counts
// for the purposes of identity, sorting and subtracting two instances.
//
//
// There are theoretically two date times stored, the UTC and the relative local representation
// or the 'clock' time. It actually does not matter which is stored in m_dateTime, so it is desirable
// for most methods to go through the helpers UtcDateTime and ClockDateTime both to abstract this
// out and for internal readability.
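// A small illustration of the "identity is UTC" rule described above (the times are made up):
//
//   var paris = new DateTimeOffset(2003, 10, 26, 14, 0, 0, TimeSpan.FromHours(1));
//   var utc   = new DateTimeOffset(2003, 10, 26, 13, 0, 0, TimeSpan.Zero);
//
//   // paris == utc and paris.CompareTo(utc) == 0, because equality, ordering and subtraction
//   // all go through UtcDateTime, while ToString() still reflects each instance's clock time.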
[StructLayout(LayoutKind.Auto)]
#if FEATURE_SERIALIZATION
[Serializable]
#endif
public struct DateTimeOffset : IComparable, IFormattable, ISerializable, IDeserializationCallback,
IComparable<DateTimeOffset>, IEquatable<DateTimeOffset> {
// Constants
internal const Int64 MaxOffset = TimeSpan.TicksPerHour * 14;
internal const Int64 MinOffset = -MaxOffset;
private const long UnixEpochTicks = TimeSpan.TicksPerDay * DateTime.DaysTo1970; // 621,355,968,000,000,000
private const long UnixEpochSeconds = UnixEpochTicks / TimeSpan.TicksPerSecond; // 62,135,596,800
private const long UnixEpochMilliseconds = UnixEpochTicks / TimeSpan.TicksPerMillisecond; // 62,135,596,800,000
internal const long UnixMinSeconds = DateTime.MinTicks / TimeSpan.TicksPerSecond - UnixEpochSeconds;
internal const long UnixMaxSeconds = DateTime.MaxTicks / TimeSpan.TicksPerSecond - UnixEpochSeconds;
// Static Fields
public static readonly DateTimeOffset MinValue = new DateTimeOffset(DateTime.MinTicks, TimeSpan.Zero);
public static readonly DateTimeOffset MaxValue = new DateTimeOffset(DateTime.MaxTicks, TimeSpan.Zero);
// Instance Fields
private DateTime m_dateTime;
private Int16 m_offsetMinutes;
// Constructors
// Constructs a DateTimeOffset from a tick count and offset
public DateTimeOffset(long ticks, TimeSpan offset) {
m_offsetMinutes = ValidateOffset(offset);
// Let the DateTime constructor do the range checks
DateTime dateTime = new DateTime(ticks);
m_dateTime = ValidateDate(dateTime, offset);
}
// Constructs a DateTimeOffset from a DateTime. For Local and Unspecified kinds,
// extracts the local offset. For UTC, creates a UTC instance with a zero offset.
public DateTimeOffset(DateTime dateTime) {
TimeSpan offset;
if (dateTime.Kind != DateTimeKind.Utc) {
// Local and Unspecified are both treated as Local
offset = TimeZoneInfo.GetLocalUtcOffset(dateTime, TimeZoneInfoOptions.NoThrowOnInvalidTime);
}
else {
offset = new TimeSpan(0);
}
m_offsetMinutes = ValidateOffset(offset);
m_dateTime = ValidateDate(dateTime, offset);
}
// Constructs a DateTimeOffset from a DateTime and an offset. Always makes the clock time
// consistent with the DateTime. For Utc, ensures the offset is zero. For Local, ensures that
// the offset corresponds to the local time zone.
public DateTimeOffset(DateTime dateTime, TimeSpan offset) {
if (dateTime.Kind == DateTimeKind.Local) {
if (offset != TimeZoneInfo.GetLocalUtcOffset(dateTime, TimeZoneInfoOptions.NoThrowOnInvalidTime)) {
throw new ArgumentException(Environment.GetResourceString("Argument_OffsetLocalMismatch"), "offset");
}
}
else if (dateTime.Kind == DateTimeKind.Utc) {
if (offset != TimeSpan.Zero) {
throw new ArgumentException(Environment.GetResourceString("Argument_OffsetUtcMismatch"), "offset");
}
}
m_offsetMinutes = ValidateOffset(offset);
m_dateTime = ValidateDate(dateTime, offset);
}
// Constructs a DateTimeOffset from a given year, month, day, hour,
// minute, second and offset.
public DateTimeOffset(int year, int month, int day, int hour, int minute, int second, TimeSpan offset) {
m_offsetMinutes = ValidateOffset(offset);
m_dateTime = ValidateDate(new DateTime(year, month, day, hour, minute, second), offset);
}
// Constructs a DateTimeOffset from a given year, month, day, hour,
// minute, second, millisecond and offset
public DateTimeOffset(int year, int month, int day, int hour, int minute, int second, int millisecond, TimeSpan offset) {
m_offsetMinutes = ValidateOffset(offset);
m_dateTime = ValidateDate(new DateTime(year, month, day, hour, minute, second, millisecond), offset);
}
// Constructs a DateTimeOffset from a given year, month, day, hour,
// minute, second, millisecond, Calendar and offset.
public DateTimeOffset(int year, int month, int day, int hour, int minute, int second, int millisecond, Calendar calendar, TimeSpan offset) {
m_offsetMinutes = ValidateOffset(offset);
m_dateTime = ValidateDate(new DateTime(year, month, day, hour, minute, second, millisecond, calendar), offset);
}
// Returns a DateTimeOffset representing the current date and time. The
// resolution of the returned value depends on the system timer. For
// Windows NT 3.5 and later the timer resolution is approximately 10ms,
// for Windows NT 3.1 it is approximately 16ms, and for Windows 95 and 98
// it is approximately 55ms.
//
public static DateTimeOffset Now {
get {
return new DateTimeOffset(DateTime.Now);
}
}
public static DateTimeOffset UtcNow {
get {
return new DateTimeOffset(DateTime.UtcNow);
}
}
public DateTime DateTime {
get {
return ClockDateTime;
}
}
public DateTime UtcDateTime {
[Pure]
get {
Contract.Ensures(Contract.Result<DateTime>().Kind == DateTimeKind.Utc);
return DateTime.SpecifyKind(m_dateTime, DateTimeKind.Utc);
}
}
public DateTime LocalDateTime {
[Pure]
get {
Contract.Ensures(Contract.Result<DateTime>().Kind == DateTimeKind.Local);
return UtcDateTime.ToLocalTime();
}
}
// Adjust to a given offset with the same UTC time. Can throw ArgumentException
//
public DateTimeOffset ToOffset(TimeSpan offset) {
return new DateTimeOffset((m_dateTime + offset).Ticks, offset);
}
// Instance Properties
// The clock or visible time represented. This is just a wrapper around the internal date because this is
// the chosen storage mechanism. Going through this helper is good for readability and maintainability.
// This should be used for display but not identity.
private DateTime ClockDateTime {
get {
return new DateTime((m_dateTime + Offset).Ticks, DateTimeKind.Unspecified);
}
}
// Returns the date part of this DateTimeOffset. The resulting value
// corresponds to this DateTimeOffset with the time-of-day part set to
// zero (midnight).
//
public DateTime Date {
get {
return ClockDateTime.Date;
}
}
// Returns the day-of-month part of this DateTimeOffset. The returned
// value is an integer between 1 and 31.
//
public int Day {
get {
Contract.Ensures(Contract.Result<int>() >= 1);
Contract.Ensures(Contract.Result<int>() <= 31);
return ClockDateTime.Day;
}
}
// Returns the day-of-week part of this DateTimeOffset. The returned value
// is an integer between 0 and 6, where 0 indicates Sunday, 1 indicates
// Monday, 2 indicates Tuesday, 3 indicates Wednesday, 4 indicates
// Thursday, 5 indicates Friday, and 6 indicates Saturday.
//
public DayOfWeek DayOfWeek {
get {
Contract.Ensures(Contract.Result<DayOfWeek>() >= DayOfWeek.Sunday);
Contract.Ensures(Contract.Result<DayOfWeek>() <= DayOfWeek.Saturday);
return ClockDateTime.DayOfWeek;
}
}
// Returns the day-of-year part of this DateTimeOffset. The returned value
// is an integer between 1 and 366.
//
public int DayOfYear {
get {
Contract.Ensures(Contract.Result<int>() >= 1);
Contract.Ensures(Contract.Result<int>() <= 366); // leap year
return ClockDateTime.DayOfYear;
}
}
// Returns the hour part of this DateTimeOffset. The returned value is an
// integer between 0 and 23.
//
public int Hour {
get {
Contract.Ensures(Contract.Result<int>() >= 0);
Contract.Ensures(Contract.Result<int>() < 24);
return ClockDateTime.Hour;
}
}
// Returns the millisecond part of this DateTimeOffset. The returned value
// is an integer between 0 and 999.
//
public int Millisecond {
get {
Contract.Ensures(Contract.Result<int>() >= 0);
Contract.Ensures(Contract.Result<int>() < 1000);
return ClockDateTime.Millisecond;
}
}
// Returns the minute part of this DateTimeOffset. The returned value is
// an integer between 0 and 59.
//
public int Minute {
get {
Contract.Ensures(Contract.Result<int>() >= 0);
Contract.Ensures(Contract.Result<int>() < 60);
return ClockDateTime.Minute;
}
}
// Returns the month part of this DateTimeOffset. The returned value is an
// integer between 1 and 12.
//
public int Month {
get {
Contract.Ensures(Contract.Result<int>() >= 1);
return ClockDateTime.Month;
}
}
public TimeSpan Offset {
get {
return new TimeSpan(0, m_offsetMinutes, 0);
}
}
// Returns the second part of this DateTimeOffset. The returned value is
// an integer between 0 and 59.
//
public int Second {
get {
Contract.Ensures(Contract.Result<int>() >= 0);
Contract.Ensures(Contract.Result<int>() < 60);
return ClockDateTime.Second;
}
}
// Returns the tick count for this DateTimeOffset. The returned value is
// the number of 100-nanosecond intervals that have elapsed since 1/1/0001
// 12:00am.
//
public long Ticks {
get {
return ClockDateTime.Ticks;
}
}
public long UtcTicks {
get {
return UtcDateTime.Ticks;
}
}
// Returns the time-of-day part of this DateTimeOffset. The returned value
// is a TimeSpan that indicates the time elapsed since midnight.
//
public TimeSpan TimeOfDay {
get {
return ClockDateTime.TimeOfDay;
}
}
// Returns the year part of this DateTimeOffset. The returned value is an
// integer between 1 and 9999.
//
public int Year {
get {
Contract.Ensures(Contract.Result<int>() >= 1 && Contract.Result<int>() <= 9999);
return ClockDateTime.Year;
}
}
// Returns the DateTimeOffset resulting from adding the given
// TimeSpan to this DateTimeOffset.
//
public DateTimeOffset Add(TimeSpan timeSpan) {
return new DateTimeOffset(ClockDateTime.Add(timeSpan), Offset);
}
// Returns the DateTimeOffset resulting from adding a fractional number of
// days to this DateTimeOffset. The result is computed by rounding the
// fractional number of days given by value to the nearest
// millisecond, and adding that interval to this DateTimeOffset. The
// value argument is permitted to be negative.
//
public DateTimeOffset AddDays(double days) {
return new DateTimeOffset(ClockDateTime.AddDays(days), Offset);
}
// Returns the DateTimeOffset resulting from adding a fractional number of
// hours to this DateTimeOffset. The result is computed by rounding the
// fractional number of hours given by value to the nearest
// millisecond, and adding that interval to this DateTimeOffset. The
// value argument is permitted to be negative.
//
public DateTimeOffset AddHours(double hours) {
return new DateTimeOffset(ClockDateTime.AddHours(hours), Offset);
}
// Returns the DateTimeOffset resulting from adding the given number of
// milliseconds to this DateTimeOffset. The result is computed by rounding
// the number of milliseconds given by value to the nearest integer,
// and adding that interval to this DateTimeOffset. The value
// argument is permitted to be negative.
//
public DateTimeOffset AddMilliseconds(double milliseconds) {
return new DateTimeOffset(ClockDateTime.AddMilliseconds(milliseconds), Offset);
}
// Returns the DateTimeOffset resulting from adding a fractional number of
// minutes to this DateTimeOffset. The result is computed by rounding the
// fractional number of minutes given by value to the nearest
// millisecond, and adding that interval to this DateTimeOffset. The
// value argument is permitted to be negative.
//
public DateTimeOffset AddMinutes(double minutes) {
return new DateTimeOffset(ClockDateTime.AddMinutes(minutes), Offset);
}
public DateTimeOffset AddMonths(int months) {
return new DateTimeOffset(ClockDateTime.AddMonths(months), Offset);
}
// Returns the DateTimeOffset resulting from adding a fractional number of
// seconds to this DateTimeOffset. The result is computed by rounding the
// fractional number of seconds given by value to the nearest
// millisecond, and adding that interval to this DateTimeOffset. The
// value argument is permitted to be negative.
//
public DateTimeOffset AddSeconds(double seconds) {
return new DateTimeOffset(ClockDateTime.AddSeconds(seconds), Offset);
}
// Returns the DateTimeOffset resulting from adding the given number of
// 100-nanosecond ticks to this DateTimeOffset. The value argument
// is permitted to be negative.
//
public DateTimeOffset AddTicks(long ticks) {
return new DateTimeOffset(ClockDateTime.AddTicks(ticks), Offset);
}
// Returns the DateTimeOffset resulting from adding the given number of
// years to this DateTimeOffset. The result is computed by incrementing
// (or decrementing) the year part of this DateTimeOffset by value
// years. If the month and day of this DateTimeOffset is 2/29, and if the
// resulting year is not a leap year, the month and day of the resulting
// DateTimeOffset becomes 2/28. Otherwise, the month, day, and time-of-day
// parts of the result are the same as those of this DateTimeOffset.
//
public DateTimeOffset AddYears(int years) {
return new DateTimeOffset(ClockDateTime.AddYears(years), Offset);
}
// Compares two DateTimeOffset values, returning an integer that indicates
// their relationship.
//
public static int Compare(DateTimeOffset first, DateTimeOffset second) {
return DateTime.Compare(first.UtcDateTime, second.UtcDateTime);
}
// Compares this DateTimeOffset to a given object. This method provides an
// implementation of the IComparable interface. The object
// argument must be another DateTimeOffset, or otherwise an exception
// occurs. Null is considered less than any instance.
//
int IComparable.CompareTo(Object obj) {
if (obj == null) return 1;
if (!(obj is DateTimeOffset)) {
throw new ArgumentException(Environment.GetResourceString("Arg_MustBeDateTimeOffset"));
}
DateTime objUtc = ((DateTimeOffset)obj).UtcDateTime;
DateTime utc = UtcDateTime;
if (utc > objUtc) return 1;
if (utc < objUtc) return -1;
return 0;
}
public int CompareTo(DateTimeOffset other) {
DateTime otherUtc = other.UtcDateTime;
DateTime utc = UtcDateTime;
if (utc > otherUtc) return 1;
if (utc < otherUtc) return -1;
return 0;
}
// Checks if this DateTimeOffset is equal to a given object. Returns
// true if the given object is a boxed DateTimeOffset and its value
// is equal to the value of this DateTimeOffset. Returns false
// otherwise.
//
public override bool Equals(Object obj) {
if (obj is DateTimeOffset) {
return UtcDateTime.Equals(((DateTimeOffset)obj).UtcDateTime);
}
return false;
}
public bool Equals(DateTimeOffset other) {
return UtcDateTime.Equals(other.UtcDateTime);
}
public bool EqualsExact(DateTimeOffset other) {
//
// returns true when the ClockDateTime, Kind, and Offset match
//
// currently the Kind should always be Unspecified, but there is always the possibility that a future version
// of DateTimeOffset overloads the Kind field
//
return (ClockDateTime == other.ClockDateTime && Offset == other.Offset && ClockDateTime.Kind == other.ClockDateTime.Kind);
}
// Compares two DateTimeOffset values for equality. Returns true if
// the two DateTimeOffset values are equal, or false if they are
// not equal.
//
public static bool Equals(DateTimeOffset first, DateTimeOffset second) {
return DateTime.Equals(first.UtcDateTime, second.UtcDateTime);
}
// Creates a DateTimeOffset from a Windows filetime. A Windows filetime is
// a long representing the date and time as the number of
// 100-nanosecond intervals that have elapsed since 1/1/1601 12:00am.
//
public static DateTimeOffset FromFileTime(long fileTime) {
return new DateTimeOffset(DateTime.FromFileTime(fileTime));
}
public static DateTimeOffset FromUnixTimeSeconds(long seconds) {
if (seconds < UnixMinSeconds || seconds > UnixMaxSeconds) {
throw new ArgumentOutOfRangeException("seconds",
string.Format(Environment.GetResourceString("ArgumentOutOfRange_Range"), UnixMinSeconds, UnixMaxSeconds));
}
long ticks = seconds * TimeSpan.TicksPerSecond + UnixEpochTicks;
return new DateTimeOffset(ticks, TimeSpan.Zero);
}
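// For example, FromUnixTimeSeconds(0) yields 1/1/1970 12:00:00 AM +00:00 (the Unix epoch)
// and FromUnixTimeSeconds(-1) yields 12/31/1969 11:59:59 PM +00:00.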
public static DateTimeOffset FromUnixTimeMilliseconds(long milliseconds) {
const long MinMilliseconds = DateTime.MinTicks / TimeSpan.TicksPerMillisecond - UnixEpochMilliseconds;
const long MaxMilliseconds = DateTime.MaxTicks / TimeSpan.TicksPerMillisecond - UnixEpochMilliseconds;
if (milliseconds < MinMilliseconds || milliseconds > MaxMilliseconds) {
throw new ArgumentOutOfRangeException("milliseconds",
string.Format(Environment.GetResourceString("ArgumentOutOfRange_Range"), MinMilliseconds, MaxMilliseconds));
}
long ticks = milliseconds * TimeSpan.TicksPerMillisecond + UnixEpochTicks;
return new DateTimeOffset(ticks, TimeSpan.Zero);
}
// ----- SECTION: private serialization instance methods ----------------*
#if FEATURE_SERIALIZATION
void IDeserializationCallback.OnDeserialization(Object sender) {
try {
m_offsetMinutes = ValidateOffset(Offset);
m_dateTime = ValidateDate(ClockDateTime, Offset);
}
catch (ArgumentException e) {
throw new SerializationException(Environment.GetResourceString("Serialization_InvalidData"), e);
}
}
[System.Security.SecurityCritical] // auto-generated_required
void ISerializable.GetObjectData(SerializationInfo info, StreamingContext context) {
if (info == null) {
throw new ArgumentNullException("info");
}
Contract.EndContractBlock();
info.AddValue("DateTime", m_dateTime);
info.AddValue("OffsetMinutes", m_offsetMinutes);
}
DateTimeOffset(SerializationInfo info, StreamingContext context) {
if (info == null) {
throw new ArgumentNullException("info");
}
m_dateTime = (DateTime)info.GetValue("DateTime", typeof(DateTime));
m_offsetMinutes = (Int16)info.GetValue("OffsetMinutes", typeof(Int16));
}
#endif
// Returns the hash code for this DateTimeOffset.
//
public override int GetHashCode() {
return UtcDateTime.GetHashCode();
}
// Constructs a DateTimeOffset from a string. The string must specify a
// date and optionally a time in a culture-specific or universal format.
// Leading and trailing whitespace characters are allowed.
//
public static DateTimeOffset Parse(String input) {
TimeSpan offset;
DateTime dateResult = DateTimeParse.Parse(input,
DateTimeFormatInfo.CurrentInfo,
DateTimeStyles.None,
out offset);
return new DateTimeOffset(dateResult.Ticks, offset);
}
// Constructs a DateTimeOffset from a string. The string must specify a
// date and optionally a time in a culture-specific or universal format.
// Leading and trailing whitespace characters are allowed.
//
public static DateTimeOffset Parse(String input, IFormatProvider formatProvider) {
return Parse(input, formatProvider, DateTimeStyles.None);
}
public static DateTimeOffset Parse(String input, IFormatProvider formatProvider, DateTimeStyles styles) {
styles = ValidateStyles(styles, "styles");
TimeSpan offset;
DateTime dateResult = DateTimeParse.Parse(input,
DateTimeFormatInfo.GetInstance(formatProvider),
styles,
out offset);
return new DateTimeOffset(dateResult.Ticks, offset);
}
// Constructs a DateTimeOffset from a string. The string must specify a
// date and optionally a time in a culture-specific or universal format.
// Leading and trailing whitespace characters are allowed.
//
public static DateTimeOffset ParseExact(String input, String format, IFormatProvider formatProvider) {
return ParseExact(input, format, formatProvider, DateTimeStyles.None);
}
// Constructs a DateTimeOffset from a string. The string must specify a
// date and optionally a time in a culture-specific or universal format.
// Leading and trailing whitespace characters are allowed.
//
public static DateTimeOffset ParseExact(String input, String format, IFormatProvider formatProvider, DateTimeStyles styles) {
styles = ValidateStyles(styles, "styles");
TimeSpan offset;
DateTime dateResult = DateTimeParse.ParseExact(input,
format,
DateTimeFormatInfo.GetInstance(formatProvider),
styles,
out offset);
return new DateTimeOffset(dateResult.Ticks, offset);
}
public static DateTimeOffset ParseExact(String input, String[] formats, IFormatProvider formatProvider, DateTimeStyles styles) {
styles = ValidateStyles(styles, "styles");
TimeSpan offset;
DateTime dateResult = DateTimeParse.ParseExactMultiple(input,
formats,
DateTimeFormatInfo.GetInstance(formatProvider),
styles,
out offset);
return new DateTimeOffset(dateResult.Ticks, offset);
}
public TimeSpan Subtract(DateTimeOffset value) {
return UtcDateTime.Subtract(value.UtcDateTime);
}
public DateTimeOffset Subtract(TimeSpan value) {
return new DateTimeOffset(ClockDateTime.Subtract(value), Offset);
}
public long ToFileTime() {
return UtcDateTime.ToFileTime();
}
public long ToUnixTimeSeconds() {
// Truncate sub-second precision before offsetting by the Unix Epoch to avoid
// the last digit being off by one for dates that result in negative Unix times.
//
// For example, consider the DateTimeOffset 12/31/1969 12:59:59.001 +0
// ticks = 621355967990010000
// ticksFromEpoch = ticks - UnixEpochTicks = -9990000
// secondsFromEpoch = ticksFromEpoch / TimeSpan.TicksPerSecond = 0
//
// Notice that secondsFromEpoch is rounded *up* by the truncation induced by integer division,
// whereas we actually always want to round *down* when converting to Unix time. This happens
// automatically for positive Unix time values. Now the example becomes:
// seconds = ticks / TimeSpan.TicksPerSecond = 62135596799
// secondsFromEpoch = seconds - UnixEpochSeconds = -1
//
// In other words, we want to consistently round toward the time 1/1/0001 00:00:00,
// rather than toward the Unix Epoch (1/1/1970 00:00:00).
long seconds = UtcDateTime.Ticks / TimeSpan.TicksPerSecond;
return seconds - UnixEpochSeconds;
}
public long ToUnixTimeMilliseconds() {
// Truncate sub-millisecond precision before offsetting by the Unix Epoch to avoid
// the last digit being off by one for dates that result in negative Unix times
long milliseconds = UtcDateTime.Ticks / TimeSpan.TicksPerMillisecond;
return milliseconds - UnixEpochMilliseconds;
}
public DateTimeOffset ToLocalTime() {
return ToLocalTime(false);
}
internal DateTimeOffset ToLocalTime(bool throwOnOverflow)
{
return new DateTimeOffset(UtcDateTime.ToLocalTime(throwOnOverflow));
}
public override String ToString() {
Contract.Ensures(Contract.Result<String>() != null);
return DateTimeFormat.Format(ClockDateTime, null, DateTimeFormatInfo.CurrentInfo, Offset);
}
public String ToString(String format) {
Contract.Ensures(Contract.Result<String>() != null);
return DateTimeFormat.Format(ClockDateTime, format, DateTimeFormatInfo.CurrentInfo, Offset);
}
public String ToString(IFormatProvider formatProvider) {
Contract.Ensures(Contract.Result<String>() != null);
return DateTimeFormat.Format(ClockDateTime, null, DateTimeFormatInfo.GetInstance(formatProvider), Offset);
}
public String ToString(String format, IFormatProvider formatProvider) {
Contract.Ensures(Contract.Result<String>() != null);
return DateTimeFormat.Format(ClockDateTime, format, DateTimeFormatInfo.GetInstance(formatProvider), Offset);
}
public DateTimeOffset ToUniversalTime() {
return new DateTimeOffset(UtcDateTime);
}
public static Boolean TryParse(String input, out DateTimeOffset result) {
TimeSpan offset;
DateTime dateResult;
Boolean parsed = DateTimeParse.TryParse(input,
DateTimeFormatInfo.CurrentInfo,
DateTimeStyles.None,
out dateResult,
out offset);
result = new DateTimeOffset(dateResult.Ticks, offset);
return parsed;
}
public static Boolean TryParse(String input, IFormatProvider formatProvider, DateTimeStyles styles, out DateTimeOffset result) {
styles = ValidateStyles(styles, "styles");
TimeSpan offset;
DateTime dateResult;
Boolean parsed = DateTimeParse.TryParse(input,
DateTimeFormatInfo.GetInstance(formatProvider),
styles,
out dateResult,
out offset);
result = new DateTimeOffset(dateResult.Ticks, offset);
return parsed;
}
public static Boolean TryParseExact(String input, String format, IFormatProvider formatProvider, DateTimeStyles styles,
out DateTimeOffset result) {
styles = ValidateStyles(styles, "styles");
TimeSpan offset;
DateTime dateResult;
Boolean parsed = DateTimeParse.TryParseExact(input,
format,
DateTimeFormatInfo.GetInstance(formatProvider),
styles,
out dateResult,
out offset);
result = new DateTimeOffset(dateResult.Ticks, offset);
return parsed;
}
public static Boolean TryParseExact(String input, String[] formats, IFormatProvider formatProvider, DateTimeStyles styles,
out DateTimeOffset result) {
styles = ValidateStyles(styles, "styles");
TimeSpan offset;
DateTime dateResult;
Boolean parsed = DateTimeParse.TryParseExactMultiple(input,
formats,
DateTimeFormatInfo.GetInstance(formatProvider),
styles,
out dateResult,
out offset);
result = new DateTimeOffset(dateResult.Ticks, offset);
return parsed;
}
// Ensures the TimeSpan is valid to go in a DateTimeOffset.
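// The offset is stored as whole minutes in an Int16, so it must be an exact number of minutes
// and lie within +/- 14 hours; for example +05:30 is stored as 330 and -08:00 as -480.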
private static Int16 ValidateOffset(TimeSpan offset) {
Int64 ticks = offset.Ticks;
if (ticks % TimeSpan.TicksPerMinute != 0) {
throw new ArgumentException(Environment.GetResourceString("Argument_OffsetPrecision"), "offset");
}
if (ticks < MinOffset || ticks > MaxOffset) {
throw new ArgumentOutOfRangeException("offset", Environment.GetResourceString("Argument_OffsetOutOfRange"));
}
return (Int16)(offset.Ticks / TimeSpan.TicksPerMinute);
}
// Ensures that the time and offset are in range.
private static DateTime ValidateDate(DateTime dateTime, TimeSpan offset) {
// The key validation is that both the UTC and clock times fit. The clock time is validated
// by the DateTime constructor.
Contract.Assert(offset.Ticks >= MinOffset && offset.Ticks <= MaxOffset, "Offset not validated.");
// This operation cannot overflow because offset should have already been validated to be within
// 14 hours and the DateTime instance is more than that distance from the boundaries of Int64.
Int64 utcTicks = dateTime.Ticks - offset.Ticks;
if (utcTicks < DateTime.MinTicks || utcTicks > DateTime.MaxTicks) {
throw new ArgumentOutOfRangeException("offset", Environment.GetResourceString("Argument_UTCOutOfRange"));
}
// make sure the Kind is set to Unspecified
//
return new DateTime(utcTicks, DateTimeKind.Unspecified);
}
private static DateTimeStyles ValidateStyles(DateTimeStyles style, String parameterName) {
if ((style & DateTimeFormatInfo.InvalidDateTimeStyles) != 0) {
throw new ArgumentException(Environment.GetResourceString("Argument_InvalidDateTimeStyles"), parameterName);
}
if (((style & (DateTimeStyles.AssumeLocal)) != 0) && ((style & (DateTimeStyles.AssumeUniversal)) != 0)) {
throw new ArgumentException(Environment.GetResourceString("Argument_ConflictingDateTimeStyles"), parameterName);
}
if ((style & DateTimeStyles.NoCurrentDateDefault) != 0) {
throw new ArgumentException(Environment.GetResourceString("Argument_DateTimeOffsetInvalidDateTimeStyles"), parameterName);
}
Contract.EndContractBlock();
// RoundtripKind does not make sense for DateTimeOffset; ignore this flag for backward compatibility with DateTime
style &= ~DateTimeStyles.RoundtripKind;
// AssumeLocal is also ignored as that is what we do by default with DateTimeOffset.Parse
style &= ~DateTimeStyles.AssumeLocal;
return style;
}
// Operators
public static implicit operator DateTimeOffset (DateTime dateTime) {
return new DateTimeOffset(dateTime);
}
public static DateTimeOffset operator +(DateTimeOffset dateTimeOffset, TimeSpan timeSpan) {
return new DateTimeOffset(dateTimeOffset.ClockDateTime + timeSpan, dateTimeOffset.Offset);
}
public static DateTimeOffset operator -(DateTimeOffset dateTimeOffset, TimeSpan timeSpan) {
return new DateTimeOffset(dateTimeOffset.ClockDateTime - timeSpan, dateTimeOffset.Offset);
}
public static TimeSpan operator -(DateTimeOffset left, DateTimeOffset right) {
return left.UtcDateTime - right.UtcDateTime;
}
public static bool operator ==(DateTimeOffset left, DateTimeOffset right) {
return left.UtcDateTime == right.UtcDateTime;
}
public static bool operator !=(DateTimeOffset left, DateTimeOffset right) {
return left.UtcDateTime != right.UtcDateTime;
}
public static bool operator <(DateTimeOffset left, DateTimeOffset right) {
return left.UtcDateTime < right.UtcDateTime;
}
public static bool operator <=(DateTimeOffset left, DateTimeOffset right) {
return left.UtcDateTime <= right.UtcDateTime;
}
public static bool operator >(DateTimeOffset left, DateTimeOffset right) {
return left.UtcDateTime > right.UtcDateTime;
}
public static bool operator >=(DateTimeOffset left, DateTimeOffset right) {
return left.UtcDateTime >= right.UtcDateTime;
}
}
}
| |
using System.Diagnostics;
namespace CORE_NAME
{
public enum TEXTENCODE : byte
{
UTF8 = 1,
UTF16LE = 2,
UTF16BE = 3,
UTF16 = 4, // Use native byte order
ANY = 5, // sqlite3_create_function only
UTF16_ALIGNED = 8, // sqlite3_create_collation only
//
UTF16NATIVE = UTF16LE,
}
public static class ConvertEx
{
#region Varint
// The variable-length integer encoding is as follows:
//
// KEY:
// A = 0xxxxxxx 7 bits of data and one flag bit
// B = 1xxxxxxx 7 bits of data and one flag bit
// C = xxxxxxxx 8 bits of data
// 7 bits - A
// 14 bits - BA
// 21 bits - BBA
// 28 bits - BBBA
// 35 bits - BBBBA
// 42 bits - BBBBBA
// 49 bits - BBBBBBA
// 56 bits - BBBBBBBA
// 64 bits - BBBBBBBBC
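// Example: 300 (0x12C) splits into the 7-bit groups 0000010 and 0101100 and is written
// most-significant group first as the two bytes 0x82 0x2C; values below 0x80 (e.g. 0x7F)
// take a single byte.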
public static byte PutVarint(byte[] p, int v) { return PutVarint(p, 0, (ulong)v); }
public static byte PutVarint(byte[] p, uint offset, int v) { return PutVarint(p, offset, (ulong)v); }
public static byte PutVarint(byte[] p, ulong v) { return PutVarint(p, 0, (ulong)v); }
public static byte PutVarint(byte[] p, uint offset, ulong v)
{
int i, j; byte n;
if ((v & (((ulong)0xff000000) << 32)) != 0)
{
p[offset + 8] = (byte)v;
v >>= 8;
for (i = 7; i >= 0; i--)
{
p[offset + i] = (byte)((v & 0x7f) | 0x80);
v >>= 7;
}
return 9;
}
n = 0;
var b = new byte[10];
do
{
b[n++] = (byte)((v & 0x7f) | 0x80);
v >>= 7;
} while (v != 0);
b[0] &= 0x7f;
Debug.Assert(n <= 9);
for (i = 0, j = n - 1; j >= 0; j--, i++)
p[offset + i] = b[j];
return n;
}
public static byte PutVarint32(byte[] p, int v)
{
if ((v & ~0x7f) == 0) { p[0] = (byte)v; return 1; }
if ((v & ~0x3fff) == 0) { p[0] = (byte)((v >> 7) | 0x80); p[1] = (byte)(v & 0x7f); return 2; }
return PutVarint(p, 0, v);
}
public static byte PutVarint32(byte[] p, uint offset, int v)
{
if ((v & ~0x7f) == 0) { p[offset] = (byte)v; return 1; }
if ((v & ~0x3fff) == 0) { p[offset] = (byte)((v >> 7) | 0x80); p[offset + 1] = (byte)(v & 0x7f); return 2; }
return PutVarint(p, offset, v);
}
// Bitmasks used by sqlite3GetVarint(). These precomputed constants are defined here rather than simply putting the constant expressions
// inline in order to work around bugs in the RVT compiler.
//
// SLOT_0_2_0 A mask for (0x7f<<14) | 0x7f
// SLOT_4_2_0 A mask for (0x7f<<28) | SLOT_0_2_0
private const uint SLOT_0_2_0 = 0x001fc07f;
private const uint SLOT_4_2_0 = 0xf01fc07f;
private const uint MAX_U32 = (uint)((((ulong)1) << 32) - 1);
public static byte GetVarint(byte[] p, out int v) { v = p[0]; if (v < 0x80) return 1; ulong uv; var r = _getVarint(p, 0, out uv); v = (int)uv; return r; }
public static byte GetVarint(byte[] p, out uint v) { v = p[0]; if (v < 0x80) return 1; ulong uv; var r = _getVarint(p, 0, out uv); v = (uint)uv; return r; }
public static byte GetVarint(byte[] p, uint offset, out int v) { v = p[offset]; if (v < 0x80) return 1; ulong uv; var r = _getVarint(p, offset, out uv); v = (int)uv; return r; }
public static byte GetVarint(byte[] p, uint offset, out uint v) { v = p[offset]; if (v < 0x80) return 1; ulong uv; var r = _getVarint(p, offset, out uv); v = (uint)uv; return r; }
public static byte GetVarint(byte[] p, uint offset, out long v) { v = p[offset]; if (v < 0x80) return 1; ulong uv; var r = _getVarint(p, offset, out uv); v = (long)uv; return r; }
public static byte GetVarint(byte[] p, uint offset, out ulong v) { v = p[offset]; if (v < 0x80) return 1; var r = _getVarint(p, offset, out v); return r; }
private static byte _getVarint(byte[] p, uint offset, out ulong v)
{
uint a, b, s;
a = p[offset + 0];
// a: p0 (unmasked)
if ((a & 0x80) == 0)
{
v = a;
return 1;
}
b = p[offset + 1];
// b: p1 (unmasked)
if (0 == (b & 0x80))
{
a &= 0x7f;
a = a << 7;
a |= b;
v = a;
return 2;
}
// Verify that constants are precomputed correctly
Debug.Assert(SLOT_0_2_0 == ((0x7f << 14) | 0x7f));
Debug.Assert(SLOT_4_2_0 == ((0xfU << 28) | (0x7f << 14) | 0x7f));
a = a << 14;
a |= p[offset + 2];
// a: p0<<14 | p2 (unmasked)
if (0 == (a & 0x80))
{
a &= SLOT_0_2_0;
b &= 0x7f;
b = b << 7;
a |= b;
v = a;
return 3;
}
// CSE1 from below
a &= SLOT_0_2_0;
b = b << 14;
b |= p[offset + 3];
// b: p1<<14 | p3 (unmasked)
if (0 == (b & 0x80))
{
b &= SLOT_0_2_0;
// moved CSE1 up
// a &= (0x7f<<14)|0x7f;
a = a << 7;
a |= b;
v = a;
return 4;
}
// a: p0<<14 | p2 (masked)
// b: p1<<14 | p3 (unmasked)
// 1:save off p0<<21 | p1<<14 | p2<<7 | p3 (masked)
// moved CSE1 up
// a &= (0x7f<<14)|0x7f;
b &= SLOT_0_2_0;
s = a;
// s: p0<<14 | p2 (masked)
a = a << 14;
a |= p[offset + 4];
// a: p0<<28 | p2<<14 | p4 (unmasked)
if (0 == (a & 0x80))
{
b = b << 7;
a |= b;
s = s >> 18;
v = ((ulong)s) << 32 | a;
return 5;
}
// 2:save off p0<<21 | p1<<14 | p2<<7 | p3 (masked)
s = s << 7;
s |= b;
// s: p0<<21 | p1<<14 | p2<<7 | p3 (masked)
b = b << 14;
b |= p[offset + 5];
// b: p1<<28 | p3<<14 | p5 (unmasked)
if (0 == (b & 0x80))
{
a &= SLOT_0_2_0;
a = a << 7;
a |= b;
s = s >> 18;
v = ((ulong)s) << 32 | a;
return 6;
}
a = a << 14;
a |= p[offset + 6];
// a: p2<<28 | p4<<14 | p6 (unmasked)
if (0 == (a & 0x80))
{
a &= SLOT_4_2_0;
b &= SLOT_0_2_0;
b = b << 7;
a |= b;
s = s >> 11;
v = ((ulong)s) << 32 | a;
return 7;
}
// CSE2 from below
a &= SLOT_0_2_0;
//p++;
b = b << 14;
b |= p[offset + 7];
// b: p3<<28 | p5<<14 | p7 (unmasked)
if (0 == (b & 0x80))
{
b &= SLOT_4_2_0;
// moved CSE2 up
a = a << 7;
a |= b;
s = s >> 4;
v = ((ulong)s) << 32 | a;
return 8;
}
a = a << 15;
a |= p[offset + 8];
// a: p4<<29 | p6<<15 | p8 (unmasked)
// moved CSE2 up
b &= SLOT_0_2_0;
b = b << 8;
a |= b;
s = s << 4;
b = p[offset + 4];
b &= 0x7f;
b = b >> 3;
s |= b;
v = ((ulong)s) << 32 | a;
return 9;
}
public static byte GetVarint32(byte[] p, out int v) { v = p[0]; if (v < 0x80) return 1; uint uv; var r = _getVarint32(p, 0, out uv); v = (int)uv; return r; }
public static byte GetVarint32(byte[] p, out uint v) { v = p[0]; if (v < 0x80) return 1; return _getVarint32(p, 0, out v); }
public static byte GetVarint32(byte[] p, uint offset, out int v) { v = p[offset]; if (v < 0x80) return 1; uint uv; var r = _getVarint32(p, offset, out uv); v = (int)uv; return r; }
public static byte GetVarint32(byte[] p, uint offset, out uint v) { v = p[offset]; if (v < 0x80) return 1; return _getVarint32(p, offset, out v); }
private static byte _getVarint32(byte[] p, uint offset, out uint v)
{
uint a, b;
// The 1-byte case. Overwhelmingly the most common. Handled inline by the GetVarint32() wrappers above before this routine is called.
a = p[offset + 0];
// a: p0 (unmasked)
// The 2-byte case
b = (offset + 1 < p.Length ? p[offset + 1] : (uint)0);
// b: p1 (unmasked)
if (0 == (b & 0x80))
{
// Values between 128 and 16383
a &= 0x7f;
a = a << 7;
v = a | b;
return 2;
}
// The 3-byte case
a = a << 14;
a |= (offset + 2 < p.Length ? p[offset + 2] : (uint)0);
// a: p0<<14 | p2 (unmasked)
if (0 == (a & 0x80))
{
// Values between 16384 and 2097151
a &= (0x7f << 14) | (0x7f);
b &= 0x7f;
b = b << 7;
v = a | b;
return 3;
}
// A 32-bit varint is used to store size information in btrees. Objects are rarely larger than the 2MiB limit of a 3-byte varint.
// A 3-byte varint is sufficient, for example, to record the size of a 1048569-byte BLOB or string.
// We only unroll the first 1-, 2- and 3-byte cases. The very rare larger cases can be handled by the slower 64-bit varint routine.
{
ulong ulong_v = 0;
byte n = _getVarint(p, offset, out ulong_v);
Debug.Assert(n > 3 && n <= 9);
v = ((ulong_v & MAX_U32) != ulong_v ? 0xffffffff : (uint)ulong_v);
return n;
}
}
public static byte GetVarint32(string p, uint offset, out int v)
{
v = p[(int)offset]; if (v < 0x80) return 1;
var a = new byte[4];
a[0] = (byte)p[(int)offset + 0];
a[1] = (byte)p[(int)offset + 1];
a[2] = (byte)p[(int)offset + 2];
a[3] = (byte)p[(int)offset + 3];
uint uv; var r = _getVarint32(a, 0, out uv); v = (int)uv; return r;
}
public static byte GetVarint32(string p, uint offset, out uint v)
{
v = p[(int)offset]; if (v < 0x80) return 1;
var a = new byte[4];
a[0] = (byte)p[(int)offset + 0];
a[1] = (byte)p[(int)offset + 1];
a[2] = (byte)p[(int)offset + 2];
a[3] = (byte)p[(int)offset + 3];
return _getVarint32(a, 0, out v);
}
public static byte GetVarintLength(ulong v)
{
byte i = 0;
do { i++; v >>= 7; }
while (v != 0 && C._ALWAYS(i < 9));
return i;
}
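// Minimal round-trip sketch (not part of the original API; added purely as an illustration)
// showing PutVarint and GetVarint used together on a caller-supplied buffer.
internal static void VarintRoundTripExample()
{
var buffer = new byte[9]; // a varint never needs more than 9 bytes
byte written = PutVarint(buffer, 0, (ulong)300); // writes 0x82 0x2C and returns 2
ulong decoded;
byte read = GetVarint(buffer, 0, out decoded); // reads the same two bytes back
Debug.Assert(written == read && decoded == 300);
}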
#endregion
#region Get/Put
public static uint Get4(byte[] p) { return (uint)((p[0] << 24) | (p[1] << 16) | (p[2] << 8) | p[3]); }
public static uint Get4(byte[] p, int offset) { return (offset + 3 >= p.Length) ? 0 : (uint)((p[0 + offset] << 24) | (p[1 + offset] << 16) | (p[2 + offset] << 8) | p[3 + offset]); }
public static uint Get4(byte[] p, uint offset) { return (offset + 3 >= p.Length) ? 0 : (uint)((p[0 + offset] << 24) | (p[1 + offset] << 16) | (p[2 + offset] << 8) | p[3 + offset]); }
public static void Put4(byte[] p, int v)
{
p[0] = (byte)(v >> 24 & 0xFF);
p[1] = (byte)(v >> 16 & 0xFF);
p[2] = (byte)(v >> 8 & 0xFF);
p[3] = (byte)(v & 0xFF);
}
public static void Put4(byte[] p, uint v)
{
p[0] = (byte)(v >> 24 & 0xFF);
p[1] = (byte)(v >> 16 & 0xFF);
p[2] = (byte)(v >> 8 & 0xFF);
p[3] = (byte)(v & 0xFF);
}
public static void Put4(byte[] p, int offset, int v)
{
p[0 + offset] = (byte)(v >> 24 & 0xFF);
p[1 + offset] = (byte)(v >> 16 & 0xFF);
p[2 + offset] = (byte)(v >> 8 & 0xFF);
p[3 + offset] = (byte)(v & 0xFF);
}
public static void Put4(byte[] p, int offset, uint v)
{
p[0 + offset] = (byte)(v >> 24 & 0xFF);
p[1 + offset] = (byte)(v >> 16 & 0xFF);
p[2 + offset] = (byte)(v >> 8 & 0xFF);
p[3 + offset] = (byte)(v & 0xFF);
}
public static void Put4(byte[] p, uint offset, int v)
{
p[0 + offset] = (byte)(v >> 24 & 0xFF);
p[1 + offset] = (byte)(v >> 16 & 0xFF);
p[2 + offset] = (byte)(v >> 8 & 0xFF);
p[3 + offset] = (byte)(v & 0xFF);
}
public static void Put4(byte[] p, uint offset, uint v)
{
p[0 + offset] = (byte)(v >> 24 & 0xFF);
p[1 + offset] = (byte)(v >> 16 & 0xFF);
p[2 + offset] = (byte)(v >> 8 & 0xFF);
p[3 + offset] = (byte)(v & 0xFF);
}
public static void Put4L(byte[] p, long v)
{
p[0] = (byte)(v >> 24 & 0xFF);
p[1] = (byte)(v >> 16 & 0xFF);
p[2] = (byte)(v >> 8 & 0xFF);
p[3] = (byte)(v & 0xFF);
}
public static void Put4L(byte[] p, ulong v)
{
p[0] = (byte)(v >> 24 & 0xFF);
p[1] = (byte)(v >> 16 & 0xFF);
p[2] = (byte)(v >> 8 & 0xFF);
p[3] = (byte)(v & 0xFF);
}
public static void Put4L(byte[] p, uint offset, long v)
{
p[0 + offset] = (byte)(v >> 24 & 0xFF);
p[1 + offset] = (byte)(v >> 16 & 0xFF);
p[2 + offset] = (byte)(v >> 8 & 0xFF);
p[3 + offset] = (byte)(v & 0xFF);
}
public static void Put4L(byte[] p, uint offset, ulong v)
{
p[0 + offset] = (byte)(v >> 24 & 0xFF);
p[1 + offset] = (byte)(v >> 16 & 0xFF);
p[2 + offset] = (byte)(v >> 8 & 0xFF);
p[3 + offset] = (byte)(v & 0xFF);
}
//public static void Put4(string p, int offset, int v)
//{
// var a = new byte[4];
// a[0] = (byte)p[offset + 0];
// a[1] = (byte)p[offset + 1];
// a[2] = (byte)p[offset + 2];
// a[3] = (byte)p[offset + 3];
// Put4(a, 0, v);
//}
//public static void Put4(string p, int offset, uint v)
//{
// var a = new byte[4];
// a[0] = (byte)p[offset + 0];
// a[1] = (byte)p[offset + 1];
// a[2] = (byte)p[offset + 2];
// a[3] = (byte)p[offset + 3];
// Put4(a, 0, v);
//}
public static ushort Get2(byte[] p) { return (ushort)(p[0] << 8 | p[1]); }
public static ushort Get2(byte[] p, int offset) { return (ushort)(p[offset + 0] << 8 | p[offset + 1]); }
public static ushort Get2(byte[] p, uint offset) { return (ushort)(p[offset + 0] << 8 | p[offset + 1]); }
public static ushort Get2nz(byte[] p, int offset) { return (ushort)(((((int)Get2(p, offset)) - 1) & 0xffff) + 1); }
public static ushort Get2nz(byte[] p, uint offset) { return (ushort)(((((int)Get2(p, offset)) - 1) & 0xffff) + 1); }
public static void Put2(byte[] p, int v)
{
p[0] = (byte)(v >> 8);
p[1] = (byte)v;
}
public static void Put2(byte[] p, uint v)
{
p[0] = (byte)(v >> 8);
p[1] = (byte)v;
}
public static void Put2(byte[] p, int offset, int v)
{
p[offset + 0] = (byte)(v >> 8);
p[offset + 1] = (byte)v;
}
public static void Put2(byte[] p, int offset, uint v)
{
p[offset + 0] = (byte)(v >> 8);
p[offset + 1] = (byte)v;
}
public static void Put2(byte[] p, uint offset, int v)
{
p[offset + 0] = (byte)(v >> 8);
p[offset + 1] = (byte)v;
}
public static void Put2(byte[] p, uint offset, uint v)
{
p[offset + 0] = (byte)(v >> 8);
p[offset + 1] = (byte)v;
}
public static void Put2(byte[] p, int offset, ushort v)
{
p[offset + 0] = (byte)(v >> 8);
p[offset + 1] = (byte)v;
}
public static void Put2(byte[] p, int offset, short v)
{
p[offset + 0] = (byte)(v >> 8);
p[offset + 1] = (byte)v;
}
public static void Put2(byte[] p, uint offset, ushort v)
{
p[offset + 0] = (byte)(v >> 8);
p[offset + 1] = (byte)v;
}
public static void Put2(byte[] p, uint offset, short v)
{
p[offset + 0] = (byte)(v >> 8);
p[offset + 1] = (byte)v;
}
#endregion
#region AtoX
public static bool Atof(string z, ref double out_, int length, TEXTENCODE encode)
{
#if !OMIT_FLOATING_POINT
out_ = 0.0; // Default return value, in case of an error
if (string.IsNullOrEmpty(z))
return false;
// getsize
int zIdx = 0;
int incr = (encode == TEXTENCODE.UTF8 ? 1 : 2);
if (encode == TEXTENCODE.UTF16BE) zIdx++;
// skip leading spaces
while (zIdx < length && char.IsWhiteSpace(z[zIdx])) zIdx++;
if (zIdx >= length) return false;
// get sign of significand
int sign = 1; // sign of significand
if (z[zIdx] == '-') { sign = -1; zIdx += incr; }
else if (z[zIdx] == '+') zIdx += incr;
// sign * significand * (10 ^ (esign * exponent))
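// Worked example: "-1.5e2" gives sign = -1, s = 15, d = -1 (one digit copied from after the
// decimal point) and e = 2, so the result is -15 * 10^(2 - 1) = -150.0.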
long s = 0; // significand
int d = 0; // adjust exponent for shifting decimal point
int esign = 1; // sign of exponent
int e = 0; // exponent
bool nonNum = true; // True if the exponent is either not used or is well-formed
int digits = 0;
// skip leading zeroes
while (zIdx < z.Length && z[zIdx] == '0') { zIdx += incr; digits++; }
// copy max significant digits to significand
while (zIdx < length && char.IsDigit(z[zIdx]) && s < ((long.MaxValue - 9) / 10)) { s = s * 10 + (z[zIdx] - '0'); zIdx += incr; digits++; }
while (zIdx < length && char.IsDigit(z[zIdx])) { zIdx += incr; digits++; d++; }
if (zIdx >= length) goto do_atof_calc;
// if decimal point is present
if (z[zIdx] == '.')
{
zIdx += incr;
// copy digits from after decimal to significand (decrease exponent by d to shift decimal right)
while (zIdx < length && char.IsDigit(z[zIdx]) && s < ((long.MaxValue - 9) / 10)) { s = s * 10 + (z[zIdx] - '0'); zIdx += incr; digits++; d--; }
while (zIdx < length && char.IsDigit(z[zIdx])) { zIdx += incr; digits++; } // skip non-significant digits
}
if (zIdx >= length) goto do_atof_calc;
// if exponent is present
if (z[zIdx] == 'e' || z[zIdx] == 'E')
{
zIdx += incr;
nonNum = false;
if (zIdx >= length) goto do_atof_calc;
// get sign of exponent
if (z[zIdx] == '-') { esign = -1; zIdx += incr; }
else if (z[zIdx] == '+') zIdx += incr;
// copy digits to exponent
while (zIdx < length && char.IsDigit(z[zIdx])) { e = e * 10 + (z[zIdx] - '0'); zIdx += incr; nonNum = true; }
}
// skip trailing spaces
if (digits != 0 && nonNum) while (zIdx < length && char.IsWhiteSpace(z[zIdx])) zIdx += incr;
do_atof_calc:
// adjust exponent by d, and update sign
e = (e * esign) + d;
if (e < 0) { esign = -1; e *= -1; }
else esign = 1;
// if !significand
double result = 0.0;
if (s == 0)
result = (sign < 0 && digits != 0 ? -0.0 : 0.0); // In the IEEE 754 standard, zero is signed. Add the sign if we've seen at least one digit
else
{
// attempt to reduce exponent
if (esign > 0) while (s < (long.MaxValue / 10) && e > 0) { e--; s *= 10; }
else while ((s % 10) == 0 && e > 0) { e--; s /= 10; }
// adjust the sign of significand
s = (sign < 0 ? -s : s);
// if exponent, scale significand as appropriate and store in result.
if (e != 0)
{
double scale = 1.0;
// attempt to handle extremely small/large numbers better
if (e > 307 && e < 342)
{
while ((e % 308) != 0) { scale *= 1.0e+1; e -= 1; }
if (esign < 0) { result = s / scale; result /= 1.0e+308; }
else { result = s * scale; result *= 1.0e+308; }
}
else if (e >= 342)
result = (esign < 0 ? 0.0 * s : 1e308 * 1e308 * s); // Infinity
else
{
// 1.0e+22 is the largest power of 10 than can be represented exactly.
while ((e % 22) != 0) { scale *= 1.0e+1; e -= 1; }
while (e > 0) { scale *= 1.0e+22; e -= 22; }
result = (esign < 0 ? s / scale : s * scale);
}
}
else
result = (double)s;
}
out_ = result; // store the result
return (zIdx >= length && digits > 0 && nonNum); // return true if this is a number with no extra non-whitespace characters after it
#else
long l; int rc = Atoi64(z, out l, length, encode); out_ = l;
return rc == 0; // fall back to integer parsing when floating-point support is omitted
#endif
}
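// Compare2pow63 compares the first 19 digits of z (stepping by incr) against 2^63 =
// 9223372036854775808. It returns a negative value when z is smaller (so the value fits in a
// positive Int64), zero when z is exactly 2^63, and a positive value when z is larger;
// e.g. "9223372036854775807" gives -1 and "9223372036854775809" gives +1.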
static int Compare2pow63(string z, int incr)
{
string pow63 = "922337203685477580"; // 012345678901234567
int c = 0;
for (int i = 0; c == 0 && i < 18; i++)
c = (z[i * incr] - pow63[i]) * 10;
if (c == 0)
{
c = z[18 * incr] - '8';
C.ASSERTCOVERAGE(c == -1);
C.ASSERTCOVERAGE(c == 0);
C.ASSERTCOVERAGE(c == +1);
}
return c;
}
public static int Atoi64(string z, out long out_, int length, TEXTENCODE encode)
{
if (z == null)
{
out_ = 0;
return 1;
}
// get size
int zIdx = 0;// string zStart;
int incr = (encode == TEXTENCODE.UTF8 ? 1 : 2);
if (encode == TEXTENCODE.UTF16BE) zIdx++;
// skip leading spaces
while (zIdx < length && char.IsWhiteSpace(z[zIdx])) zIdx += incr;
// get sign of significand
int neg = 0; // assume positive
if (zIdx < length)
{
if (z[zIdx] == '-') { neg = 1; zIdx += incr; }
else if (z[zIdx] == '+') zIdx += incr;
}
if (length > z.Length) length = z.Length;
// skip leading zeros
while (zIdx < length - 1 && z[zIdx] == '0') zIdx += incr;
ulong u = 0;
int c = 0;
int i; for (i = zIdx; i < length && (c = z[i]) >= '0' && c <= '9'; i += incr) u = u * 10 + (ulong)(c - '0');
if (u > long.MaxValue) out_ = long.MinValue;
else out_ = (neg != 0 ? -(long)u : (long)u);
C.ASSERTCOVERAGE(i - zIdx == 18);
C.ASSERTCOVERAGE(i - zIdx == 19);
C.ASSERTCOVERAGE(i - zIdx == 20);
if ((c != 0 && i < length) || i == zIdx || i - zIdx > 19 * incr) return 1; // zNum is empty or contains non-numeric text or is longer than 19 digits (thus guaranteeing that it is too large)
else if (i - zIdx < 19 * incr) { Debug.Assert(u <= long.MaxValue); return 0; } // Less than 19 digits, so we know that it fits in 64 bits
else
{
c = Compare2pow63(z.Substring(zIdx), incr); // zNum is a 19-digit numbers. Compare it against 9223372036854775808.
if (c < 0) { Debug.Assert(u <= long.MaxValue); return 0; } // zNum is less than 9223372036854775808 so it fits
else if (c > 0) return 1; // zNum is greater than 9223372036854775808 so it overflows
else { Debug.Assert(u - 1 == long.MaxValue); Debug.Assert(out_ == long.MinValue); return neg != 0 ? 0 : 2; } // zNum is exactly 9223372036854775808: it fits if negative; the special return value 2 signals overflow if positive
}
}
public static bool Atoi(string z, ref int out_) { return Atoi(z, 0, ref out_); }
public static bool Atoi(string z, int zIdx, ref int out_)
{
int neg = 0;
if (z[zIdx] == '-') { neg = 1; zIdx++; }
else if (z[zIdx] == '+') zIdx++;
while (zIdx < z.Length && z[zIdx] == '0') zIdx++;
long v = 0;
int i, c;
for (i = 0; i < 11 && i + zIdx < z.Length && (c = z[zIdx + i] - '0') >= 0 && c <= 9; i++) { v = v * 10 + c; }
// The longest decimal representation of a 32 bit integer is 10 digits:
// 1234567890
// 2^31 -> 2147483648
C.ASSERTCOVERAGE(i == 10);
if (i > 10) return false;
C.ASSERTCOVERAGE(v - neg == 2147483647);
if (v - neg > 2147483647) return false;
out_ = (int)(neg != 0 ? -v : v);
return true;
}
public static int Atoi(string z)
{
int x = 0;
if (!string.IsNullOrEmpty(z))
Atoi(z, ref x);
return x;
}
#endregion
#region From: Pragma_c
// 123456789 123456789
static readonly string _safetyLevelText = "onoffalseyestruefull";
static readonly int[] _safetyLevelOffset = new int[] { 0, 1, 2, 4, 9, 12, 16 };
static readonly int[] _safetyLevelLength = new int[] { 2, 2, 3, 5, 3, 4, 4 };
static readonly byte[] _safetyLevelValue = new byte[] { 1, 0, 0, 0, 1, 1, 2 };
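// Decoded, the packed table above maps: "on" -> 1, "no" -> 0, "off" -> 0, "false" -> 0,
// "yes" -> 1, "true" -> 1 and "full" -> 2 (e.g. offset 16 with length 4 selects "full" from the text).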
public static byte GetSafetyLevel(string z, int omitFull, byte dflt)
{
if (char.IsDigit(z[0]))
return (byte)ConvertEx.Atoi(z);
int n = z.Length;
for (int i = 0; i < _safetyLevelLength.Length - omitFull; i++)
if (_safetyLevelLength[i] == n && string.CompareOrdinal(_safetyLevelText.Substring(_safetyLevelOffset[i]), 0, z, 0, n) == 0)
return _safetyLevelValue[i];
return dflt;
}
public static bool GetBoolean(string z, byte dflt)
{
return (GetSafetyLevel(z, 1, dflt) != 0);
}
#endregion
}
}
| |
/*
Copyright (c) 2003-2006 Niels Kokholm and Peter Sestoft
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
using System;
using System.Text;
namespace RazorDBx.C5
{
/// <summary>
/// An entity that can append a formatted rendering of itself to a <see cref="StringBuilder"/>
/// within an approximate character budget. A format string of the form "L" followed by a number
/// (for example "L300") limits the output to roughly that many characters; see <see cref="Showing.ShowString"/>.
/// </summary>
public interface IShowable : IFormattable
{
//TODO: wonder if we should use TextWriters instead of StringBuilders?
/// <summary>
/// Format <code>this</code> using at most approximately <code>rest</code> chars and
/// append the result, possibly truncated, to stringbuilder.
/// Subtract the actual number of used chars from <code>rest</code>.
/// </summary>
/// <param name="stringbuilder"></param>
/// <param name="rest"></param>
/// <param name="formatProvider"></param>
/// <returns>True if the appended formatted string was complete (not truncated).</returns>
bool Show(StringBuilder stringbuilder, ref int rest, IFormatProvider formatProvider);
}
// ------------------------------------------------------------
// Static helper methods for Showing collections
/// <summary>
///
/// </summary>
public static class Showing
{
/// <summary>
/// Show <code>Object obj</code> by appending it to <code>stringbuilder</code>
/// </summary>
/// <param name="obj"></param>
/// <param name="stringbuilder"></param>
/// <param name="rest"></param>
/// <param name="formatProvider"></param>
/// <returns>True if <code>obj</code> was shown completely.</returns>
public static bool Show(Object obj, StringBuilder stringbuilder, ref int rest, IFormatProvider formatProvider)
{
IShowable showable;
if (rest <= 0)
return false;
else if ((showable = obj as IShowable) != null)
return showable.Show(stringbuilder, ref rest, formatProvider);
int oldLength = stringbuilder.Length;
stringbuilder.AppendFormat(formatProvider, "{0}", obj);
rest -= (stringbuilder.Length - oldLength);
return true;
}
/// <summary>
///
/// </summary>
/// <param name="showable"></param>
/// <param name="format"></param>
/// <param name="formatProvider"></param>
/// <returns></returns>
public static String ShowString(IShowable showable, String format, IFormatProvider formatProvider)
{
int rest = maxLength(format);
StringBuilder sb = new StringBuilder();
showable.Show(sb, ref rest, formatProvider);
return sb.ToString();
}
/// <summary>
///
/// </summary>
/// <param name="format"></param>
/// <returns></returns>
static int maxLength(String format)
{
//TODO: validate format string
if (format == null)
return 80;
if (format.Length > 1 && format.StartsWith("L"))
{
return int.Parse(format.Substring(1));
}
else
return int.MaxValue;
}
/// <summary>
///
/// </summary>
/// <typeparam name="T"></typeparam>
/// <param name="items"></param>
/// <param name="stringbuilder"></param>
/// <param name="rest"></param>
/// <param name="formatProvider"></param>
/// <returns>True if collection was shown completely</returns>
public static bool ShowCollectionValue<T>(ICollectionValue<T> items, StringBuilder stringbuilder, ref int rest, IFormatProvider formatProvider)
{
string startdelim = "{ ", enddelim = " }";
bool showIndexes = false;
bool showMultiplicities = false;
//TODO: do not test here at run time, but select code at compile time
// perhaps by delivering the print type to this method
IList<T> list;
ICollection<T> coll = items as ICollection<T>;
if ((list = items as IList<T>) != null)
{
startdelim = "[ ";
enddelim = " ]";
//TODO: should have been (items as IIndexed<T>).IndexingSpeed
showIndexes = list.IndexingSpeed == Speed.Constant;
}
else if (coll != null)
{
if (coll.AllowsDuplicates)
{
startdelim = "{{ ";
enddelim = " }}";
if (coll.DuplicatesByCounting)
showMultiplicities = true;
}
}
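// At this point a plain collection renders like "{ a, b }", a constant-time-indexed list like
// "[ 0:a, 1:b ]", and a duplicate-counting bag like "{{ a(*2), b(*1) }}" (illustrative values).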
stringbuilder.Append(startdelim);
rest -= 2 * startdelim.Length;
bool first = true;
bool complete = true;
int index = 0;
if (showMultiplicities)
{
foreach (KeyValuePair<T, int> p in coll.ItemMultiplicities())
{
complete = false;
if (rest <= 0)
break;
if (first)
first = false;
else
{
stringbuilder.Append(", ");
rest -= 2;
}
if (complete = Showing.Show(p.Key, stringbuilder, ref rest, formatProvider))
{
string multiplicityString = string.Format("(*{0})", p.Value);
stringbuilder.Append(multiplicityString);
rest -= multiplicityString.Length;
}
}
}
else
{
foreach (T x in items)
{
complete = false;
if (rest <= 0)
break;
if (first)
first = false;
else
{
stringbuilder.Append(", ");
rest -= 2;
}
if (showIndexes)
{
string indexString = string.Format("{0}:", index++);
stringbuilder.Append(indexString);
rest -= indexString.Length;
}
complete = Showing.Show(x, stringbuilder, ref rest, formatProvider);
}
}
if (!complete)
{
stringbuilder.Append("...");
rest -= 3;
}
stringbuilder.Append(enddelim);
return complete;
}
/// <summary>
///
/// </summary>
/// <typeparam name="K"></typeparam>
/// <typeparam name="V"></typeparam>
///
/// <param name="dictionary"></param>
/// <param name="stringbuilder"></param>
/// <param name="formatProvider"></param>
/// <param name="rest"></param>
/// <returns></returns>
public static bool ShowDictionary<K, V>(IDictionary<K, V> dictionary, StringBuilder stringbuilder, ref int rest, IFormatProvider formatProvider)
{
bool sorted = dictionary is ISortedDictionary<K, V>;
stringbuilder.Append(sorted ? "[ " : "{ ");
rest -= 4; // Account for the two-character start and end delimiters
bool first = true;
bool complete = true;
foreach (KeyValuePair<K, V> p in dictionary)
{
complete = false;
if (rest <= 0)
break;
if (first)
first = false;
else
{
stringbuilder.Append(", ");
rest -= 2;
}
complete = Showing.Show(p, stringbuilder, ref rest, formatProvider);
}
if (!complete)
{
stringbuilder.Append("...");
rest -= 3;
}
stringbuilder.Append(sorted ? " ]" : " }");
return complete;
}
}
}
| |
namespace StockSharp.Algo.Strategies.Testing
{
using System;
using System.Collections.Generic;
using System.Linq;
using Ecng.Collections;
using Ecng.Common;
using Ecng.Serialization;
using MoreLinq;
using StockSharp.Algo.Storages;
using StockSharp.Algo.Testing;
using StockSharp.BusinessEntities;
using StockSharp.Messages;
using StockSharp.Logging;
using StockSharp.Localization;
/// <summary>
/// The batch emulator of strategies.
/// </summary>
public class BatchEmulation : BaseLogReceiver
{
private readonly List<HistoryEmulationConnector> _currentConnectors = new();
private IMessageAdapter _histAdapter;
private bool _cancelEmulation;
private readonly SyncObject _sync = new();
private readonly ISecurityProvider _securityProvider;
private readonly IPortfolioProvider _portfolioProvider;
private readonly IExchangeInfoProvider _exchangeInfoProvider;
/// <summary>
/// Initializes a new instance of the <see cref="BatchEmulation"/>.
/// </summary>
/// <param name="securities">Instruments, the operation will be performed with.</param>
/// <param name="portfolios">Portfolios, the operation will be performed with.</param>
/// <param name="storageRegistry">Market data storage.</param>
public BatchEmulation(IEnumerable<Security> securities, IEnumerable<Portfolio> portfolios, IStorageRegistry storageRegistry)
: this(new CollectionSecurityProvider(securities), new CollectionPortfolioProvider(portfolios), new InMemoryExchangeInfoProvider(), storageRegistry, StorageFormats.Binary, storageRegistry.DefaultDrive)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="BatchEmulation"/>.
/// </summary>
/// <param name="securityProvider">The provider of information about instruments.</param>
/// <param name="portfolioProvider">The portfolio to be used to register orders. If value is not given, the portfolio with default name Simulator will be created.</param>
/// <param name="exchangeInfoProvider">Exchanges and trading boards provider.</param>
/// <param name="storageRegistry">Market data storage.</param>
/// <param name="storageFormat">The format of market data. <see cref="StorageFormats.Binary"/> is used by default.</param>
/// <param name="drive">The storage which is used by default. By default, <see cref="IStorageRegistry.DefaultDrive"/> is used.</param>
public BatchEmulation(ISecurityProvider securityProvider, IPortfolioProvider portfolioProvider, IExchangeInfoProvider exchangeInfoProvider, IStorageRegistry storageRegistry, StorageFormats storageFormat = StorageFormats.Binary, IMarketDataDrive drive = null)
{
_securityProvider = securityProvider ?? throw new ArgumentNullException(nameof(securityProvider));
_portfolioProvider = portfolioProvider ?? throw new ArgumentNullException(nameof(portfolioProvider));
_exchangeInfoProvider = exchangeInfoProvider ?? throw new ArgumentNullException(nameof(exchangeInfoProvider));
EmulationSettings = new EmulationSettings();
StorageSettings = new StorageCoreSettings
{
StorageRegistry = storageRegistry,
Drive = drive,
Format = storageFormat,
};
}
/// <summary>
/// Storage settings.
/// </summary>
public StorageCoreSettings StorageSettings { get; }
/// <summary>
/// Emulation settings.
/// </summary>
public EmulationSettings EmulationSettings { get; }
/// <summary>
/// Whether the emulator has ended its operation because it reached the end of data, or was interrupted through the <see cref="BatchEmulation.Stop"/> method.
/// </summary>
public bool IsFinished { get; private set; }
private ChannelStates _state = ChannelStates.Stopped;
/// <summary>
/// The emulator state.
/// </summary>
public ChannelStates State
{
get => _state;
private set
{
if (_state == value)
return;
var oldState = _state;
_state = value;
StateChanged?.Invoke(oldState, _state);
}
}
/// <summary>
/// The event of paper trade state change.
/// </summary>
public event Action<ChannelStates, ChannelStates> StateChanged;
/// <summary>
/// The event of total progress change.
/// </summary>
public event Action<IEnumerable<Strategy>, int> TotalProgressChanged;
/// <summary>
/// The event of single progress change.
/// </summary>
public event Action<Strategy, int> SingleProgressChanged;
/// <summary>
/// The server time (<see cref="ILogSource.CurrentTime"/>) has changed. The event passes the time elapsed since its previous invocation; the first invocation passes <see cref="TimeSpan.Zero"/>.
/// </summary>
public event Action<Connector, TimeSpan> MarketTimeChanged;
/// <summary>
/// Start emulation.
/// </summary>
/// <param name="strategies">The strategies.</param>
/// <param name="iterationCount">Iteration count.</param>
public void Start(IEnumerable<Strategy> strategies, int iterationCount)
{
if (strategies is null)
throw new ArgumentNullException(nameof(strategies));
if (iterationCount <= 0)
throw new ArgumentOutOfRangeException(nameof(iterationCount), iterationCount, LocalizedStrings.Str1219);
_cancelEmulation = false;
State = ChannelStates.Starting;
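// The iterations are split into batches of EmulationSettings.BatchSize strategies;
// each batch contributes an equal share (batchWeight percent) to the total progress.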
var totalBatches = (int)((decimal)iterationCount / EmulationSettings.BatchSize).Ceiling();
if (totalBatches == 0)
throw new ArgumentOutOfRangeException("totalBatches == 0");
var batchWeight = 100.0 / totalBatches;
TryStartNextBatch(strategies.Batch(EmulationSettings.BatchSize).GetEnumerator(), -1, totalBatches, batchWeight);
}
private void TryStartNextBatch(IEnumerator<IEnumerable<Strategy>> batches,
int currentBatch, int totalBatches, double batchWeight)
{
if (batches is null)
throw new ArgumentNullException(nameof(batches));
lock (_sync)
{
if (_cancelEmulation || !batches.MoveNext())
{
IsFinished = !_cancelEmulation;
State = ChannelStates.Stopping;
State = ChannelStates.Stopped;
if (_histAdapter != null)
{
_histAdapter.Dispose();
_histAdapter = null;
}
return;
}
var batch = batches.Current.ToArray();
currentBatch++;
if (currentBatch == 0)
{
State = ChannelStates.Starting;
State = ChannelStates.Started;
}
InitAdapters(batches, batch, currentBatch, totalBatches, batchWeight);
}
}
private void InitAdapters(IEnumerator<IEnumerable<Strategy>> batches, Strategy[] batch,
int currentBatch, int totalBatches, double batchWeight)
{
if (batch is null)
throw new ArgumentNullException(nameof(batch));
if (batch.Length == 0)
throw new ArgumentOutOfRangeException(nameof(batch));
_histAdapter = new SubscriptionOnlineMessageAdapter(new HistoryMessageAdapter(new IncrementalIdGenerator(), _securityProvider)
{
StorageRegistry = StorageSettings.StorageRegistry,
Drive = StorageSettings.Drive,
StorageFormat = StorageSettings.Format,
StartDate = EmulationSettings.StartTime,
StopDate = EmulationSettings.StopTime,
Parent = this,
});
var progress = new SynchronizedDictionary<HistoryEmulationConnector, int>();
var left = batch.Length;
var nextProgress = 1;
_currentConnectors.Clear();
foreach (var strategy in batch)
{
var inChannel = new InMemoryMessageChannel(new MessageByLocalTimeQueue(), "Emulator in", _histAdapter.AddErrorLog) { SuspendMaxCount = int.MaxValue };
var connector = new HistoryEmulationConnector(_histAdapter, false, inChannel, _securityProvider, _portfolioProvider, _exchangeInfoProvider)
{
Parent = this,
};
connector.EmulationAdapter.Settings.Load(EmulationSettings.Save());
strategy.Connector = connector;
strategy.Reset();
strategy.Start();
progress.Add(connector, 0);
connector.ProgressChanged += step =>
{
SingleProgressChanged?.Invoke(strategy, step);
var avgStep = 0;
lock (progress.SyncRoot)
{
progress[connector] = step;
avgStep = (int)progress.Values.Average();
}
if (avgStep < nextProgress)
return;
nextProgress++;
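// Overall progress = the share of already completed batches plus the current batch's
// average progress scaled by its weight.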
TotalProgressChanged?.Invoke(batch, (int)(currentBatch * batchWeight + ((avgStep * batchWeight) / 100)));
};
connector.MarketTimeChanged += diff => MarketTimeChanged?.Invoke(connector, diff);
connector.StateChanged += () =>
{
if (connector.State == ChannelStates.Stopped)
{
left--;
if (left == 0)
TryStartNextBatch(batches, currentBatch, totalBatches, batchWeight);
}
};
_currentConnectors.Add(connector);
}
foreach (var connector in _currentConnectors)
connector.Connect();
_histAdapter.SendInMessage(new ConnectMessage());
_histAdapter.SendInMessage(new EmulationStateMessage { State = ChannelStates.Starting });
}
/// <summary>
/// To suspend the emulation.
/// </summary>
public void Suspend()
{
lock (_sync)
{
if (State != ChannelStates.Started)
return;
State = ChannelStates.Suspending;
foreach (var connector in _currentConnectors)
{
if (connector.State == ChannelStates.Started)
connector.Suspend();
}
State = ChannelStates.Suspended;
}
}
/// <summary>
/// To resume the emulation.
/// </summary>
public void Resume()
{
lock (_sync)
{
if (State != ChannelStates.Suspended)
return;
State = ChannelStates.Starting;
foreach (var connector in _currentConnectors)
{
if (connector.State == ChannelStates.Suspended)
connector.Start();
}
State = ChannelStates.Started;
}
}
/// <summary>
/// To stop paper trading.
/// </summary>
public void Stop()
{
lock (_sync)
{
if (!(State is ChannelStates.Started or ChannelStates.Suspended))
return;
State = ChannelStates.Stopping;
_cancelEmulation = true;
foreach (var connector in _currentConnectors)
{
if (connector.State is
ChannelStates.Started or
ChannelStates.Starting or
ChannelStates.Suspended or
ChannelStates.Suspending)
connector.Disconnect();
}
_histAdapter.SendInMessage(new EmulationStateMessage { State = ChannelStates.Stopping });
_histAdapter.SendInMessage(new DisconnectMessage());
}
}
}
}
// Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License. See License.txt in the project root for
// license information.
namespace Microsoft.Azure.Search.Models
{
using System;
using System.Collections.Generic;
using System.Linq;
/// <summary>
/// Parameters for filtering, sorting, faceting, paging, and other search
/// query behaviors.
/// </summary>
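/// <example>
/// A minimal usage sketch (the field names "category" and "price" are assumptions for illustration):
/// <code>
/// var parameters = new SearchParameters
/// {
///     Filter = "category eq 'books'",
///     OrderBy = new[] { "price desc" },
///     Top = 10,
///     IncludeTotalResultCount = true
/// };
/// </code>
/// </example>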
public class SearchParameters
{
private static readonly IList<string> Empty = new string[0];
/// <summary>
/// Initializes a new instance of the SearchParameters class.
/// </summary>
public SearchParameters() { }
/// <summary>
/// Gets or sets the list of facet expressions to apply to the search query. Each facet expression contains a
/// field name, optionally followed by a comma-separated list of name:value pairs.
/// <see href="https://msdn.microsoft.com/library/azure/dn798927.aspx"/>
/// </summary>
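/// <example>
/// Illustrative facet expressions (the field names are assumptions):
/// <code>
/// parameters.Facets = new[] { "category", "price,interval:100", "tags,count:5" };
/// </code>
/// </example>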
public IList<string> Facets { get; set; }
/// <summary>
/// Gets or sets the OData $filter expression to apply to the search
/// query.
/// </summary>
public string Filter { get; set; }
/// <summary>
/// Gets or sets the list of field names to use for hit highlights.
/// Only searchable fields can be used for hit highlighting.
/// </summary>
public IList<string> HighlightFields { get; set; }
/// <summary>
/// Gets or sets a string tag that is appended to hit highlights. Must
/// be set with HighlightPreTag. Default is &lt;/em&gt;.
/// </summary>
public string HighlightPostTag { get; set; }
/// <summary>
/// Gets or sets a string tag that is prepended to hit highlights.
/// Must be set with HighlightPostTag. Default is &lt;em&gt;.
/// </summary>
public string HighlightPreTag { get; set; }
/// <summary>
/// Gets or sets a value that specifies whether to fetch the total
/// count of results. Default is false. Setting this value to true
/// may have a performance impact. Note that the count returned is an
/// approximation.
/// </summary>
public bool IncludeTotalResultCount { get; set; }
/// <summary>
/// Gets or sets a number between 0 and 100 indicating the percentage
/// of the index that must be covered by a search query in order for
/// the query to be reported as a success. This parameter can be
/// useful for ensuring search availability even for services with
/// only one replica. The default is 100.
/// </summary>
public double? MinimumCoverage { get; set; }
/// <summary>
/// Gets or sets the list of OData $orderby expressions by which to
/// sort the results. Each expression can be either a field name or a
/// call to the geo.distance() function. Each expression can be
/// followed by asc to indicate ascending, and desc to indicate
/// descending. The default is ascending order. Ties will be broken
/// by the match scores of documents. If no OrderBy is specified, the
/// default sort order is descending by document match score. There
/// can be at most 32 Orderby clauses.
/// </summary>
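/// <example>
/// Illustrative expressions (the field names and coordinates are assumptions):
/// <code>
/// parameters.OrderBy = new[] { "rating desc", "geo.distance(location, geography'POINT(-122.2 47.6)')" };
/// </code>
/// </example>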
public IList<string> OrderBy { get; set; }
/// <summary>
/// Gets or sets a value that specifies the syntax of the search query.
/// The default is 'simple'. Use 'full' if your query uses the Lucene
/// query syntax. Possible values for this property include: 'simple', 'full'.
/// </summary>
public QueryType QueryType { get; set; }
/// <summary>
/// Gets or sets the list of parameter values to be used in scoring
/// functions (for example, referencePointParameter). Each parameter is
/// a name/value pair encapsulated in a ScoringParameter object.
/// </summary>
public IList<ScoringParameter> ScoringParameters { get; set; }
/// <summary>
/// Gets or sets the name of a scoring profile to evaluate match
/// scores for matching documents in order to sort the results.
/// </summary>
public string ScoringProfile { get; set; }
/// <summary>
/// Gets or sets the list of field names to include in the full-text
/// search.
/// </summary>
public IList<string> SearchFields { get; set; }
/// <summary>
/// Gets or sets a value that specifies whether any or all of the
/// search terms must be matched in order to count the document as a
/// match. Possible values for this property include: 'any', 'all'.
/// </summary>
public SearchMode SearchMode { get; set; }
/// <summary>
/// Gets or sets the list of fields to retrieve. If unspecified, all
/// fields marked as retrievable in the schema are included.
/// </summary>
public IList<string> Select { get; set; }
/// <summary>
/// Gets or sets the number of search results to skip. This value
/// cannot be greater than 100,000. If you need to scan documents in
/// sequence, but cannot use Skip due to this limitation, consider
/// using OrderBy on a totally-ordered key and Filter with a range
/// query instead.
/// </summary>
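/// <example>
/// A hedged sketch of the deep-paging workaround described above, assuming a totally-ordered "id" field:
/// <code>
/// parameters.OrderBy = new[] { "id asc" };
/// parameters.Filter = "id gt '12345'";   // the last id seen on the previous page
/// </code>
/// </example>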
public int? Skip { get; set; }
/// <summary>
/// Gets or sets the number of search results to retrieve. This can be
/// used in conjunction with Skip to implement client-side paging of
/// search results.
/// </summary>
public int? Top { get; set; }
private IList<string> ScoringParameterStrings
{
get
{
if (this.ScoringParameters == null)
{
return Empty;
}
return ScoringParameters.Select(p => p.ToString()).ToList();
}
}
/// <summary>
/// Converts the SearchParameters instance to a URL query string.
/// </summary>
/// <returns>A URL query string containing all the search parameters.</returns>
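/// <example>
/// Illustrative output (assuming each QueryOption renders itself as name=value):
/// <code>
/// // new SearchParameters { Filter = "price lt 20", Top = 10 }.ToString() returns:
/// // "$count=false&amp;$filter=price%20lt%2020&amp;queryType=simple&amp;searchMode=any&amp;$top=10"
/// </code>
/// </example>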
public override string ToString()
{
return String.Join("&", this.GetAllOptions());
}
internal SearchParametersPayload ToPayload(string searchText)
{
return new SearchParametersPayload()
{
Count = this.IncludeTotalResultCount,
Facets = this.Facets ?? Empty,
Filter = this.Filter,
Highlight = this.HighlightFields.ToCommaSeparatedString(),
HighlightPostTag = this.HighlightPostTag,
HighlightPreTag = this.HighlightPreTag,
MinimumCoverage = this.MinimumCoverage,
OrderBy = this.OrderBy.ToCommaSeparatedString(),
QueryType = this.QueryType,
ScoringParameters = this.ScoringParameterStrings,
ScoringProfile = this.ScoringProfile,
Search = searchText,
SearchFields = this.SearchFields.ToCommaSeparatedString(),
SearchMode = this.SearchMode,
Select = this.Select.ToCommaSeparatedString(),
Skip = this.Skip,
Top = this.Top
};
}
private IEnumerable<QueryOption> GetAllOptions()
{
yield return new QueryOption("$count", this.IncludeTotalResultCount.ToString().ToLowerInvariant());
foreach (string facetExpr in this.Facets ?? Empty)
{
yield return new QueryOption("facet", Uri.EscapeDataString(facetExpr));
}
if (this.Filter != null)
{
yield return new QueryOption("$filter", Uri.EscapeDataString(this.Filter));
}
if (this.HighlightFields != null && this.HighlightFields.Any())
{
yield return new QueryOption("highlight", this.HighlightFields);
}
if (this.HighlightPreTag != null)
{
yield return new QueryOption("highlightPreTag", Uri.EscapeDataString(this.HighlightPreTag));
}
if (this.HighlightPostTag != null)
{
yield return new QueryOption("highlightPostTag", Uri.EscapeDataString(this.HighlightPostTag));
}
if (this.MinimumCoverage != null)
{
yield return new QueryOption("minimumCoverage", this.MinimumCoverage.ToString());
}
if (this.OrderBy != null && this.OrderBy.Any())
{
yield return new QueryOption("$orderby", this.OrderBy);
}
yield return new QueryOption("queryType", (this.QueryType == Models.QueryType.Simple) ? "simple" : "full");
foreach (string scoringParameterExpr in this.ScoringParameterStrings)
{
yield return new QueryOption("scoringParameter", scoringParameterExpr);
}
if (this.ScoringProfile != null)
{
yield return new QueryOption("scoringProfile", this.ScoringProfile);
}
if (this.SearchFields != null && this.SearchFields.Any())
{
yield return new QueryOption("searchFields", this.SearchFields);
}
yield return new QueryOption("searchMode", (this.SearchMode == Models.SearchMode.Any) ? "any" : "all");
if (this.Select != null && this.Select.Any())
{
yield return new QueryOption("$select", this.Select);
}
if (this.Skip != null)
{
yield return new QueryOption("$skip", this.Skip.ToString());
}
if (this.Top != null)
{
yield return new QueryOption("$top", this.Top.ToString());
}
}
}
}