using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Text;
using System.Text.RegularExpressions;
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Http;
using Microsoft.AspNetCore.Mvc;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Options;
using Rotativa.NetCore.Options;
namespace Rotativa.NetCore
{
public abstract class AsResultBase : ActionResult
{
protected AsResultBase()
{
this.WkhtmlPath = string.Empty;
this.FormsAuthenticationCookieName = ".ASPXAUTH";
}
/// <summary>
/// This will be sent to the browser as the name of the generated PDF file.
/// </summary>
public string FileName { get; set; }
/// <summary>
/// Path to wkhtmltopdf\wkhtmltoimage binary.
/// </summary>
public string WkhtmlPath { get; set; }
/// <summary>
/// Custom name of authentication cookie used by forms authentication.
/// </summary>
[Obsolete("Use FormsAuthenticationCookieName instead of CookieName.")]
public string CookieName
{
get => this.FormsAuthenticationCookieName;
set => this.FormsAuthenticationCookieName = value;
}
/// <summary>
/// Custom name of authentication cookie used by forms authentication.
/// </summary>
public string FormsAuthenticationCookieName { get; set; }
/// <summary>
/// Sets custom headers.
/// </summary>
[OptionFlag("--custom-header")]
public Dictionary<string, string> CustomHeaders { get; set; }
/// <summary>
/// Sets cookies.
/// </summary>
[OptionFlag("--cookie")]
public Dictionary<string, string> Cookies { get; set; }
/// <summary>
/// Sets post values.
/// </summary>
[OptionFlag("--post")]
public Dictionary<string, string> Post { get; set; }
/// <summary>
/// Indicates whether the page can run JavaScript.
/// </summary>
[OptionFlag("-n")]
public bool IsJavaScriptDisabled { get; set; }
/// <summary>
/// Minimum font size.
/// </summary>
[OptionFlag("--minimum-font-size")]
public int? MinimumFontSize { get; set; }
/// <summary>
/// Sets proxy server.
/// </summary>
[OptionFlag("-p")]
public string Proxy { get; set; }
/// <summary>
/// HTTP Authentication username.
/// </summary>
[OptionFlag("--username")]
public string UserName { get; set; }
/// <summary>
/// HTTP Authentication password.
/// </summary>
[OptionFlag("--password")]
public string Password { get; set; }
/// <summary>
/// Use this if you need other switches that are not currently supported by Rotativa.
/// </summary>
[OptionFlag("")]
public string CustomSwitches { get; set; }
public string SaveOnServerPath { get; set; }
protected abstract string GetUrl(ActionContext context);
/// <summary>
/// Returns properties marked with the OptionFlag attribute as a single command line that can be passed to the wkhtmltopdf binary.
/// </summary>
/// <returns>Command line parameter that can be directly passed to wkhtmltopdf binary.</returns>
protected virtual string GetConvertOptions()
{
var result = new StringBuilder();
var fields = this.GetType().GetProperties();
foreach (var fi in fields)
{
var of = fi.GetCustomAttributes(typeof(OptionFlag), true).FirstOrDefault() as OptionFlag;
if (of == null)
continue;
object value = fi.GetValue(this, null);
if (value == null)
continue;
if (fi.PropertyType == typeof(Dictionary<string, string>))
{
var dictionary = (Dictionary<string, string>)value;
foreach (var d in dictionary)
{
result.AppendFormat(" {0} {1} {2}", of.Name, d.Key, d.Value);
}
}
else if (fi.PropertyType == typeof(bool))
{
if ((bool)value)
result.AppendFormat(CultureInfo.InvariantCulture, " {0}", of.Name);
}
else
{
result.AppendFormat(CultureInfo.InvariantCulture, " {0} {1}", of.Name, value);
}
}
return result.ToString().Trim();
}
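// Illustrative example (not part of the original source): with
// CustomHeaders = { { "X-Token", "abc" } }, IsJavaScriptDisabled = true and MinimumFontSize = 12,
// GetConvertOptions() yields a switch string containing
// "--custom-header X-Token abc", "-n" and "--minimum-font-size 12",
// with the relative order determined by property reflection order.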
private string GetWkParams(ActionContext context)
{
var switches = string.Empty;
var cookieOptions = context.HttpContext.RequestServices.GetService<IOptions<CookieAuthenticationOptions>>();
if (cookieOptions != null && cookieOptions.Value != null && !String.IsNullOrEmpty(cookieOptions.Value.CookieName))
{
var cookieName = cookieOptions.Value.CookieName;
string authenticationCookie = null;
if (context.HttpContext.Request.Cookies != null && context.HttpContext.Request.Cookies.ContainsKey(cookieName))
{
authenticationCookie = context.HttpContext.Request.Cookies[cookieName];
}
if (authenticationCookie != null)
{
var authCookieValue = authenticationCookie;
switches += " --cookie " + this.FormsAuthenticationCookieName + " " + authCookieValue;
}
}
switches += " " + this.GetConvertOptions();
var url = this.GetUrl(context);
switches += " " + url;
return switches;
}
protected virtual byte[] CallTheDriver(ActionContext context)
{
var switches = this.GetWkParams(context);
var fileContent = this.WkhtmlConvert(switches);
return fileContent;
}
protected abstract byte[] WkhtmlConvert(string switches);
public byte[] BuildFile(ActionContext context)
{
if (context == null)
{
throw new ArgumentNullException(nameof(context));
}
if (this.WkhtmlPath == string.Empty)
{
var location = Assembly.GetEntryAssembly().Location;
var directory = Path.GetDirectoryName(location);
this.WkhtmlPath = Path.Combine(directory, "Rotativa");
}
var fileContent = this.CallTheDriver(context);
if (string.IsNullOrEmpty(this.SaveOnServerPath) == false)
{
File.WriteAllBytes(this.SaveOnServerPath, fileContent);
}
return fileContent;
}
public override void ExecuteResult(ActionContext context)
{
var fileContent = this.BuildFile(context);
var response = this.PrepareResponse(context.HttpContext.Response);
response.Body.Write(fileContent, 0, fileContent.Length);
}
private static string SanitizeFileName(string name)
{
string invalidChars = Regex.Escape(new string(Path.GetInvalidPathChars()) + new string(Path.GetInvalidFileNameChars()));
string invalidCharsPattern = string.Format(@"[{0}]+", invalidChars);
string result = Regex.Replace(name, invalidCharsPattern, "_");
return result;
}
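// Illustrative example (not part of the original source): a FileName such as
// "report:2021*final?.pdf" is sent in the Content-Disposition header as
// "report_2021_final_.pdf", since runs of invalid path/file name characters
// (':', '*', '?' on Windows) are collapsed to a single underscore.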
protected HttpResponse PrepareResponse(HttpResponse response)
{
response.ContentType = this.GetContentType();
if (!string.IsNullOrEmpty(this.FileName))
{
response.Headers.Add("Content-Disposition", string.Format("attachment; filename=\"{0}\"", SanitizeFileName(this.FileName)));
}
return response;
}
protected abstract string GetContentType();
}
}
/*
*
* (c) Copyright Ascensio System Limited 2010-2021
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*
*/
namespace ASC.Mail.Net.SIP.Message
{
#region usings
using System;
#endregion
/// <summary>
/// Implements SIP "directive" value. Defined in RFC 3841.
/// </summary>
/// <remarks>
/// <code>
/// RFC 3841 Syntax:
/// directive = proxy-directive / cancel-directive / fork-directive / recurse-directive /
/// parallel-directive / queue-directive
/// proxy-directive = "proxy" / "redirect"
/// cancel-directive = "cancel" / "no-cancel"
/// fork-directive = "fork" / "no-fork"
/// recurse-directive = "recurse" / "no-recurse"
/// parallel-directive = "parallel" / "sequential"
/// queue-directive = "queue" / "no-queue"
/// </code>
/// </remarks>
public class SIP_t_Directive : SIP_t_Value
{
#region DirectiveType enum
/// <summary>
/// Process directives. Defined in RFC 3841 9.1.
/// </summary>
public enum DirectiveType
{
/// <summary>
/// This directive indicates whether the caller would like each server to proxy the request.
/// </summary>
Proxy,
/// <summary>
/// This directive indicates whether the caller would like each server to redirect the request.
/// </summary>
Redirect,
/// <summary>
/// This directive indicates whether the caller would like each proxy server to send a CANCEL
/// request to forked branches.
/// </summary>
Cancel,
/// <summary>
/// This directive indicates that the caller does NOT want each proxy server to send a CANCEL
/// request to forked branches.
/// </summary>
NoCancel,
/// <summary>
/// This type of directive indicates whether a proxy should fork a request.
/// </summary>
Fork,
/// <summary>
/// This type of directive indicates whether a proxy should proxy to only a single address.
/// The server SHOULD proxy the request to the "best" address (generally the one with the highest q-value).
/// </summary>
NoFork,
/// <summary>
/// This directive indicates whether a proxy server receiving a 3xx response should send
/// requests to the addresses listed in the response.
/// </summary>
Recurse,
/// <summary>
/// This directive indicates whether a proxy server receiving a 3xx response should forward
/// the list of addresses upstream towards the caller.
/// </summary>
NoRecurse,
/// <summary>
/// This directive indicates whether the caller would like the proxy server to proxy
/// the request to all known addresses at once.
/// </summary>
Parallel,
/// <summary>
/// This directive indicates whether the caller would like the proxy server to go through
/// all known addresses sequentially, contacting the next address only after it has received
/// a non-2xx or non-6xx final response for the previous one.
/// </summary>
Sequential,
/// <summary>
/// This directive indicates that, if the called party is temporarily unreachable, the caller
/// wants its call to be queued.
/// </summary>
Queue,
/// <summary>
/// This directive indicates that, if the called party is temporarily unreachable, the caller
/// does not want its call to be queued.
/// </summary>
NoQueue
}
#endregion
#region Members
private DirectiveType m_Directive = DirectiveType.Fork;
#endregion
#region Properties
/// <summary>
/// Gets or sets directive.
/// </summary>
public DirectiveType Directive
{
get { return m_Directive; }
set { m_Directive = value; }
}
#endregion
#region Methods
/// <summary>
/// Parses "directive" from specified value.
/// </summary>
/// <param name="value">SIP "directive" value.</param>
/// <exception cref="ArgumentNullException">Raised when <b>value</b> is null.</exception>
/// <exception cref="SIP_ParseException">Raised when the SIP message is invalid.</exception>
public void Parse(string value)
{
if (value == null)
{
throw new ArgumentNullException("value");
}
Parse(new StringReader(value));
}
/// <summary>
/// Parses "directive" from specified reader.
/// </summary>
/// <param name="reader">Reader from where to parse.</param>
/// <exception cref="ArgumentNullException">Raised when <b>reader</b> is null.</exception>
/// <exception cref="SIP_ParseException">Raised when the SIP message is invalid.</exception>
public override void Parse(StringReader reader)
{
/*
directive = proxy-directive / cancel-directive / fork-directive / recurse-directive /
parallel-directive / queue-directive
proxy-directive = "proxy" / "redirect"
cancel-directive = "cancel" / "no-cancel"
fork-directive = "fork" / "no-fork"
recurse-directive = "recurse" / "no-recurse"
parallel-directive = "parallel" / "sequential"
queue-directive = "queue" / "no-queue"
*/
if (reader == null)
{
throw new ArgumentNullException("reader");
}
// Get directive token and map it to the corresponding DirectiveType value.
string word = reader.ReadWord();
if (word == null)
{
throw new SIP_ParseException("'directive' value is missing !");
}
switch (word.ToLower())
{
case "proxy":
m_Directive = DirectiveType.Proxy;
break;
case "redirect":
m_Directive = DirectiveType.Redirect;
break;
case "cancel":
m_Directive = DirectiveType.Cancel;
break;
case "no-cancel":
m_Directive = DirectiveType.NoCancel;
break;
case "fork":
m_Directive = DirectiveType.Fork;
break;
case "no-fork":
m_Directive = DirectiveType.NoFork;
break;
case "recurse":
m_Directive = DirectiveType.Recurse;
break;
case "no-recurse":
m_Directive = DirectiveType.NoRecurse;
break;
case "parallel":
m_Directive = DirectiveType.Parallel;
break;
case "sequential":
m_Directive = DirectiveType.Sequential;
break;
case "queue":
m_Directive = DirectiveType.Queue;
break;
case "no-queue":
m_Directive = DirectiveType.NoQueue;
break;
default:
throw new SIP_ParseException("Invalid 'directive' value !");
}
}
/// <summary>
/// Converts this to valid "directive" value.
/// </summary>
/// <returns>Returns "directive" value.</returns>
public override string ToStringValue()
{
/*
directive = proxy-directive / cancel-directive / fork-directive / recurse-directive /
parallel-directive / queue-directive
proxy-directive = "proxy" / "redirect"
cancel-directive = "cancel" / "no-cancel"
fork-directive = "fork" / "no-fork"
recurse-directive = "recurse" / "no-recurse"
parallel-directive = "parallel" / "sequential"
queue-directive = "queue" / "no-queue"
*/
switch (m_Directive)
{
case DirectiveType.Proxy:
return "proxy";
case DirectiveType.Redirect:
return "redirect";
case DirectiveType.Cancel:
return "cancel";
case DirectiveType.NoCancel:
return "no-cancel";
case DirectiveType.Fork:
return "fork";
case DirectiveType.NoFork:
return "no-fork";
case DirectiveType.Recurse:
return "recurse";
case DirectiveType.NoRecurse:
return "no-recurse";
case DirectiveType.Parallel:
return "parallel";
case DirectiveType.Sequential:
return "sequential";
case DirectiveType.Queue:
return "queue";
case DirectiveType.NoQueue:
return "no-queue";
default:
throw new ArgumentException("Invalid property Directive value, this should never happen !");
}
}
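/* Illustrative usage (not part of the original source):
SIP_t_Directive d = new SIP_t_Directive();
d.Parse("no-fork"); // d.Directive == DirectiveType.NoFork
string s = d.ToStringValue(); // "no-fork"
Matching is case-insensitive; an unrecognized token raises SIP_ParseException. */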
#endregion
}
}
// Copyright (c) 2021 Alachisoft
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Threading;
using System.Xml;
namespace Alachisoft.NCache.Client
{
/// <summary>
/// Contains the client-side configuration for a cache (server list, ports, timeouts and logging settings), loaded from client.ncconf and <see cref="CacheConnectionOptions"/>.
/// </summary>
class ClientConfiguration
{
#region Fields
private bool _balanceNodes = true; //true by default. decision made by Sir Iqbal. Mail dated Mon 6/9/2008
private bool _loadServersFromConfigFile = true;
private int _serverPort = 9800;
private int _clientRequestTimeout = 90 * 1000;
private int _connectionTimeout = 5000;
private int _retries = 5;
private int _retryInternal = 1000;
private int _itemSizeThreshHold = 0;
private int _keepAliveInterval = 30;
private double _retryConnectionDelay = 10; // 10 min
private string _cacheId;
private string _defaultReadThruProvider;
private string _defaultWriteThruProvider;
private ArrayList _servers = new ArrayList();
private Hashtable _compactTypes = new Hashtable();
internal CacheConnectionOptions _cacheConnectionOptions;
private Hashtable _mappedServer = new Hashtable();
private Search _search = Search.LocalSearch;
private Search _result = Search.LocalSearch;
#endregion
public ClientConfiguration(string cacheId)
{
_cacheId = cacheId;
_cacheConnectionOptions = null;
}
internal ClientConfiguration(string cacheId, CacheConnectionOptions cacheConnectionOptions)
{
_cacheId = cacheId;
_cacheConnectionOptions = (CacheConnectionOptions)cacheConnectionOptions.Clone();
if (_cacheConnectionOptions != null)
{
if (_cacheConnectionOptions.ServerList != null && _cacheConnectionOptions.ServerList.Count > 0)
{
foreach (var serverInfo in _cacheConnectionOptions.ServerList)
{
serverInfo.IsUserProvided = true;
AddServer(serverInfo);
}
_loadServersFromConfigFile = false;
}
}
}
public bool EnableClientLogs { get; set; }
public bool EnableDetailedClientLogs { get; set; }
public bool IPMappingConfigured { get; set; }
public int ServerCount { get { return _servers.Count; } }
public int CurrentServerIndex { get; set; }
public int ItemSizeThreshHold { get { return _itemSizeThreshHold; } }
public bool BalanceNodes
{
get
{
if (_cacheConnectionOptions != null && _cacheConnectionOptions.LoadBalance.HasValue)
return _cacheConnectionOptions.LoadBalance.Value;
return _balanceNodes;
}
set { _balanceNodes = value; }
}
public bool ImportHashmap { get; set; } = true;
public string BindIP
{
get
{
if (_cacheConnectionOptions != null && _cacheConnectionOptions.IsSet(ConnectionStrings.BINDIP))
return _cacheConnectionOptions.ClientBindIP;
string bindIP = string.Empty;
string fileName = null;
FileStream fs = null;
try
{
fileName = DirectoryUtil.GetBaseFilePath("client.ncconf");
if (fileName == null)
{
return bindIP;
}
XmlDocument configuration = new XmlDocument();
FileInfo fileInfo = new FileInfo(fileName);
fs = fileInfo.OpenRead();
configuration.Load(fs);
XmlNodeList clientNodesServer = configuration.GetElementsByTagName("ncache-server");
XmlNodeList clientNodesClient = configuration.GetElementsByTagName("ncache-client");
XmlNodeList clientNodes = (clientNodesClient != null && clientNodesClient.Count > 0) ? clientNodesClient : clientNodesServer;
CultureInfo cultureInfo = Thread.CurrentThread.CurrentCulture;
try
{
Thread.CurrentThread.CurrentCulture = new CultureInfo("en-US");
if (clientNodes != null && clientNodes.Count > 0)
{
XmlNode childNode = clientNodes.Item(0);
if (childNode != null)
{
XmlAttributeCollection attributes = childNode.Attributes;
if (attributes != null)
{
string currentAttrib = string.Empty;
if (attributes["local-server-ip"] != null)
{
currentAttrib = attributes["local-server-ip"].Value;
if (currentAttrib != null)
{
bindIP = Convert.ToString(currentAttrib);
}
}
}
}
}
}
finally
{
Thread.CurrentThread.CurrentCulture = cultureInfo;
}
return bindIP;
}
catch (Exception)
{
return bindIP;
}
finally
{
if (fs != null) fs.Close();
}
}
}
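// Illustrative only: the BindIP getter above reads the "local-server-ip" attribute of the first
// <ncache-client> element in client.ncconf (falling back to <ncache-server>), e.g.
// <ncache-client local-server-ip="192.168.1.10" ...> (hypothetical value).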
public ArrayList ServerList
{
get
{
lock (_servers.SyncRoot)
{
return _servers.Clone() as ArrayList;
}
}
}
public ServerInfo GetServerAt(int index)
{
ServerInfo nextServer = null;
lock (_servers.SyncRoot)
{
if (_servers != null && _servers.Count > 0 && index < _servers.Count)
{
nextServer = _servers[index] as ServerInfo;
}
}
return nextServer;
}
public ServerInfo NextServer
{
get
{
ServerInfo nextServer = null;
lock (_servers.SyncRoot)
{
if (_servers != null && _servers.Count > 0)
{
// If servers were removed from the server list, CurrentServerIndex may point
// past the end of the list and an IndexOutOfRangeException could occur,
// so reset it before indexing.
if (CurrentServerIndex >= _servers.Count) CurrentServerIndex = 0;
if (CurrentServerIndex < 0) CurrentServerIndex = 0;
nextServer = _servers[CurrentServerIndex] as ServerInfo;
CurrentServerIndex++;
if (CurrentServerIndex > _servers.Count - 1) CurrentServerIndex = 0;
}
}
return nextServer;
}
}
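// Explanatory note (not in the original source): NextServer implements simple round-robin
// selection, e.g. with servers [A, B, C] successive reads return A, B, C, A, ...;
// CurrentServerIndex is reset to 0 whenever it would fall outside the current list.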
public int ConfigServerPort { get; private set; } = 9800;
public int ClientRequestTimeout
{
get
{
if (_cacheConnectionOptions != null && _cacheConnectionOptions.IsSet(ConnectionStrings.CLIENTREQUESTOPTIME))
return Convert.ToInt32(_cacheConnectionOptions.ClientRequestTimeOut.Value.TotalMilliseconds);
return _clientRequestTimeout;
}
}
public int ConnectionTimeout
{
get
{
if (_cacheConnectionOptions != null && _cacheConnectionOptions.IsSet(ConnectionStrings.CONNECTIONTIMEOUT))
return Convert.ToInt32(_cacheConnectionOptions.ConnectionTimeout.Value.TotalMilliseconds);
return _connectionTimeout;
}
}
internal bool EnableKeepAlive { get; set; } = false;
internal int KeepAliveInterval { get; private set; } = 30;
public bool EnableClientLog { get; private set; }
public LogLevel LogLevels { get; private set; }
public bool EnablePipelining { get; private set; }
public int PipeliningTimeout { get; private set; }
internal int ServerPort
{
get
{
if (_cacheConnectionOptions != null && _cacheConnectionOptions.IsSet(ConnectionStrings.PORT))
return _cacheConnectionOptions.Port.Value;
return _serverPort;
}
set
{
_serverPort = value;
}
}
public void AddServer(ServerInfo server)
{
if (_servers != null && (!string.IsNullOrEmpty(server.Name) || server.IP != null))
{
lock (_servers.SyncRoot)
{
if (!_servers.Contains(server))
{
_servers.Add(server);
}
}
}
}
public void RemoveServer(ServerInfo server)
{
if (_servers != null && server != null)
{
lock (_servers.SyncRoot)
{
if (_servers.Contains(server))
{
ServerInfo existingServer = (ServerInfo)_servers[_servers.IndexOf(server)];
if (!existingServer.IsUserProvided)
{
if (CurrentServerIndex == (_servers.Count - 1))
CurrentServerIndex--;
_servers.Remove(server);
}
}
}
}
}
public ServerInfo GetMappedServer(string ip, int port)
{
ServerInfo mapping = null;
if (_mappedServer != null && _mappedServer.Count != 0)
{
foreach (ServerInfo rm in _mappedServer.Keys)
{
if (rm.Name.Equals(ip))
{
mapping = new ServerInfo();
mapping = (ServerInfo)_mappedServer[rm];
}
}
}
// In case no mapping is found, return the original IP and port.
if (mapping == null)
mapping = new ServerInfo(ip, port);
return mapping;
}
public void LoadConfiguration()
{
FileStream fs = null;
string c_configFileName = null;
XmlDocument configuration = new XmlDocument();
try
{
if (_cacheId == null) return;
c_configFileName = DirectoryUtil.GetBaseFilePath("client.ncconf", _search, out _result);
if (c_configFileName == null) return;
FileInfo fileInfo = new FileInfo(c_configFileName);
fs = fileInfo.OpenRead();
configuration.Load(fs);
fileInfo = null;
bool serverPortFound = false;
XmlNodeList clientInfoTag = configuration.GetElementsByTagName("ncache-server");
CultureInfo cultureInfo = Thread.CurrentThread.CurrentCulture;
try
{
Thread.CurrentThread.CurrentCulture = new CultureInfo("en-US");
if (clientInfoTag != null && clientInfoTag.Count > 0)
{
XmlNode portNode = clientInfoTag.Item(0);
if (portNode != null)
{
XmlAttributeCollection attributes = portNode.Attributes;
if (attributes != null)
{
string currentAttrib = string.Empty;
if (attributes["port"] != null && attributes["port"].Value != null)
{
ConfigServerPort = Convert.ToInt32(attributes["port"].Value);
}
if (_cacheConnectionOptions != null && _cacheConnectionOptions.Port.HasValue)
{
_serverPort = _cacheConnectionOptions.Port.Value;
}
else
{
_serverPort = ConfigServerPort;
}
if (_cacheConnectionOptions != null && _cacheConnectionOptions.ClientRequestTimeOut.HasValue)
{
_clientRequestTimeout = Convert.ToInt32(_cacheConnectionOptions.ClientRequestTimeOut.Value.TotalMilliseconds);
}
else
{
if (attributes["client-request-timeout"] != null && attributes["client-request-timeout"].Value != null)
{
_clientRequestTimeout = Convert.ToInt32(attributes["client-request-timeout"].Value) * 1000;
}
}
if (_cacheConnectionOptions != null && _cacheConnectionOptions.ConnectionTimeout.HasValue)
{
_connectionTimeout = Convert.ToInt32(_cacheConnectionOptions.ConnectionTimeout.Value.TotalMilliseconds);
}
else
{
if (attributes["connection-timeout"] != null && attributes["connection-timeout"].Value != null)
_connectionTimeout = Convert.ToInt32(attributes["connection-timeout"].Value) * 1000;
}
}
serverPortFound = true;
}
}
if (!serverPortFound)
throw new Runtime.Exceptions.ConfigurationException("ncache-server missing in client configuration");
XmlNodeList cacheList = configuration.GetElementsByTagName("cache");
XmlNodeList cacheConfig = null;
for (int i = 0; i < cacheList.Count; i++)
{
XmlNode cache = cacheList.Item(i);
if (cache.Attributes.GetNamedItem("id").Value.ToLower().Equals(_cacheId.ToLower()))
{
if (cache.Attributes["load-balance"] != null)
{
_balanceNodes = Convert.ToBoolean(cache.Attributes["load-balance"].Value);
}
if (_cacheConnectionOptions != null && _cacheConnectionOptions.LoadBalance.HasValue)
{
_balanceNodes = _cacheConnectionOptions.LoadBalance.Value;
}
if (_cacheConnectionOptions != null && _cacheConnectionOptions.EnableClientLogs.HasValue)
{
EnableClientLogs = _cacheConnectionOptions.EnableClientLogs.Value;
}
else
{
if (cache.Attributes["enable-client-logs"] != null)
EnableClientLogs = Convert.ToBoolean(cache.Attributes["enable-client-logs"].Value.ToString());
}
if (_cacheConnectionOptions != null && _cacheConnectionOptions.LogLevel.HasValue)
{
LogLevels = _cacheConnectionOptions.LogLevel.Value;
switch (LogLevels)
{
case LogLevel.Debug:
case LogLevel.Info:
EnableDetailedClientLogs = true;
break;
case LogLevel.Error:
EnableDetailedClientLogs = false;
break;
}
}
else
{
if (cache.Attributes["log-level"] != null)
{
var logLevel = cache.Attributes["log-level"].Value.ToString().ToLower();
switch (logLevel)
{
case "info":
LogLevels = LogLevel.Info;
EnableDetailedClientLogs = true;
break;
case "debug":
LogLevels = LogLevel.Debug;
EnableDetailedClientLogs = true;
break;
case "error":
LogLevels = LogLevel.Error;
EnableDetailedClientLogs = false;
break;
}
}
}
ImportHashmap = true;
cacheConfig = cache.ChildNodes;
break;
}
}
if (cacheConfig == null)
{
if (!string.IsNullOrEmpty(_cacheId))
{
if (_result != Search.GlobalSearch)
{
_search = _result + 1;
LoadConfiguration();
}
}
return;
}
_search = _result;
LoadRemoteServerMappingConfig(cacheConfig);
LoadRemoteServerConfig(cacheConfig);
}
finally
{
Thread.CurrentThread.CurrentCulture = cultureInfo;
}
}
catch (Runtime.Exceptions.ConfigurationException)
{
throw;
}
catch (IOException) { throw; }
catch (Exception e)
{
throw new Runtime.Exceptions.ConfigurationException("An error occurred while reading client.ncconf. " + e.Message);
}
finally
{
if (fs != null) fs.Close();
}
}
private void LoadRemoteServerConfig(XmlNodeList cacheConfig)
{
try
{
if (!_loadServersFromConfigFile) return;
int PriorityCounter = 1;
for (int i = 0; i < cacheConfig.Count; i++)
{
XmlNode currentConfig = cacheConfig.Item(i);
if (currentConfig.Name.Equals("server"))
{
ServerInfo remoteServer = new ServerInfo();
try
{
remoteServer.Name = currentConfig.Attributes["name"].InnerText;
remoteServer.Priority = Convert.ToInt16(PriorityCounter);
PriorityCounter = PriorityCounter + 1;
if (currentConfig.Attributes.GetNamedItem("port-range") != null)
remoteServer.PortRange = Convert.ToInt16(currentConfig.Attributes["port-range"].InnerText);
}
catch (Exception) { }
remoteServer.Port = ServerPort;
if ((remoteServer.Name != null || remoteServer.IP != null) && remoteServer.Port != -1)
{
lock (_servers.SyncRoot)
{
if (!_servers.Contains(remoteServer))
{
if (_mappedServer != null && _mappedServer.Count != 0)
{
ServerInfo rm = GetMappedServer(remoteServer.Name, remoteServer.Port);
remoteServer.Name = rm.Name;
remoteServer.Port = rm.Port;
if (!_servers.Contains(remoteServer))
_servers.Add(remoteServer);
}
else
{
remoteServer.IsUserProvided = true;
_servers.Add(remoteServer);
}
}
}
}
}
}
lock (_servers.SyncRoot)
{
_servers.Sort();
}
}
catch (Exception) { }
}
private void LoadRemoteServerMappingConfig(XmlNodeList cacheConfig)
{
Hashtable updatedServerMap = new Hashtable();
try
{
for (int i = 0; i < cacheConfig.Count; i++)
{
XmlNode currentConfig = cacheConfig.Item(i);
if (currentConfig.Name.Equals("server-end-point"))
{
XmlNodeList _mappingConfig = currentConfig.ChildNodes;
for (int j = 0; j < _mappingConfig.Count; j++)
{
XmlNode mapNodeConfig = _mappingConfig.Item(j);
if (mapNodeConfig.Name.Equals("end-point"))
{
ServerInfo publicServer = new ServerInfo();
ServerInfo privateServer = new ServerInfo();
try
{
privateServer.Name = mapNodeConfig.Attributes["private-ip"].InnerText;
privateServer.Port = Convert.ToInt32(mapNodeConfig.Attributes["private-port"].InnerText);
publicServer.Name = mapNodeConfig.Attributes["public-ip"].InnerText;
publicServer.Port = Convert.ToInt32(mapNodeConfig.Attributes["public-port"].InnerText);
}
catch (Exception) { }
if (privateServer.Name != null)
{
lock (_mappedServer.SyncRoot)
{
if (_mappedServer.Count != 0)
{
foreach (ServerInfo rm in _mappedServer.Keys)
{
if (!rm.Name.Equals(privateServer.Name))
{
updatedServerMap.Add(privateServer, publicServer);
}
}
}
else
{
_mappedServer.Add(privateServer, publicServer);
}
}
}
}
}
IPMappingConfigured = true;
}
}
foreach (ServerInfo rms in updatedServerMap.Keys)
{
_mappedServer.Add(rms, updatedServerMap[rms]);
}
}
catch (Exception) { }
}
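/* Illustrative client.ncconf fragment (hypothetical values, not from the original source)
showing the elements consumed above: LoadConfiguration reads the <ncache-server> and <cache>
attributes, LoadRemoteServerConfig reads the <server> nodes and LoadRemoteServerMappingConfig
reads the <server-end-point>/<end-point> mappings.

<configuration>
<ncache-server port="9800" client-request-timeout="90" connection-timeout="5"/>
<cache id="myCache" load-balance="true" enable-client-logs="false" log-level="error">
<server name="20.200.20.11"/>
<server name="20.200.20.12" port-range="2"/>
<server-end-point>
<end-point private-ip="10.0.0.11" private-port="9800" public-ip="20.200.20.11" public-port="9800"/>
</server-end-point>
</cache>
</configuration>
*/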
internal void AddMappedServers(List<Config.Mapping> mappedServerList)
{
Hashtable updatedServerMap = new Hashtable();
if (mappedServerList != null)
{
foreach (Config.Mapping node in mappedServerList)
{
ServerInfo publicServer = new ServerInfo();
ServerInfo privateServer = new ServerInfo();
privateServer.Name = node.PrivateIP;
privateServer.Port = node.PrivatePort;
publicServer.Name = node.PublicIP;
publicServer.Port = node.PublicPort;
if (privateServer.Name != null)
{
lock (_mappedServer.SyncRoot)
{
if (_mappedServer.Count != 0)
{
bool keyExists = false;
foreach (ServerInfo rm in _mappedServer.Keys)
{
if (!rm.Name.Equals(privateServer.Name))
{
keyExists = false;
}
else
{
keyExists = true;
ServerInfo originalPublicServer = (ServerInfo)_mappedServer[rm];
string existingServer = originalPublicServer.Name + ":" + originalPublicServer.Port.ToString();
string newServer = publicServer.Name + ":" + publicServer.Port.ToString();
if (!existingServer.Equals(newServer))
{
updatedServerMap.Add(privateServer, publicServer);
}
break;
}
}
if (keyExists == false)
{
updatedServerMap.Add(privateServer, publicServer);
}
}
else
{
_mappedServer.Add(privateServer, publicServer);
}
}
}
}
foreach (ServerInfo rms in updatedServerMap.Keys)
{
_mappedServer[rms] = updatedServerMap[rms];
}
}
}
internal bool IsDifferentParamPort(int usedParamPort)
{
if (usedParamPort == ConfigServerPort)
return false;
return true;
}
}
}
#if (UNITY_WINRT || UNITY_WP_8_1) && !UNITY_EDITOR && !UNITY_WP8
#region License
// Copyright (c) 2007 James Newton-King
//
// Permission is hereby granted, free of charge, to any person
// obtaining a copy of this software and associated documentation
// files (the "Software"), to deal in the Software without
// restriction, including without limitation the rights to use,
// copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following
// conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
// OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
// HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
// WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
// OTHER DEALINGS IN THE SOFTWARE.
#endregion
using System;
using System.IO;
using System.Globalization;
using System.Numerics;
using System.Threading.Tasks;
using Newtonsoft.Json.Linq;
using Newtonsoft.Json.Utilities;
using System.Xml;
using Newtonsoft.Json.Converters;
using Newtonsoft.Json.Serialization;
using System.Text;
using System.Xml.Linq;
namespace Newtonsoft.Json
{
/// <summary>
/// Provides methods for converting between common language runtime types and JSON types.
/// </summary>
/// <example>
/// <code lang="cs" source="..\Src\Newtonsoft.Json.Tests\Documentation\SerializationTests.cs" region="SerializeObject" title="Serializing and Deserializing JSON with JsonConvert" />
/// </example>
public static class JsonConvert
{
/// <summary>
/// Gets or sets a function that creates default <see cref="JsonSerializerSettings"/>.
/// Default settings are automatically used by serialization methods on <see cref="JsonConvert"/>,
/// and <see cref="JToken.ToObject{T}()"/> and <see cref="JToken.FromObject(object)"/> on <see cref="JToken"/>.
/// To serialize without using any default settings create a <see cref="JsonSerializer"/> with
/// <see cref="JsonSerializer.Create()"/>.
/// </summary>
public static Func<JsonSerializerSettings> DefaultSettings { get; set; }
/// <summary>
/// Represents JavaScript's boolean value true as a string. This field is read-only.
/// </summary>
public static readonly string True = "true";
/// <summary>
/// Represents JavaScript's boolean value false as a string. This field is read-only.
/// </summary>
public static readonly string False = "false";
/// <summary>
/// Represents JavaScript's null as a string. This field is read-only.
/// </summary>
public static readonly string Null = "null";
/// <summary>
/// Represents JavaScript's undefined as a string. This field is read-only.
/// </summary>
public static readonly string Undefined = "undefined";
/// <summary>
/// Represents JavaScript's positive infinity as a string. This field is read-only.
/// </summary>
public static readonly string PositiveInfinity = "Infinity";
/// <summary>
/// Represents JavaScript's negative infinity as a string. This field is read-only.
/// </summary>
public static readonly string NegativeInfinity = "-Infinity";
/// <summary>
/// Represents JavaScript's NaN as a string. This field is read-only.
/// </summary>
public static readonly string NaN = "NaN";
/// <summary>
/// Converts the <see cref="DateTime"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A JSON string representation of the <see cref="DateTime"/>.</returns>
public static string ToString(DateTime value)
{
return ToString(value, DateFormatHandling.IsoDateFormat, DateTimeZoneHandling.RoundtripKind);
}
/// <summary>
/// Converts the <see cref="DateTime"/> to its JSON string representation using the <see cref="DateFormatHandling"/> specified.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <param name="format">The format the date will be converted to.</param>
/// <param name="timeZoneHandling">The time zone handling when the date is converted to a string.</param>
/// <returns>A JSON string representation of the <see cref="DateTime"/>.</returns>
public static string ToString(DateTime value, DateFormatHandling format, DateTimeZoneHandling timeZoneHandling)
{
DateTime updatedDateTime = DateTimeUtils.EnsureDateTime(value, timeZoneHandling);
using (StringWriter writer = StringUtils.CreateStringWriter(64))
{
writer.Write('"');
DateTimeUtils.WriteDateTimeString(writer, updatedDateTime, format, null, CultureInfo.InvariantCulture);
writer.Write('"');
return writer.ToString();
}
}
/// <summary>
/// Converts the <see cref="DateTimeOffset"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A JSON string representation of the <see cref="DateTimeOffset"/>.</returns>
public static string ToString(DateTimeOffset value)
{
return ToString(value, DateFormatHandling.IsoDateFormat);
}
/// <summary>
/// Converts the <see cref="DateTimeOffset"/> to its JSON string representation using the <see cref="DateFormatHandling"/> specified.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <param name="format">The format the date will be converted to.</param>
/// <returns>A JSON string representation of the <see cref="DateTimeOffset"/>.</returns>
public static string ToString(DateTimeOffset value, DateFormatHandling format)
{
using (StringWriter writer = StringUtils.CreateStringWriter(64))
{
writer.Write('"');
DateTimeUtils.WriteDateTimeOffsetString(writer, value, format, null, CultureInfo.InvariantCulture);
writer.Write('"');
return writer.ToString();
}
}
/// <summary>
/// Converts the <see cref="Boolean"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A JSON string representation of the <see cref="Boolean"/>.</returns>
public static string ToString(bool value)
{
return (value) ? True : False;
}
/// <summary>
/// Converts the <see cref="Char"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A JSON string representation of the <see cref="Char"/>.</returns>
public static string ToString(char value)
{
return ToString(char.ToString(value));
}
/// <summary>
/// Converts the <see cref="Enum"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A JSON string representation of the <see cref="Enum"/>.</returns>
public static string ToString(Enum value)
{
return value.ToString("D");
}
/// <summary>
/// Converts the <see cref="Int32"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A JSON string representation of the <see cref="Int32"/>.</returns>
public static string ToString(int value)
{
return value.ToString(null, CultureInfo.InvariantCulture);
}
/// <summary>
/// Converts the <see cref="Int16"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A JSON string representation of the <see cref="Int16"/>.</returns>
public static string ToString(short value)
{
return value.ToString(null, CultureInfo.InvariantCulture);
}
/// <summary>
/// Converts the <see cref="UInt16"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A JSON string representation of the <see cref="UInt16"/>.</returns>
public static string ToString(ushort value)
{
return value.ToString(null, CultureInfo.InvariantCulture);
}
/// <summary>
/// Converts the <see cref="UInt32"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A JSON string representation of the <see cref="UInt32"/>.</returns>
public static string ToString(uint value)
{
return value.ToString(null, CultureInfo.InvariantCulture);
}
/// <summary>
/// Converts the <see cref="Int64"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A JSON string representation of the <see cref="Int64"/>.</returns>
public static string ToString(long value)
{
return value.ToString(null, CultureInfo.InvariantCulture);
}
private static string ToStringInternal(BigInteger value)
{
return value.ToString(null, CultureInfo.InvariantCulture);
}
/// <summary>
/// Converts the <see cref="UInt64"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A JSON string representation of the <see cref="UInt64"/>.</returns>
public static string ToString(ulong value)
{
return value.ToString(null, CultureInfo.InvariantCulture);
}
/// <summary>
/// Converts the <see cref="Single"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A JSON string representation of the <see cref="Single"/>.</returns>
public static string ToString(float value)
{
return EnsureDecimalPlace(value, value.ToString("R", CultureInfo.InvariantCulture));
}
internal static string ToString(float value, FloatFormatHandling floatFormatHandling, char quoteChar, bool nullable)
{
return EnsureFloatFormat(value, EnsureDecimalPlace(value, value.ToString("R", CultureInfo.InvariantCulture)), floatFormatHandling, quoteChar, nullable);
}
private static string EnsureFloatFormat(double value, string text, FloatFormatHandling floatFormatHandling, char quoteChar, bool nullable)
{
if (floatFormatHandling == FloatFormatHandling.Symbol || !(double.IsInfinity(value) || double.IsNaN(value)))
return text;
if (floatFormatHandling == FloatFormatHandling.DefaultValue)
return (!nullable) ? "0.0" : Null;
return quoteChar + text + quoteChar;
}
/// <summary>
/// Converts the <see cref="Double"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A JSON string representation of the <see cref="Double"/>.</returns>
public static string ToString(double value)
{
return EnsureDecimalPlace(value, value.ToString("R", CultureInfo.InvariantCulture));
}
internal static string ToString(double value, FloatFormatHandling floatFormatHandling, char quoteChar, bool nullable)
{
return EnsureFloatFormat(value, EnsureDecimalPlace(value, value.ToString("R", CultureInfo.InvariantCulture)), floatFormatHandling, quoteChar, nullable);
}
private static string EnsureDecimalPlace(double value, string text)
{
if (double.IsNaN(value) || double.IsInfinity(value) || text.IndexOf('.') != -1 || text.IndexOf('E') != -1 || text.IndexOf('e') != -1)
return text;
return text + ".0";
}
private static string EnsureDecimalPlace(string text)
{
if (text.IndexOf('.') != -1)
return text;
return text + ".0";
}
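// Example (illustrative): EnsureDecimalPlace keeps numeric output recognizable as floating point,
// so JsonConvert.ToString(3d) returns "3.0", while "3.5", "NaN" and exponent forms such as
// "3E+100" are returned unchanged.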
/// <summary>
/// Converts the <see cref="Byte"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A JSON string representation of the <see cref="Byte"/>.</returns>
public static string ToString(byte value)
{
return value.ToString(null, CultureInfo.InvariantCulture);
}
/// <summary>
/// Converts the <see cref="SByte"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A JSON string representation of the <see cref="SByte"/>.</returns>
public static string ToString(sbyte value)
{
return value.ToString(null, CultureInfo.InvariantCulture);
}
/// <summary>
/// Converts the <see cref="Decimal"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A JSON string representation of the <see cref="Decimal"/>.</returns>
public static string ToString(decimal value)
{
return EnsureDecimalPlace(value.ToString(null, CultureInfo.InvariantCulture));
}
/// <summary>
/// Converts the <see cref="Guid"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A JSON string representation of the <see cref="Guid"/>.</returns>
public static string ToString(Guid value)
{
return ToString(value, '"');
}
internal static string ToString(Guid value, char quoteChar)
{
string text = null;
text = value.ToString("D");
return quoteChar + text + quoteChar;
}
/// <summary>
/// Converts the <see cref="TimeSpan"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A JSON string representation of the <see cref="TimeSpan"/>.</returns>
public static string ToString(TimeSpan value)
{
return ToString(value, '"');
}
internal static string ToString(TimeSpan value, char quoteChar)
{
return ToString(value.ToString(), quoteChar);
}
/// <summary>
/// Converts the <see cref="Uri"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A JSON string representation of the <see cref="Uri"/>.</returns>
public static string ToString(Uri value)
{
if (value == null)
return Null;
return ToString(value, '"');
}
internal static string ToString(Uri value, char quoteChar)
{
return ToString(value.ToString(), quoteChar);
}
/// <summary>
/// Converts the <see cref="String"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A JSON string representation of the <see cref="String"/>.</returns>
public static string ToString(string value)
{
return ToString(value, '"');
}
/// <summary>
/// Converts the <see cref="String"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <param name="delimiter">The string delimiter character.</param>
/// <returns>A JSON string representation of the <see cref="String"/>.</returns>
public static string ToString(string value, char delimiter)
{
if (delimiter != '"' && delimiter != '\'')
throw new ArgumentException("Delimiter must be a single or double quote.", "delimiter");
return JavaScriptUtils.ToEscapedJavaScriptString(value, delimiter, true);
}
/// <summary>
/// Converts the <see cref="Object"/> to its JSON string representation.
/// </summary>
/// <param name="value">The value to convert.</param>
/// <returns>A JSON string representation of the <see cref="Object"/>.</returns>
public static string ToString(object value)
{
if (value == null)
return Null;
PrimitiveTypeCode typeCode = ConvertUtils.GetTypeCode(value);
switch (typeCode)
{
case PrimitiveTypeCode.String:
return ToString((string) value);
case PrimitiveTypeCode.Char:
return ToString((char) value);
case PrimitiveTypeCode.Boolean:
return ToString((bool) value);
case PrimitiveTypeCode.SByte:
return ToString((sbyte) value);
case PrimitiveTypeCode.Int16:
return ToString((short) value);
case PrimitiveTypeCode.UInt16:
return ToString((ushort) value);
case PrimitiveTypeCode.Int32:
return ToString((int) value);
case PrimitiveTypeCode.Byte:
return ToString((byte) value);
case PrimitiveTypeCode.UInt32:
return ToString((uint) value);
case PrimitiveTypeCode.Int64:
return ToString((long) value);
case PrimitiveTypeCode.UInt64:
return ToString((ulong) value);
case PrimitiveTypeCode.Single:
return ToString((float) value);
case PrimitiveTypeCode.Double:
return ToString((double) value);
case PrimitiveTypeCode.DateTime:
return ToString((DateTime) value);
case PrimitiveTypeCode.Decimal:
return ToString((decimal) value);
case PrimitiveTypeCode.DateTimeOffset:
return ToString((DateTimeOffset) value);
case PrimitiveTypeCode.Guid:
return ToString((Guid) value);
case PrimitiveTypeCode.Uri:
return ToString((Uri) value);
case PrimitiveTypeCode.TimeSpan:
return ToString((TimeSpan) value);
case PrimitiveTypeCode.BigInteger:
return ToStringInternal((BigInteger)value);
}
throw new ArgumentException("Unsupported type: {0}. Use the JsonSerializer class to get the object's JSON representation.".FormatWith(CultureInfo.InvariantCulture, value.GetType()));
}
#region Serialize
/// <summary>
/// Serializes the specified object to a JSON string.
/// </summary>
/// <param name="value">The object to serialize.</param>
/// <returns>A JSON string representation of the object.</returns>
public static string SerializeObject(object value)
{
return SerializeObject(value, Formatting.None, (JsonSerializerSettings) null);
}
/// <summary>
/// Serializes the specified object to a JSON string using formatting.
/// </summary>
/// <param name="value">The object to serialize.</param>
/// <param name="formatting">Indicates how the output is formatted.</param>
/// <returns>
/// A JSON string representation of the object.
/// </returns>
public static string SerializeObject(object value, Formatting formatting)
{
return SerializeObject(value, formatting, (JsonSerializerSettings) null);
}
/// <summary>
/// Serializes the specified object to a JSON string using a collection of <see cref="JsonConverter"/>.
/// </summary>
/// <param name="value">The object to serialize.</param>
/// <param name="converters">A collection of converters used while serializing.</param>
/// <returns>A JSON string representation of the object.</returns>
public static string SerializeObject(object value, params JsonConverter[] converters)
{
return SerializeObject(value, Formatting.None, converters);
}
/// <summary>
/// Serializes the specified object to a JSON string using formatting and a collection of <see cref="JsonConverter"/>.
/// </summary>
/// <param name="value">The object to serialize.</param>
/// <param name="formatting">Indicates how the output is formatted.</param>
/// <param name="converters">A collection of converters used while serializing.</param>
/// <returns>A JSON string representation of the object.</returns>
public static string SerializeObject(object value, Formatting formatting, params JsonConverter[] converters)
{
JsonSerializerSettings settings = (converters != null && converters.Length > 0)
? new JsonSerializerSettings {Converters = converters}
: null;
return SerializeObject(value, formatting, settings);
}
/// <summary>
/// Serializes the specified object to a JSON string using <see cref="JsonSerializerSettings"/>.
/// </summary>
/// <param name="value">The object to serialize.</param>
/// <param name="settings">The <see cref="JsonSerializerSettings"/> used to serialize the object.
/// If this is null, default serialization settings will be used.</param>
/// <returns>
/// A JSON string representation of the object.
/// </returns>
public static string SerializeObject(object value, JsonSerializerSettings settings)
{
return SerializeObject(value, Formatting.None, settings);
}
/// <summary>
/// Serializes the specified object to a JSON string using formatting and <see cref="JsonSerializerSettings"/>.
/// </summary>
/// <param name="value">The object to serialize.</param>
/// <param name="formatting">Indicates how the output is formatted.</param>
/// <param name="settings">The <see cref="JsonSerializerSettings"/> used to serialize the object.
/// If this is null, default serialization settings will be used.</param>
/// <returns>
/// A JSON string representation of the object.
/// </returns>
public static string SerializeObject(object value, Formatting formatting, JsonSerializerSettings settings)
{
return SerializeObject(value, null, formatting, settings);
}
/// <summary>
/// Serializes the specified object to a JSON string using a type, formatting and <see cref="JsonSerializerSettings"/>.
/// </summary>
/// <param name="value">The object to serialize.</param>
/// <param name="formatting">Indicates how the output is formatted.</param>
/// <param name="settings">The <see cref="JsonSerializerSettings"/> used to serialize the object.
/// If this is null, default serialization settings will be used.</param>
/// <param name="type">
/// The type of the value being serialized.
/// This parameter is used when <see cref="TypeNameHandling"/> is Auto to write out the type name if the type of the value does not match.
/// Specifying the type is optional.
/// </param>
/// <returns>
/// A JSON string representation of the object.
/// </returns>
public static string SerializeObject(object value, Type type, Formatting formatting, JsonSerializerSettings settings)
{
JsonSerializer jsonSerializer = JsonSerializer.CreateDefault(settings);
StringBuilder sb = new StringBuilder(256);
StringWriter sw = new StringWriter(sb, CultureInfo.InvariantCulture);
using (JsonTextWriter jsonWriter = new JsonTextWriter(sw))
{
jsonWriter.Formatting = formatting;
jsonSerializer.Serialize(jsonWriter, value, type);
}
return sw.ToString();
}
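// Illustrative usage (hypothetical values, not part of the original source):
// string json = JsonConvert.SerializeObject(
// new { Name = "Widget", Price = 9.99 },
// Formatting.Indented,
// new JsonSerializerSettings { NullValueHandling = NullValueHandling.Ignore });
// produces an indented JSON object with "Name" and "Price" properties.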
/// <summary>
/// Asynchronously serializes the specified object to a JSON string.
/// Serialization will happen on a new thread.
/// </summary>
/// <param name="value">The object to serialize.</param>
/// <returns>
/// A task that represents the asynchronous serialize operation. The value of the <c>TResult</c> parameter contains a JSON string representation of the object.
/// </returns>
public static Task<string> SerializeObjectAsync(object value)
{
return SerializeObjectAsync(value, Formatting.None, null);
}
/// <summary>
/// Asynchronously serializes the specified object to a JSON string using formatting.
/// Serialization will happen on a new thread.
/// </summary>
/// <param name="value">The object to serialize.</param>
/// <param name="formatting">Indicates how the output is formatted.</param>
/// <returns>
/// A task that represents the asynchronous serialize operation. The value of the <c>TResult</c> parameter contains a JSON string representation of the object.
/// </returns>
public static Task<string> SerializeObjectAsync(object value, Formatting formatting)
{
return SerializeObjectAsync(value, formatting, null);
}
/// <summary>
/// Asynchronously serializes the specified object to a JSON string using formatting and a collection of <see cref="JsonConverter"/>.
/// Serialization will happen on a new thread.
/// </summary>
/// <param name="value">The object to serialize.</param>
/// <param name="formatting">Indicates how the output is formatted.</param>
/// <param name="settings">The <see cref="JsonSerializerSettings"/> used to serialize the object.
/// If this is null, default serialization settings will be used.</param>
/// <returns>
/// A task that represents the asynchronous serialize operation. The value of the <c>TResult</c> parameter contains a JSON string representation of the object.
/// </returns>
public static Task<string> SerializeObjectAsync(object value, Formatting formatting, JsonSerializerSettings settings)
{
return Task.Factory.StartNew(() => SerializeObject(value, formatting, settings));
}
#endregion
#region Deserialize
/// <summary>
/// Deserializes the JSON to a .NET object.
/// </summary>
/// <param name="value">The JSON to deserialize.</param>
/// <returns>The deserialized object from the JSON string.</returns>
public static object DeserializeObject(string value)
{
return DeserializeObject(value, null, (JsonSerializerSettings) null);
}
/// <summary>
/// Deserializes the JSON to a .NET object using <see cref="JsonSerializerSettings"/>.
/// </summary>
/// <param name="value">The JSON to deserialize.</param>
/// <param name="settings">
/// The <see cref="JsonSerializerSettings"/> used to deserialize the object.
/// If this is null, default serialization settings will be used.
/// </param>
/// <returns>The deserialized object from the JSON string.</returns>
public static object DeserializeObject(string value, JsonSerializerSettings settings)
{
return DeserializeObject(value, null, settings);
}
/// <summary>
/// Deserializes the JSON to the specified .NET type.
/// </summary>
/// <param name="value">The JSON to deserialize.</param>
/// <param name="type">The <see cref="Type"/> of object being deserialized.</param>
/// <returns>The deserialized object from the JSON string.</returns>
public static object DeserializeObject(string value, Type type)
{
return DeserializeObject(value, type, (JsonSerializerSettings) null);
}
/// <summary>
/// Deserializes the JSON to the specified .NET type.
/// </summary>
/// <typeparam name="T">The type of the object to deserialize to.</typeparam>
/// <param name="value">The JSON to deserialize.</param>
/// <returns>The deserialized object from the JSON string.</returns>
public static T DeserializeObject<T>(string value)
{
return DeserializeObject<T>(value, (JsonSerializerSettings) null);
}
/// <summary>
/// Deserializes the JSON to the given anonymous type.
/// </summary>
/// <typeparam name="T">
/// The anonymous type to deserialize to. This can't be specified
/// traditionally and must be inferred from the anonymous type passed
/// as a parameter.
/// </typeparam>
/// <param name="value">The JSON to deserialize.</param>
/// <param name="anonymousTypeObject">The anonymous type object.</param>
/// <returns>The deserialized anonymous type from the JSON string.</returns>
public static T DeserializeAnonymousType<T>(string value, T anonymousTypeObject)
{
return DeserializeObject<T>(value);
}
/// <summary>
/// Deserializes the JSON to the given anonymous type using <see cref="JsonSerializerSettings"/>.
/// </summary>
/// <typeparam name="T">
/// The anonymous type to deserialize to. This can't be specified
/// traditionally and must be inferred from the anonymous type passed
/// as a parameter.
/// </typeparam>
/// <param name="value">The JSON to deserialize.</param>
/// <param name="anonymousTypeObject">The anonymous type object.</param>
/// <param name="settings">
/// The <see cref="JsonSerializerSettings"/> used to deserialize the object.
/// If this is null, default serialization settings will be used.
/// </param>
/// <returns>The deserialized anonymous type from the JSON string.</returns>
public static T DeserializeAnonymousType<T>(string value, T anonymousTypeObject, JsonSerializerSettings settings)
{
return DeserializeObject<T>(value, settings);
}
/// <summary>
/// Deserializes the JSON to the specified .NET type using a collection of <see cref="JsonConverter"/>.
/// </summary>
/// <typeparam name="T">The type of the object to deserialize to.</typeparam>
/// <param name="value">The JSON to deserialize.</param>
/// <param name="converters">Converters to use while deserializing.</param>
/// <returns>The deserialized object from the JSON string.</returns>
public static T DeserializeObject<T>(string value, params JsonConverter[] converters)
{
return (T) DeserializeObject(value, typeof (T), converters);
}
/// <summary>
/// Deserializes the JSON to the specified .NET type using <see cref="JsonSerializerSettings"/>.
/// </summary>
/// <typeparam name="T">The type of the object to deserialize to.</typeparam>
/// <param name="value">The object to deserialize.</param>
/// <param name="settings">
/// The <see cref="JsonSerializerSettings"/> used to deserialize the object.
/// If this is null, default serialization settings will be used.
/// </param>
/// <returns>The deserialized object from the JSON string.</returns>
public static T DeserializeObject<T>(string value, JsonSerializerSettings settings)
{
return (T) DeserializeObject(value, typeof (T), settings);
}
/// <summary>
/// Deserializes the JSON to the specified .NET type using a collection of <see cref="JsonConverter"/>.
/// </summary>
/// <param name="value">The JSON to deserialize.</param>
/// <param name="type">The type of the object to deserialize.</param>
/// <param name="converters">Converters to use while deserializing.</param>
/// <returns>The deserialized object from the JSON string.</returns>
public static object DeserializeObject(string value, Type type, params JsonConverter[] converters)
{
JsonSerializerSettings settings = (converters != null && converters.Length > 0)
? new JsonSerializerSettings {Converters = converters}
: null;
return DeserializeObject(value, type, settings);
}
/// <summary>
/// Deserializes the JSON to the specified .NET type using <see cref="JsonSerializerSettings"/>.
/// </summary>
/// <param name="value">The JSON to deserialize.</param>
/// <param name="type">The type of the object to deserialize to.</param>
/// <param name="settings">
/// The <see cref="JsonSerializerSettings"/> used to deserialize the object.
/// If this is null, default serialization settings will be used.
/// </param>
/// <returns>The deserialized object from the JSON string.</returns>
public static object DeserializeObject(string value, Type type, JsonSerializerSettings settings)
{
ValidationUtils.ArgumentNotNull(value, "value");
StringReader sr = new StringReader(value);
JsonSerializer jsonSerializer = JsonSerializer.CreateDefault(settings);
// by default DeserializeObject should check for additional content
if (!jsonSerializer.IsCheckAdditionalContentSet())
jsonSerializer.CheckAdditionalContent = true;
return jsonSerializer.Deserialize(new JsonTextReader(sr), type);
}
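// Hedged usage sketch (illustrative only): the public overloads above all funnel into this
// method. The Person type, JSON literal and settings shown here are assumptions for
// illustration, not part of this file.
//
//     var settings = new JsonSerializerSettings { MissingMemberHandling = MissingMemberHandling.Error };
//     Person p = JsonConvert.DeserializeObject<Person>("{\"Name\":\"Ann\",\"Age\":30}", settings);
//     object o = JsonConvert.DeserializeObject("{\"Age\":30}", typeof(Person));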
/// <summary>
/// Asynchronously deserializes the JSON to the specified .NET type.
/// Deserialization will happen on a new thread.
/// </summary>
/// <typeparam name="T">The type of the object to deserialize to.</typeparam>
/// <param name="value">The JSON to deserialize.</param>
/// <returns>
/// A task that represents the asynchronous deserialize operation. The value of the <c>TResult</c> parameter contains the deserialized object from the JSON string.
/// </returns>
public static Task<T> DeserializeObjectAsync<T>(string value)
{
return DeserializeObjectAsync<T>(value, null);
}
/// <summary>
/// Asynchronously deserializes the JSON to the specified .NET type using <see cref="JsonSerializerSettings"/>.
/// Deserialization will happen on a new thread.
/// </summary>
/// <typeparam name="T">The type of the object to deserialize to.</typeparam>
/// <param name="value">The JSON to deserialize.</param>
/// <param name="settings">
/// The <see cref="JsonSerializerSettings"/> used to deserialize the object.
/// If this is null, default serialization settings will be used.
/// </param>
/// <returns>
/// A task that represents the asynchronous deserialize operation. The value of the <c>TResult</c> parameter contains the deserialized object from the JSON string.
/// </returns>
public static Task<T> DeserializeObjectAsync<T>(string value, JsonSerializerSettings settings)
{
return Task.Factory.StartNew(() => DeserializeObject<T>(value, settings));
}
/// <summary>
/// Asynchronously deserializes the JSON to the specified .NET type.
/// Deserialization will happen on a new thread.
/// </summary>
/// <param name="value">The JSON to deserialize.</param>
/// <returns>
/// A task that represents the asynchronous deserialize operation. The value of the <c>TResult</c> parameter contains the deserialized object from the JSON string.
/// </returns>
public static Task<object> DeserializeObjectAsync(string value)
{
return DeserializeObjectAsync(value, null, null);
}
/// <summary>
/// Asynchronously deserializes the JSON to the specified .NET type using <see cref="JsonSerializerSettings"/>.
/// Deserialization will happen on a new thread.
/// </summary>
/// <param name="value">The JSON to deserialize.</param>
/// <param name="type">The type of the object to deserialize to.</param>
/// <param name="settings">
/// The <see cref="JsonSerializerSettings"/> used to deserialize the object.
/// If this is null, default serialization settings will be used.
/// </param>
/// <returns>
/// A task that represents the asynchronous deserialize operation. The value of the <c>TResult</c> parameter contains the deserialized object from the JSON string.
/// </returns>
public static Task<object> DeserializeObjectAsync(string value, Type type, JsonSerializerSettings settings)
{
return Task.Factory.StartNew(() => DeserializeObject(value, type, settings));
}
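// Hedged note: these *Async overloads simply queue the synchronous deserialization onto the
// thread pool via Task.Factory.StartNew, so they can be awaited like any other task,
// e.g. (Person and json are assumed illustrative names):
//
//     Person p = await JsonConvert.DeserializeObjectAsync<Person>(json);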
#endregion
/// <summary>
/// Populates the object with values from the JSON string.
/// </summary>
/// <param name="value">The JSON to populate values from.</param>
/// <param name="target">The target object to populate values onto.</param>
public static void PopulateObject(string value, object target)
{
PopulateObject(value, target, null);
}
/// <summary>
/// Populates the object with values from the JSON string using <see cref="JsonSerializerSettings"/>.
/// </summary>
/// <param name="value">The JSON to populate values from.</param>
/// <param name="target">The target object to populate values onto.</param>
/// <param name="settings">
/// The <see cref="JsonSerializerSettings"/> used to deserialize the object.
/// If this is null, default serialization settings will be used.
/// </param>
public static void PopulateObject(string value, object target, JsonSerializerSettings settings)
{
StringReader sr = new StringReader(value);
JsonSerializer jsonSerializer = JsonSerializer.CreateDefault(settings);
using (JsonReader jsonReader = new JsonTextReader(sr))
{
jsonSerializer.Populate(jsonReader, target);
if (jsonReader.Read() && jsonReader.TokenType != JsonToken.Comment)
throw new JsonSerializationException("Additional text found in JSON string after finishing deserializing object.");
}
}
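// Hedged usage sketch (illustrative only): PopulateObject merges JSON values onto an
// existing instance rather than creating a new one, and (per the check above) throws if
// non-comment content follows the JSON object. The UserPreferences type is an assumption.
//
//     var prefs = new UserPreferences { Theme = "light", FontSize = 12 };
//     JsonConvert.PopulateObject("{\"FontSize\":14}", prefs);
//     // prefs.Theme == "light", prefs.FontSize == 14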
/// <summary>
/// Asynchronously populates the object with values from the JSON string using <see cref="JsonSerializerSettings"/>.
/// </summary>
/// <param name="value">The JSON to populate values from.</param>
/// <param name="target">The target object to populate values onto.</param>
/// <param name="settings">
/// The <see cref="JsonSerializerSettings"/> used to deserialize the object.
/// If this is null, default serialization settings will be used.
/// </param>
/// <returns>
/// A task that represents the asynchronous populate operation.
/// </returns>
public static Task PopulateObjectAsync(string value, object target, JsonSerializerSettings settings)
{
return Task.Factory.StartNew(() => PopulateObject(value, target, settings));
}
/// <summary>
/// Serializes the <see cref="XNode"/> to a JSON string.
/// </summary>
/// <param name="node">The node to convert to JSON.</param>
/// <returns>A JSON string of the XNode.</returns>
public static string SerializeXNode(XObject node)
{
return SerializeXNode(node, Formatting.None);
}
/// <summary>
/// Serializes the <see cref="XNode"/> to a JSON string using formatting.
/// </summary>
/// <param name="node">The node to convert to JSON.</param>
/// <param name="formatting">Indicates how the output is formatted.</param>
/// <returns>A JSON string of the XNode.</returns>
public static string SerializeXNode(XObject node, Formatting formatting)
{
return SerializeXNode(node, formatting, false);
}
/// <summary>
/// Serializes the <see cref="XNode"/> to a JSON string using formatting and omits the root object if <paramref name="omitRootObject"/> is <c>true</c>.
/// </summary>
/// <param name="node">The node to serialize.</param>
/// <param name="formatting">Indicates how the output is formatted.</param>
/// <param name="omitRootObject">Omits writing the root object.</param>
/// <returns>A JSON string of the XNode.</returns>
public static string SerializeXNode(XObject node, Formatting formatting, bool omitRootObject)
{
XmlNodeConverter converter = new XmlNodeConverter {OmitRootObject = omitRootObject};
return SerializeObject(node, formatting, converter);
}
/// <summary>
/// Deserializes the <see cref="XNode"/> from a JSON string.
/// </summary>
/// <param name="value">The JSON string.</param>
/// <returns>The deserialized XNode.</returns>
public static XDocument DeserializeXNode(string value)
{
return DeserializeXNode(value, null);
}
/// <summary>
/// Deserializes the <see cref="XNode"/> from a JSON string nested in a root element specified by <paramref name="deserializeRootElementName"/>.
/// </summary>
/// <param name="value">The JSON string.</param>
/// <param name="deserializeRootElementName">The name of the root element to append when deserializing.</param>
/// <returns>The deserialized XNode.</returns>
public static XDocument DeserializeXNode(string value, string deserializeRootElementName)
{
return DeserializeXNode(value, deserializeRootElementName, false);
}
/// <summary>
/// Deserializes the <see cref="XNode"/> from a JSON string nested in a root element specified by <paramref name="deserializeRootElementName"/>
/// and writes a .NET array attribute for collections.
/// </summary>
/// <param name="value">The JSON string.</param>
/// <param name="deserializeRootElementName">The name of the root element to append when deserializing.</param>
/// <param name="writeArrayAttribute">
/// A flag to indicate whether to write the Json.NET array attribute.
/// This attribute helps preserve arrays when converting the written XML back to JSON.
/// </param>
/// <returns>The deserialized XNode.</returns>
public static XDocument DeserializeXNode(string value, string deserializeRootElementName, bool writeArrayAttribute)
{
XmlNodeConverter converter = new XmlNodeConverter();
converter.DeserializeRootElementName = deserializeRootElementName;
converter.WriteArrayAttribute = writeArrayAttribute;
return (XDocument) DeserializeObject(value, typeof (XDocument), converter);
}
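// Hedged round-trip sketch (illustrative only): SerializeXNode and DeserializeXNode are
// inverses built on XmlNodeConverter; writeArrayAttribute emits the Json.NET array
// attribute so collections are preserved when the written XML is converted back to JSON.
// The XML literal is an assumption, and System.Xml.Linq types are assumed available.
//
//     XDocument doc = XDocument.Parse("<root><item>1</item></root>");
//     string json = JsonConvert.SerializeXNode(doc, Formatting.Indented, omitRootObject: false);
//     XDocument back = JsonConvert.DeserializeXNode(json, deserializeRootElementName: null, writeArrayAttribute: true);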
}
}
#endif
| |
using System;
using System.IO;
using UnityEditorInternal;
using UnityEngine;
using UnityEditor;
using System.Collections;
using System.Collections.Generic;
using Projeny.Internal;
using System.Linq;
namespace Projeny.Internal
{
public class PmDragDropHandler
{
readonly PrjCommandHandler _prjCommandHandler;
readonly PmPackageHandler _packageHandler;
readonly AsyncProcessor _asyncProcessor;
readonly PmView _view;
readonly PmModel _model;
readonly EventManager _eventManager = new EventManager(null);
public PmDragDropHandler(
PmModel model,
PmView view,
AsyncProcessor asyncProcessor,
PmPackageHandler packageHandler,
PrjCommandHandler prjCommandHandler)
{
_prjCommandHandler = prjCommandHandler;
_packageHandler = packageHandler;
_asyncProcessor = asyncProcessor;
_view = view;
_model = model;
}
public void Initialize()
{
_view.DraggedDroppedListEntries += _eventManager.Add<DragListTypes, DragListTypes, List<DragListEntry>>(OnDraggedDroppedListEntries, EventQueueMode.LatestOnly);
}
public void Dispose()
{
_view.DraggedDroppedListEntries -= _eventManager.Remove<DragListTypes, DragListTypes, List<DragListEntry>>(OnDraggedDroppedListEntries);
_eventManager.AssertIsEmpty();
}
public void Update()
{
_eventManager.Flush();
}
void OnDraggedDroppedListEntries(DragListTypes sourceType, DragListTypes dropType, List<DragListEntry> entries)
{
switch (dropType)
{
case DragListTypes.Package:
{
switch (sourceType)
{
case DragListTypes.PluginItem:
{
foreach (var entry in entries)
{
var name = (string)entry.Model;
_model.RemovePluginItem(name);
}
break;
}
case DragListTypes.AssetItem:
{
foreach (var entry in entries)
{
var name = (string)entry.Model;
_model.RemoveAssetItem(name);
}
break;
}
case DragListTypes.Release:
{
_asyncProcessor.Process(
InstallReleasesAsync(
entries.Select(x => (ReleaseInfo)x.Model).ToList()), true, "Installing Releases");
break;
}
default:
{
Assert.Throw();
break;
}
}
break;
}
case DragListTypes.PluginItem:
{
switch (sourceType)
{
case DragListTypes.AssetItem:
{
foreach (var entry in entries)
{
var name = (string)entry.Model;
_model.RemoveAssetItem(name);
_model.AddPluginItem(name);
}
break;
}
case DragListTypes.PluginItem:
{
// Do nothing
break;
}
case DragListTypes.Package:
{
foreach (var entry in entries)
{
var info = (PackageInfo)entry.Model;
if (!_model.HasPluginItem(info.Name))
{
if (_model.HasAssetItem(info.Name))
{
_model.RemoveAssetItem(info.Name);
}
_model.AddPluginItem(info.Name);
}
}
break;
}
default:
{
Assert.Throw();
break;
}
}
break;
}
case DragListTypes.AssetItem:
{
switch (sourceType)
{
case DragListTypes.AssetItem:
{
// Do nothing
break;
}
case DragListTypes.PluginItem:
{
foreach (var entry in entries)
{
var name = (string)entry.Model;
_model.RemovePluginItem(name);
_model.AddAssetItem(name);
}
break;
}
case DragListTypes.Package:
{
foreach (var entry in entries)
{
var info = (PackageInfo)entry.Model;
if (!_model.HasAssetItem(info.Name))
{
if (_model.HasPluginItem(info.Name))
{
_model.RemovePluginItem(info.Name);
}
_model.AddAssetItem(info.Name);
}
}
break;
}
default:
{
Assert.Throw();
break;
}
}
break;
}
case DragListTypes.Release:
{
// Nothing can be dragged onto the releases list
break;
}
case DragListTypes.VsSolution:
{
if (sourceType == DragListTypes.AssetItem || sourceType == DragListTypes.PluginItem)
{
foreach (var entry in entries)
{
var name = (string)entry.Model;
if (!_model.HasVsProject(name))
{
_model.AddVsProject(name);
}
}
}
break;
}
default:
{
Assert.Throw();
break;
}
}
}
IEnumerator<InstallReleaseUserChoices> CheckShouldInstall(ReleaseInfo releaseInfo)
{
return CoRoutine.Wrap<InstallReleaseUserChoices>(CheckShouldInstallInternal(releaseInfo));
}
PackageInfo TryFindPackageInfoForRelease(ReleaseInfo releaseInfo)
{
foreach (var packageInfo in _model.AllPackages)
{
if (packageInfo.InstallInfo != null && packageInfo.InstallInfo.ReleaseInfo != null && packageInfo.InstallInfo.ReleaseInfo.Id == releaseInfo.Id)
{
return packageInfo;
}
}
return null;
}
IEnumerator CheckShouldInstallInternal(ReleaseInfo releaseInfo)
{
var packageInfo = TryFindPackageInfoForRelease(releaseInfo);
if (packageInfo == null)
{
yield return InstallReleaseUserChoices.Install;
yield break;
}
Assert.IsNotNull(packageInfo.InstallInfo);
var packageReleaseInfo = packageInfo.InstallInfo.ReleaseInfo;
Assert.IsNotNull(packageReleaseInfo);
// TODO - how to handle?
Assert.That(packageReleaseInfo.HasVersionCode);
Assert.That(releaseInfo.HasVersionCode);
IEnumerator<int> userChoice;
if (packageReleaseInfo.VersionCode == releaseInfo.VersionCode)
{
Assert.IsEqual(releaseInfo.Version, packageReleaseInfo.Version);
userChoice = _view.PromptForUserChoice(
"Package '{0}' is already installed with the same version ('{1}'). Would you like to re-install it anyway? Note that any local changes you've made to the package will be reverted."
.Fmt(packageReleaseInfo.Name, packageReleaseInfo.Version), new[] { "Overwrite", "Skip", "Cancel" }, null, null, 0, 2);
}
else if (releaseInfo.VersionCode > packageReleaseInfo.VersionCode)
{
userChoice = _view.PromptForUserChoice(
"Package '{0}' is already installed with version '{1}'. Would you like to UPGRADE it to version '{2}'? Note that any local changes you've made to the package will be lost."
.Fmt(releaseInfo.Name, packageReleaseInfo.Version, releaseInfo.Version), new[] { "Upgrade", "Skip", "Cancel" }, null, null, 0, 2);
}
else
{
Assert.That(releaseInfo.VersionCode < packageReleaseInfo.VersionCode);
userChoice = _view.PromptForUserChoice(
"Package '{0}' is already installed with version '{1}'. Would you like to DOWNGRADE it to version '{2}'? Note that any local changes you've made to the package will be lost."
.Fmt(releaseInfo.Name, packageReleaseInfo.Version, releaseInfo.Version), new[] { "Downgrade", "Skip", "Cancel" }, null, null, 0, 2);
}
yield return userChoice;
switch (userChoice.Current)
{
case 0:
{
yield return InstallReleaseUserChoices.Install;
break;
}
case 1:
{
yield return InstallReleaseUserChoices.Skip;
break;
}
case 2:
{
yield return InstallReleaseUserChoices.Cancel;
break;
}
default:
{
Assert.Throw();
break;
}
}
}
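// Hedged sketch of the coroutine convention used here: CoRoutine.Wrap turns the untyped
// enumerator above into IEnumerator<InstallReleaseUserChoices>, and the last value it
// yields acts as its "return value", read back through Current after the caller yields
// on it (see InstallReleasesAsync below):
//
//     var userChoice = CheckShouldInstall(releaseInfo);
//     yield return userChoice;          // run the nested coroutine to completion
//     var choice = userChoice.Current;  // final yielded value = the user's decision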
public IEnumerator InstallReleasesAsync(List<ReleaseInfo> releaseInfos)
{
// Need to make sure we have the most recent package list so we can determine whether this is
// an upgrade / downgrade / etc.
yield return _packageHandler.RefreshPackagesAsync();
Assert.That(releaseInfos.Select(x => x.Id).GetDuplicates().IsEmpty(), "Found duplicate releases selected - are you installing multiple versions of the same release?");
var packageRoot = _model.TryGetCurrentPackageFolderPath();
Assert.IsNotNull(packageRoot, "Please select a package folder before attempting to install a release");
foreach (var releaseInfo in releaseInfos)
{
var userChoice = CheckShouldInstall(releaseInfo);
yield return userChoice;
switch (userChoice.Current)
{
case InstallReleaseUserChoices.Cancel:
{
yield break;
}
case InstallReleaseUserChoices.Install:
{
yield return _prjCommandHandler.ProcessPrjCommand(
"Installing release '{0}'".Fmt(releaseInfo.Name), PrjHelper.InstallReleaseAsync(packageRoot, releaseInfo));
break;
}
case InstallReleaseUserChoices.Skip:
{
// Do nothing
break;
}
default:
{
Assert.Throw();
break;
}
}
}
yield return _packageHandler.RefreshPackagesAsync();
}
enum InstallReleaseUserChoices
{
Install,
Cancel,
Skip,
}
}
}
| |
namespace VhdAttach {
partial class SettingsForm {
/// <summary>
/// Required designer variable.
/// </summary>
private System.ComponentModel.IContainer components = null;
/// <summary>
/// Clean up any resources being used.
/// </summary>
/// <param name="disposing">true if managed resources should be disposed; otherwise, false.</param>
protected override void Dispose(bool disposing) {
if (disposing && (components != null)) {
components.Dispose();
}
base.Dispose(disposing);
}
#region Windows Form Designer generated code
/// <summary>
/// Required method for Designer support - do not modify
/// the contents of this method with the code editor.
/// </summary>
private void InitializeComponent() {
this.components = new System.ComponentModel.Container();
System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(SettingsForm));
this.buttonCancel = new System.Windows.Forms.Button();
this.buttonOk = new System.Windows.Forms.Button();
this.erp = new System.Windows.Forms.ErrorProvider(this.components);
this.groupContextMenu = new System.Windows.Forms.GroupBox();
this.checkIsoOpen = new System.Windows.Forms.CheckBox();
this.checkVhdOpen = new System.Windows.Forms.CheckBox();
this.checkIsoDetach = new System.Windows.Forms.CheckBox();
this.checkIsoAttachReadOnly = new System.Windows.Forms.CheckBox();
this.checkVhdAttachReadOnly = new System.Windows.Forms.CheckBox();
this.checkVhdDetachDrive = new System.Windows.Forms.CheckBox();
this.checkVhdDetach = new System.Windows.Forms.CheckBox();
this.checkVhdAttach = new System.Windows.Forms.CheckBox();
this.groupAutoAttach = new System.Windows.Forms.GroupBox();
this.toolVhdOrder = new System.Windows.Forms.ToolStrip();
this.buttonMoveVhdUp = new System.Windows.Forms.ToolStripButton();
this.buttonMoveVhdDown = new System.Windows.Forms.ToolStripButton();
this.buttonVhdReadOnly = new System.Windows.Forms.ToolStripButton();
this.buttonVhdRemove = new System.Windows.Forms.Button();
this.buttonVhdAdd = new System.Windows.Forms.Button();
this.listAutoAttach = new System.Windows.Forms.ListView();
this.columnFileName = ((System.Windows.Forms.ColumnHeader)(new System.Windows.Forms.ColumnHeader()));
this.imagesAutoAttach = new System.Windows.Forms.ImageList(this.components);
this.btnRegisterExtensionVhd = new System.Windows.Forms.Button();
((System.ComponentModel.ISupportInitialize)(this.erp)).BeginInit();
this.groupContextMenu.SuspendLayout();
this.groupAutoAttach.SuspendLayout();
this.toolVhdOrder.SuspendLayout();
this.SuspendLayout();
//
// buttonCancel
//
this.buttonCancel.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right)));
this.buttonCancel.DialogResult = System.Windows.Forms.DialogResult.Cancel;
this.buttonCancel.Location = new System.Drawing.Point(320, 365);
this.buttonCancel.Margin = new System.Windows.Forms.Padding(3, 15, 3, 3);
this.buttonCancel.Name = "buttonCancel";
this.buttonCancel.Size = new System.Drawing.Size(100, 28);
this.buttonCancel.TabIndex = 0;
this.buttonCancel.Text = "Cancel";
this.buttonCancel.UseVisualStyleBackColor = true;
//
// buttonOk
//
this.buttonOk.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Right)));
this.buttonOk.DialogResult = System.Windows.Forms.DialogResult.OK;
this.buttonOk.Location = new System.Drawing.Point(214, 365);
this.buttonOk.Margin = new System.Windows.Forms.Padding(3, 15, 3, 3);
this.buttonOk.Name = "buttonOk";
this.buttonOk.Size = new System.Drawing.Size(100, 28);
this.buttonOk.TabIndex = 4;
this.buttonOk.Text = "OK";
this.buttonOk.UseVisualStyleBackColor = true;
this.buttonOk.Click += new System.EventHandler(this.buttonOk_Click);
//
// erp
//
this.erp.BlinkStyle = System.Windows.Forms.ErrorBlinkStyle.NeverBlink;
this.erp.ContainerControl = this;
this.erp.Icon = ((System.Drawing.Icon)(resources.GetObject("erp.Icon")));
//
// groupContextMenu
//
this.groupContextMenu.Controls.Add(this.checkIsoOpen);
this.groupContextMenu.Controls.Add(this.checkVhdOpen);
this.groupContextMenu.Controls.Add(this.checkIsoDetach);
this.groupContextMenu.Controls.Add(this.checkIsoAttachReadOnly);
this.groupContextMenu.Controls.Add(this.checkVhdAttachReadOnly);
this.groupContextMenu.Controls.Add(this.checkVhdDetachDrive);
this.groupContextMenu.Controls.Add(this.checkVhdDetach);
this.groupContextMenu.Controls.Add(this.checkVhdAttach);
this.groupContextMenu.Location = new System.Drawing.Point(12, 12);
this.groupContextMenu.Name = "groupContextMenu";
this.groupContextMenu.Size = new System.Drawing.Size(408, 108);
this.groupContextMenu.TabIndex = 1;
this.groupContextMenu.TabStop = false;
this.groupContextMenu.Text = "Explorer context menu";
//
// checkIsoOpen
//
this.checkIsoOpen.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));
this.checkIsoOpen.AutoSize = true;
this.checkIsoOpen.Location = new System.Drawing.Point(290, 27);
this.checkIsoOpen.Margin = new System.Windows.Forms.Padding(3, 9, 3, 3);
this.checkIsoOpen.Name = "checkIsoOpen";
this.checkIsoOpen.Size = new System.Drawing.Size(102, 21);
this.checkIsoOpen.TabIndex = 5;
this.checkIsoOpen.Text = "Open (ISO)";
this.checkIsoOpen.UseVisualStyleBackColor = true;
//
// checkVhdOpen
//
this.checkVhdOpen.AutoSize = true;
this.checkVhdOpen.Location = new System.Drawing.Point(6, 27);
this.checkVhdOpen.Margin = new System.Windows.Forms.Padding(3, 9, 3, 3);
this.checkVhdOpen.Name = "checkVhdOpen";
this.checkVhdOpen.Size = new System.Drawing.Size(65, 21);
this.checkVhdOpen.TabIndex = 0;
this.checkVhdOpen.Text = "Open";
this.checkVhdOpen.UseVisualStyleBackColor = true;
//
// checkIsoDetach
//
this.checkIsoDetach.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));
this.checkIsoDetach.AutoSize = true;
this.checkIsoDetach.Location = new System.Drawing.Point(290, 80);
this.checkIsoDetach.Margin = new System.Windows.Forms.Padding(3, 2, 3, 2);
this.checkIsoDetach.Name = "checkIsoDetach";
this.checkIsoDetach.Size = new System.Drawing.Size(112, 21);
this.checkIsoDetach.TabIndex = 7;
this.checkIsoDetach.Text = "Detach (ISO)";
this.checkIsoDetach.UseVisualStyleBackColor = true;
//
// checkIsoAttachReadOnly
//
this.checkIsoAttachReadOnly.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Right)));
this.checkIsoAttachReadOnly.AutoSize = true;
this.checkIsoAttachReadOnly.Location = new System.Drawing.Point(290, 54);
this.checkIsoAttachReadOnly.Name = "checkIsoAttachReadOnly";
this.checkIsoAttachReadOnly.Size = new System.Drawing.Size(107, 21);
this.checkIsoAttachReadOnly.TabIndex = 6;
this.checkIsoAttachReadOnly.Text = "Attach (ISO)";
this.checkIsoAttachReadOnly.UseVisualStyleBackColor = true;
//
// checkVhdAttachReadOnly
//
this.checkVhdAttachReadOnly.AutoSize = true;
this.checkVhdAttachReadOnly.Location = new System.Drawing.Point(112, 54);
this.checkVhdAttachReadOnly.Margin = new System.Windows.Forms.Padding(3, 9, 3, 3);
this.checkVhdAttachReadOnly.Name = "checkVhdAttachReadOnly";
this.checkVhdAttachReadOnly.Size = new System.Drawing.Size(144, 21);
this.checkVhdAttachReadOnly.TabIndex = 2;
this.checkVhdAttachReadOnly.Text = "Attach (read-only)";
this.checkVhdAttachReadOnly.UseVisualStyleBackColor = true;
//
// checkVhdDetachDrive
//
this.checkVhdDetachDrive.AutoSize = true;
this.checkVhdDetachDrive.Location = new System.Drawing.Point(112, 81);
this.checkVhdDetachDrive.Name = "checkVhdDetachDrive";
this.checkVhdDetachDrive.Size = new System.Drawing.Size(110, 21);
this.checkVhdDetachDrive.TabIndex = 4;
this.checkVhdDetachDrive.Text = "Detach drive";
this.checkVhdDetachDrive.UseVisualStyleBackColor = true;
//
// checkVhdDetach
//
this.checkVhdDetach.AutoSize = true;
this.checkVhdDetach.Location = new System.Drawing.Point(6, 81);
this.checkVhdDetach.Name = "checkVhdDetach";
this.checkVhdDetach.Size = new System.Drawing.Size(75, 21);
this.checkVhdDetach.TabIndex = 3;
this.checkVhdDetach.Text = "Detach";
this.checkVhdDetach.UseVisualStyleBackColor = true;
//
// checkVhdAttach
//
this.checkVhdAttach.AutoSize = true;
this.checkVhdAttach.Location = new System.Drawing.Point(6, 54);
this.checkVhdAttach.Name = "checkVhdAttach";
this.checkVhdAttach.Size = new System.Drawing.Size(70, 21);
this.checkVhdAttach.TabIndex = 1;
this.checkVhdAttach.Text = "Attach";
this.checkVhdAttach.UseVisualStyleBackColor = true;
//
// groupAutoAttach
//
this.groupAutoAttach.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
| System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.groupAutoAttach.Controls.Add(this.toolVhdOrder);
this.groupAutoAttach.Controls.Add(this.buttonVhdRemove);
this.groupAutoAttach.Controls.Add(this.buttonVhdAdd);
this.groupAutoAttach.Controls.Add(this.listAutoAttach);
this.groupAutoAttach.Location = new System.Drawing.Point(12, 132);
this.groupAutoAttach.Margin = new System.Windows.Forms.Padding(3, 9, 3, 3);
this.groupAutoAttach.Name = "groupAutoAttach";
this.groupAutoAttach.Size = new System.Drawing.Size(408, 215);
this.groupAutoAttach.TabIndex = 2;
this.groupAutoAttach.TabStop = false;
this.groupAutoAttach.Text = "Auto-attach VHDs";
//
// toolVhdOrder
//
this.toolVhdOrder.AllowMerge = false;
this.toolVhdOrder.Anchor = ((System.Windows.Forms.AnchorStyles)(((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
| System.Windows.Forms.AnchorStyles.Right)));
this.toolVhdOrder.AutoSize = false;
this.toolVhdOrder.CanOverflow = false;
this.toolVhdOrder.Dock = System.Windows.Forms.DockStyle.None;
this.toolVhdOrder.GripStyle = System.Windows.Forms.ToolStripGripStyle.Hidden;
this.toolVhdOrder.Items.AddRange(new System.Windows.Forms.ToolStripItem[] {
this.buttonMoveVhdUp,
this.buttonMoveVhdDown,
this.buttonVhdReadOnly});
this.toolVhdOrder.LayoutStyle = System.Windows.Forms.ToolStripLayoutStyle.Flow;
this.toolVhdOrder.Location = new System.Drawing.Point(374, 27);
this.toolVhdOrder.Name = "toolVhdOrder";
this.toolVhdOrder.RenderMode = System.Windows.Forms.ToolStripRenderMode.System;
this.toolVhdOrder.Size = new System.Drawing.Size(32, 148);
this.toolVhdOrder.TabIndex = 1;
//
// buttonMoveVhdUp
//
this.buttonMoveVhdUp.DisplayStyle = System.Windows.Forms.ToolStripItemDisplayStyle.Image;
this.buttonMoveVhdUp.Enabled = false;
this.buttonMoveVhdUp.Image = global::VhdAttach.Properties.Resources.buttonMoveVhdUp_16;
this.buttonMoveVhdUp.ImageTransparentColor = System.Drawing.Color.Magenta;
this.buttonMoveVhdUp.Name = "buttonMoveVhdUp";
this.buttonMoveVhdUp.Size = new System.Drawing.Size(23, 20);
this.buttonMoveVhdUp.Text = "Move Up";
this.buttonMoveVhdUp.ToolTipText = "Move up (Alt+Up)";
this.buttonMoveVhdUp.Click += new System.EventHandler(this.buttonMoveVhdUp_Click);
//
// buttonMoveVhdDown
//
this.buttonMoveVhdDown.DisplayStyle = System.Windows.Forms.ToolStripItemDisplayStyle.Image;
this.buttonMoveVhdDown.Enabled = false;
this.buttonMoveVhdDown.Image = global::VhdAttach.Properties.Resources.buttonMoveVhdDown_16;
this.buttonMoveVhdDown.ImageTransparentColor = System.Drawing.Color.Magenta;
this.buttonMoveVhdDown.Name = "buttonMoveVhdDown";
this.buttonMoveVhdDown.RightToLeftAutoMirrorImage = true;
this.buttonMoveVhdDown.Size = new System.Drawing.Size(23, 20);
this.buttonMoveVhdDown.Text = "Move down";
this.buttonMoveVhdDown.ToolTipText = "Move down (Alt+Down)";
this.buttonMoveVhdDown.Click += new System.EventHandler(this.buttonMoveVhdDown_Click);
//
// buttonVhdReadOnly
//
this.buttonVhdReadOnly.CheckOnClick = true;
this.buttonVhdReadOnly.DisplayStyle = System.Windows.Forms.ToolStripItemDisplayStyle.Image;
this.buttonVhdReadOnly.Image = global::VhdAttach.Properties.Resources.buttonVhdReadOnly_16;
this.buttonVhdReadOnly.ImageTransparentColor = System.Drawing.Color.Magenta;
this.buttonVhdReadOnly.Margin = new System.Windows.Forms.Padding(0, 8, 0, 2);
this.buttonVhdReadOnly.Name = "buttonVhdReadOnly";
this.buttonVhdReadOnly.Size = new System.Drawing.Size(23, 20);
this.buttonVhdReadOnly.Text = "Read-only";
this.buttonVhdReadOnly.Click += new System.EventHandler(this.buttonVhdReadOnly_Click);
//
// buttonVhdRemove
//
this.buttonVhdRemove.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));
this.buttonVhdRemove.Enabled = false;
this.buttonVhdRemove.Location = new System.Drawing.Point(112, 181);
this.buttonVhdRemove.Name = "buttonVhdRemove";
this.buttonVhdRemove.Size = new System.Drawing.Size(100, 28);
this.buttonVhdRemove.TabIndex = 3;
this.buttonVhdRemove.Text = "&Remove";
this.buttonVhdRemove.UseVisualStyleBackColor = true;
this.buttonVhdRemove.Click += new System.EventHandler(this.buttonVhdRemove_Click);
//
// buttonVhdAdd
//
this.buttonVhdAdd.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));
this.buttonVhdAdd.Location = new System.Drawing.Point(6, 181);
this.buttonVhdAdd.Name = "buttonVhdAdd";
this.buttonVhdAdd.Size = new System.Drawing.Size(100, 28);
this.buttonVhdAdd.TabIndex = 2;
this.buttonVhdAdd.Text = "&Add";
this.buttonVhdAdd.UseVisualStyleBackColor = true;
this.buttonVhdAdd.Click += new System.EventHandler(this.buttonVhdAdd_Click);
//
// listAutoAttach
//
this.listAutoAttach.Anchor = ((System.Windows.Forms.AnchorStyles)((((System.Windows.Forms.AnchorStyles.Top | System.Windows.Forms.AnchorStyles.Bottom)
| System.Windows.Forms.AnchorStyles.Left)
| System.Windows.Forms.AnchorStyles.Right)));
this.listAutoAttach.Columns.AddRange(new System.Windows.Forms.ColumnHeader[] {
this.columnFileName});
this.listAutoAttach.FullRowSelect = true;
this.listAutoAttach.GridLines = true;
this.listAutoAttach.HeaderStyle = System.Windows.Forms.ColumnHeaderStyle.Nonclickable;
this.listAutoAttach.HideSelection = false;
this.listAutoAttach.Location = new System.Drawing.Point(6, 27);
this.listAutoAttach.Margin = new System.Windows.Forms.Padding(3, 9, 3, 3);
this.listAutoAttach.MultiSelect = false;
this.listAutoAttach.Name = "listAutoAttach";
this.listAutoAttach.ShowItemToolTips = true;
this.listAutoAttach.Size = new System.Drawing.Size(365, 148);
this.listAutoAttach.SmallImageList = this.imagesAutoAttach;
this.listAutoAttach.TabIndex = 0;
this.listAutoAttach.UseCompatibleStateImageBehavior = false;
this.listAutoAttach.View = System.Windows.Forms.View.Details;
this.listAutoAttach.SelectedIndexChanged += new System.EventHandler(this.listAutoAttach_SelectedIndexChanged);
this.listAutoAttach.PreviewKeyDown += new System.Windows.Forms.PreviewKeyDownEventHandler(this.listAutoAttach_PreviewKeyDown);
//
// columnFileName
//
this.columnFileName.Text = "File name";
this.columnFileName.Width = 200;
//
// imagesAutoAttach
//
this.imagesAutoAttach.ColorDepth = System.Windows.Forms.ColorDepth.Depth8Bit;
this.imagesAutoAttach.ImageSize = new System.Drawing.Size(16, 16);
this.imagesAutoAttach.TransparentColor = System.Drawing.Color.Transparent;
//
// btnRegisterExtensionVhd
//
this.btnRegisterExtensionVhd.Anchor = ((System.Windows.Forms.AnchorStyles)((System.Windows.Forms.AnchorStyles.Bottom | System.Windows.Forms.AnchorStyles.Left)));
this.btnRegisterExtensionVhd.Location = new System.Drawing.Point(12, 365);
this.btnRegisterExtensionVhd.Margin = new System.Windows.Forms.Padding(3, 15, 3, 3);
this.btnRegisterExtensionVhd.Name = "btnRegisterExtensionVhd";
this.btnRegisterExtensionVhd.Size = new System.Drawing.Size(149, 28);
this.btnRegisterExtensionVhd.TabIndex = 3;
this.btnRegisterExtensionVhd.Text = "&Register extension";
this.btnRegisterExtensionVhd.UseVisualStyleBackColor = true;
this.btnRegisterExtensionVhd.Visible = false;
this.btnRegisterExtensionVhd.Click += new System.EventHandler(this.btnRegisterExtensionVhd_Click);
//
// SettingsForm
//
this.AcceptButton = this.buttonOk;
this.AutoScaleDimensions = new System.Drawing.SizeF(8F, 16F);
this.AutoScaleMode = System.Windows.Forms.AutoScaleMode.Font;
this.CancelButton = this.buttonCancel;
this.ClientSize = new System.Drawing.Size(432, 405);
this.Controls.Add(this.btnRegisterExtensionVhd);
this.Controls.Add(this.groupAutoAttach);
this.Controls.Add(this.groupContextMenu);
this.Controls.Add(this.buttonCancel);
this.Controls.Add(this.buttonOk);
this.FormBorderStyle = System.Windows.Forms.FormBorderStyle.FixedDialog;
this.Margin = new System.Windows.Forms.Padding(3, 2, 3, 2);
this.MaximizeBox = false;
this.MinimizeBox = false;
this.Name = "SettingsForm";
this.ShowIcon = false;
this.ShowInTaskbar = false;
this.StartPosition = System.Windows.Forms.FormStartPosition.CenterParent;
this.Text = "Options";
this.Load += new System.EventHandler(this.SettingsForm_Load);
this.Resize += new System.EventHandler(this.SettingsForm_Resize);
((System.ComponentModel.ISupportInitialize)(this.erp)).EndInit();
this.groupContextMenu.ResumeLayout(false);
this.groupContextMenu.PerformLayout();
this.groupAutoAttach.ResumeLayout(false);
this.toolVhdOrder.ResumeLayout(false);
this.toolVhdOrder.PerformLayout();
this.ResumeLayout(false);
}
#endregion
private System.Windows.Forms.Button buttonCancel;
private System.Windows.Forms.Button buttonOk;
private System.Windows.Forms.ErrorProvider erp;
private System.Windows.Forms.GroupBox groupContextMenu;
private System.Windows.Forms.CheckBox checkVhdDetachDrive;
private System.Windows.Forms.CheckBox checkVhdDetach;
private System.Windows.Forms.CheckBox checkVhdAttach;
private System.Windows.Forms.GroupBox groupAutoAttach;
private System.Windows.Forms.Button buttonVhdRemove;
private System.Windows.Forms.Button buttonVhdAdd;
private System.Windows.Forms.ListView listAutoAttach;
private System.Windows.Forms.ColumnHeader columnFileName;
private System.Windows.Forms.ToolStrip toolVhdOrder;
private System.Windows.Forms.ToolStripButton buttonMoveVhdUp;
private System.Windows.Forms.ToolStripButton buttonMoveVhdDown;
private System.Windows.Forms.ImageList imagesAutoAttach;
private System.Windows.Forms.CheckBox checkVhdAttachReadOnly;
private System.Windows.Forms.Button btnRegisterExtensionVhd;
private System.Windows.Forms.ToolStripButton buttonVhdReadOnly;
private System.Windows.Forms.CheckBox checkIsoDetach;
private System.Windows.Forms.CheckBox checkIsoAttachReadOnly;
private System.Windows.Forms.CheckBox checkVhdOpen;
private System.Windows.Forms.CheckBox checkIsoOpen;
}
}
| |
//------------------------------------------------------------------------------
// <copyright file="RadioButtonRenderer.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//------------------------------------------------------------------------------
namespace System.Windows.Forms {
using System;
using System.Drawing;
using System.Diagnostics.CodeAnalysis;
using System.Windows.Forms.Internal;
using System.Windows.Forms.VisualStyles;
using Microsoft.Win32;
/// <include file='doc\RadioButtonRenderer.uex' path='docs/doc[@for="RadioButtonRenderer"]/*' />
/// <devdoc>
/// <para>
/// This is a rendering class for the RadioButton control. It works downlevel too (obviously
/// without visual styles applied).
/// </para>
/// </devdoc>
public sealed class RadioButtonRenderer {
//Make this per-thread, so that different threads can safely use these methods.
[ThreadStatic]
private static VisualStyleRenderer visualStyleRenderer = null;
private static readonly VisualStyleElement RadioElement = VisualStyleElement.Button.RadioButton.UncheckedNormal;
private static bool renderMatchingApplicationState = true;
//cannot instantiate
private RadioButtonRenderer() {
}
/// <include file='doc\ButtonRenderer.uex' path='docs/doc[@for="ButtonRenderer.RenderMatchingApplicationState"]/*' />
/// <devdoc>
/// <para>
/// If this property is true, then the renderer will use the setting from Application.RenderWithVisualStyles to
/// determine how to render.
/// If this property is false, the renderer will always render with visual styles.
/// </para>
/// </devdoc>
public static bool RenderMatchingApplicationState {
get {
return renderMatchingApplicationState;
}
set {
renderMatchingApplicationState = value;
}
}
private static bool RenderWithVisualStyles {
get {
return (!renderMatchingApplicationState || Application.RenderWithVisualStyles);
}
}
/// <include file='doc\RadioButtonRenderer.uex' path='docs/doc[@for="RadioButtonRenderer.IsBackgroundPartiallyTransparent"]/*' />
/// <devdoc>
/// <para>
/// Returns true if the background corresponding to the given state is partially transparent, else false.
/// </para>
/// </devdoc>
public static bool IsBackgroundPartiallyTransparent(RadioButtonState state) {
if (RenderWithVisualStyles) {
InitializeRenderer((int)state);
return visualStyleRenderer.IsBackgroundPartiallyTransparent();
}
else {
return false; //for downlevel, this is false
}
}
/// <include file='doc\RadioButtonRenderer.uex' path='docs/doc[@for="RadioButtonRenderer.DrawParentBackground"]/*' />
/// <devdoc>
/// <para>
/// This is just a convenience wrapper for VisualStyleRenderer.DrawThemeParentBackground. For downlevel,
/// this isn't required and does nothing.
/// </para>
/// </devdoc>
[
SuppressMessage("Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters") // Using Graphics instead of IDeviceContext intentionally
]
public static void DrawParentBackground(Graphics g, Rectangle bounds, Control childControl) {
if (RenderWithVisualStyles) {
InitializeRenderer(0);
visualStyleRenderer.DrawParentBackground(g, bounds, childControl);
}
}
/// <include file='doc\RadioButtonRenderer.uex' path='docs/doc[@for="RadioButtonRenderer.DrawRadioButton"]/*' />
/// <devdoc>
/// <para>
/// Renders a RadioButton control.
/// </para>
/// </devdoc>
public static void DrawRadioButton(Graphics g, Point glyphLocation, RadioButtonState state) {
Rectangle glyphBounds = new Rectangle(glyphLocation, GetGlyphSize(g, state));
if (RenderWithVisualStyles) {
InitializeRenderer((int)state);
visualStyleRenderer.DrawBackground(g, glyphBounds);
}
else {
ControlPaint.DrawRadioButton(g, glyphBounds, ConvertToButtonState(state));
}
}
/// <include file='doc\RadioButtonRenderer.uex' path='docs/doc[@for="RadioButtonRenderer.DrawRadioButton1"]/*' />
/// <devdoc>
/// <para>
/// Renders a RadioButton control.
/// </para>
/// </devdoc>
public static void DrawRadioButton(Graphics g, Point glyphLocation, Rectangle textBounds, string radioButtonText, Font font, bool focused, RadioButtonState state) {
DrawRadioButton(g, glyphLocation, textBounds, radioButtonText, font,
TextFormatFlags.HorizontalCenter | TextFormatFlags.VerticalCenter | TextFormatFlags.SingleLine,
focused, state);
}
/// <include file='doc\RadioButtonRenderer.uex' path='docs/doc[@for="RadioButtonRenderer.DrawRadioButton2"]/*' />
/// <devdoc>
/// <para>
/// Renders a RadioButton control.
/// </para>
/// </devdoc>
public static void DrawRadioButton(Graphics g, Point glyphLocation, Rectangle textBounds, string radioButtonText, Font font, TextFormatFlags flags, bool focused, RadioButtonState state) {
Rectangle glyphBounds = new Rectangle(glyphLocation, GetGlyphSize(g, state));
Color textColor;
if (RenderWithVisualStyles) {
InitializeRenderer((int)state);
visualStyleRenderer.DrawBackground(g, glyphBounds);
textColor = visualStyleRenderer.GetColor(ColorProperty.TextColor);
}
else {
ControlPaint.DrawRadioButton(g, glyphBounds, ConvertToButtonState(state));
textColor = SystemColors.ControlText;
}
TextRenderer.DrawText(g, radioButtonText, font, textBounds, textColor, flags);
if (focused) {
ControlPaint.DrawFocusRectangle(g, textBounds);
}
}
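// Hedged usage sketch (assumed custom Control/Form context, not part of this file): a
// Paint handler typically sizes the glyph first, then hands text bounds and a
// RadioButtonState to one of the overloads above.
//
//     protected override void OnPaint(PaintEventArgs e)
//     {
//         base.OnPaint(e);
//         RadioButtonState state = RadioButtonState.CheckedNormal;
//         Size glyph = RadioButtonRenderer.GetGlyphSize(e.Graphics, state);
//         Rectangle textBounds = new Rectangle(glyph.Width + 4, 0, Width - glyph.Width - 4, Height);
//         RadioButtonRenderer.DrawRadioButton(e.Graphics, Point.Empty, textBounds, "Option A", Font, Focused, state);
//     }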
/// <include file='doc\RadioButtonRenderer.uex' path='docs/doc[@for="RadioButtonRenderer.DrawRadioButton3"]/*' />
/// <devdoc>
/// <para>
/// Renders a RadioButton control.
/// </para>
/// </devdoc>
public static void DrawRadioButton(Graphics g, Point glyphLocation, Rectangle textBounds, string radioButtonText, Font font, Image image, Rectangle imageBounds, bool focused, RadioButtonState state) {
DrawRadioButton(g, glyphLocation, textBounds, radioButtonText, font,
TextFormatFlags.HorizontalCenter | TextFormatFlags.VerticalCenter | TextFormatFlags.SingleLine,
image, imageBounds, focused, state);
}
/// <include file='doc\RadioButtonRenderer.uex' path='docs/doc[@for="RadioButtonRenderer.DrawRadioButton4"]/*' />
/// <devdoc>
/// <para>
/// Renders a RadioButton control.
/// </para>
/// </devdoc>
public static void DrawRadioButton(Graphics g, Point glyphLocation, Rectangle textBounds, string radioButtonText, Font font, TextFormatFlags flags, Image image, Rectangle imageBounds, bool focused, RadioButtonState state) {
Rectangle glyphBounds = new Rectangle(glyphLocation, GetGlyphSize(g, state));
Color textColor;
if (RenderWithVisualStyles) {
InitializeRenderer((int)state);
//Keep this drawing order! It matches default drawing order.
visualStyleRenderer.DrawImage(g, imageBounds, image);
visualStyleRenderer.DrawBackground(g, glyphBounds);
textColor = visualStyleRenderer.GetColor(ColorProperty.TextColor);
}
else {
g.DrawImage(image, imageBounds);
ControlPaint.DrawRadioButton(g, glyphBounds, ConvertToButtonState(state));
textColor = SystemColors.ControlText;
}
TextRenderer.DrawText(g, radioButtonText, font, textBounds, textColor, flags);
if (focused) {
ControlPaint.DrawFocusRectangle(g, textBounds);
}
}
/// <include file='doc\RadioButtonRenderer.uex' path='docs/doc[@for="RadioButtonRenderer.GetGlyphSize"]/*' />
/// <devdoc>
/// <para>
/// Returns the size of the RadioButton glyph.
/// </para>
/// </devdoc>
[
SuppressMessage("Microsoft.Design", "CA1011:ConsiderPassingBaseTypesAsParameters") // Using Graphics instead of IDeviceContext intentionally
]
public static Size GetGlyphSize(Graphics g, RadioButtonState state) {
if (RenderWithVisualStyles) {
InitializeRenderer((int)state);
return visualStyleRenderer.GetPartSize(g, ThemeSizeType.Draw);
}
return new Size(13, 13);
}
internal static ButtonState ConvertToButtonState(RadioButtonState state) {
switch (state) {
case RadioButtonState.CheckedNormal:
case RadioButtonState.CheckedHot:
return ButtonState.Checked;
case RadioButtonState.CheckedPressed:
return (ButtonState.Checked | ButtonState.Pushed);
case RadioButtonState.CheckedDisabled:
return (ButtonState.Checked | ButtonState.Inactive);
case RadioButtonState.UncheckedPressed:
return ButtonState.Pushed;
case RadioButtonState.UncheckedDisabled:
return ButtonState.Inactive;
default:
return ButtonState.Normal;
}
}
internal static RadioButtonState ConvertFromButtonState(ButtonState state, bool isHot) {
if ((state & ButtonState.Checked) == ButtonState.Checked) {
if ((state & ButtonState.Pushed) == ButtonState.Pushed) {
return RadioButtonState.CheckedPressed;
}
else if ((state & ButtonState.Inactive) == ButtonState.Inactive) {
return RadioButtonState.CheckedDisabled;
}
else if (isHot) {
return RadioButtonState.CheckedHot;
}
return RadioButtonState.CheckedNormal;
}
else { //unchecked
if ((state & ButtonState.Pushed) == ButtonState.Pushed) {
return RadioButtonState.UncheckedPressed;
}
else if ((state & ButtonState.Inactive) == ButtonState.Inactive) {
return RadioButtonState.UncheckedDisabled;
}
else if (isHot) {
return RadioButtonState.UncheckedHot;
}
return RadioButtonState.UncheckedNormal;
}
}
private static void InitializeRenderer(int state) {
if (visualStyleRenderer == null) {
visualStyleRenderer = new VisualStyleRenderer(RadioElement.ClassName, RadioElement.Part, state);
}
else {
visualStyleRenderer.SetParameters(RadioElement.ClassName, RadioElement.Part, state);
}
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.ComponentModel;
using System.Data.Common;
using System.Data.SqlTypes;
using System.Diagnostics;
using System.Globalization;
using System.Runtime.InteropServices;
using System.Text;
namespace System.Data.Odbc
{
public sealed partial class OdbcParameter : DbParameter, ICloneable, IDataParameter, IDbDataParameter
{
private bool _hasChanged;
private bool _userSpecifiedType;
// _typemap the type the user explicitly set, or the default parameter type
// _infertype _typemap if the user explicitly set the type,
// otherwise it is inferred from the value
// _bindtype The actual type used for binding. E.g. string substitutes numeric
//
// set_DbType: _bindtype = _infertype = _typemap = TypeMap.FromDbType(value)
// set_OdbcType: _bindtype = _infertype = _typemap = TypeMap.FromOdbcType(value)
//
// GetParameterType: If _typemap != _infertype AND value != 0
// _bindtype = _infertype = TypeMap.FromSystemType(value.GetType());
// otherwise
// _bindtype = _infertype
//
// Bind: Bind may change _bindtype if the type is not supported through the driver
//
private TypeMap _typemap;
private TypeMap _bindtype;
private string _parameterName;
private byte _precision;
private byte _scale;
private bool _hasScale;
private ODBC32.SQL_C _boundSqlCType;
private ODBC32.SQL_TYPE _boundParameterType; // if we bound already that is the type we used
private int _boundSize;
private int _boundScale;
private IntPtr _boundBuffer;
private IntPtr _boundIntbuffer;
private TypeMap _originalbindtype; // the original type in case we had to change the bindtype
// (e.g. decimal to string)
private byte _internalPrecision;
private bool _internalShouldSerializeSize;
private int _internalSize;
private ParameterDirection _internalDirection;
private byte _internalScale;
private int _internalOffset;
internal bool _internalUserSpecifiedType;
private object _internalValue;
private int _preparedOffset;
private int _preparedSize;
private int _preparedBufferSize;
private object _preparedValue;
private int _preparedIntOffset;
private int _preparedValueOffset;
private ODBC32.SQL_C _prepared_Sql_C_Type;
public OdbcParameter() : base()
{
// uses System.Threading!
}
public OdbcParameter(string name, object value) : this()
{
ParameterName = name;
Value = value;
}
public OdbcParameter(string name, OdbcType type) : this()
{
ParameterName = name;
OdbcType = type;
}
public OdbcParameter(string name, OdbcType type, int size) : this()
{
ParameterName = name;
OdbcType = type;
Size = size;
}
public OdbcParameter(string name, OdbcType type, int size, string sourcecolumn) : this()
{
ParameterName = name;
OdbcType = type;
Size = size;
SourceColumn = sourcecolumn;
}
[EditorBrowsableAttribute(EditorBrowsableState.Advanced)] // MDAC 69508
public OdbcParameter(string parameterName,
OdbcType odbcType,
int size,
ParameterDirection parameterDirection,
bool isNullable,
byte precision,
byte scale,
string srcColumn,
DataRowVersion srcVersion,
object value
) : this()
{ // V1.0 everything
this.ParameterName = parameterName;
this.OdbcType = odbcType;
this.Size = size;
this.Direction = parameterDirection;
this.IsNullable = isNullable;
PrecisionInternal = precision;
ScaleInternal = scale;
this.SourceColumn = srcColumn;
this.SourceVersion = srcVersion;
this.Value = value;
}
[EditorBrowsableAttribute(EditorBrowsableState.Advanced)] // MDAC 69508
public OdbcParameter(string parameterName,
OdbcType odbcType, int size,
ParameterDirection parameterDirection,
byte precision, byte scale,
string sourceColumn, DataRowVersion sourceVersion, bool sourceColumnNullMapping,
object value) : this()
{ // V2.0 everything - round trip all browsable properties + precision/scale
this.ParameterName = parameterName;
this.OdbcType = odbcType;
this.Size = size;
this.Direction = parameterDirection;
this.PrecisionInternal = precision;
this.ScaleInternal = scale;
this.SourceColumn = sourceColumn;
this.SourceVersion = sourceVersion;
this.SourceColumnNullMapping = sourceColumnNullMapping;
this.Value = value;
}
public override System.Data.DbType DbType
{
get
{
if (_userSpecifiedType)
{
return _typemap._dbType;
}
return TypeMap._NVarChar._dbType; // default type
}
set
{
if ((null == _typemap) || (_typemap._dbType != value))
{
PropertyTypeChanging();
_typemap = TypeMap.FromDbType(value);
_userSpecifiedType = true;
}
}
}
public override void ResetDbType()
{
ResetOdbcType();
}
[
DefaultValue(OdbcType.NChar),
System.Data.Common.DbProviderSpecificTypePropertyAttribute(true),
]
public OdbcType OdbcType
{
get
{
if (_userSpecifiedType)
{
return _typemap._odbcType;
}
return TypeMap._NVarChar._odbcType; // default type
}
set
{
if ((null == _typemap) || (_typemap._odbcType != value))
{
PropertyTypeChanging();
_typemap = TypeMap.FromOdbcType(value);
_userSpecifiedType = true;
}
}
}
public void ResetOdbcType()
{
PropertyTypeChanging();
_typemap = null;
_userSpecifiedType = false;
}
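// Hedged usage sketch (illustrative only): until DbType or OdbcType is set explicitly the
// getters report the NVarChar default, and ResetOdbcType/ResetDbType drop back to
// inferring the type from Value at bind time.
//
//     var p = new OdbcParameter("@price", 19.99m);   // no explicit type yet
//     var reported = p.OdbcType;                     // reports the NVarChar default
//     p.OdbcType = OdbcType.Decimal;                 // now user-specified
//     p.ResetOdbcType();                             // back to inferring from Value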
internal bool HasChanged
{
set
{
_hasChanged = value;
}
}
internal bool UserSpecifiedType
{
get
{
return _userSpecifiedType;
}
}
public override string ParameterName
{ // V1.2.3300, XXXParameter V1.0.3300
get
{
string parameterName = _parameterName;
return ((null != parameterName) ? parameterName : ADP.StrEmpty);
}
set
{
if (_parameterName != value)
{
PropertyChanging();
_parameterName = value;
}
}
}
public new byte Precision
{
get
{
return PrecisionInternal;
}
set
{
PrecisionInternal = value;
}
}
internal byte PrecisionInternal
{
get
{
byte precision = _precision;
if (0 == precision)
{
precision = ValuePrecision(Value);
}
return precision;
}
set
{
if (_precision != value)
{
PropertyChanging();
_precision = value;
}
}
}
private bool ShouldSerializePrecision()
{
return (0 != _precision);
}
public new byte Scale
{
get
{
return ScaleInternal;
}
set
{
ScaleInternal = value;
}
}
internal byte ScaleInternal
{
get
{
byte scale = _scale;
if (!ShouldSerializeScale(scale))
{ // WebData 94688
scale = ValueScale(Value);
}
return scale;
}
set
{
if (_scale != value || !_hasScale)
{
PropertyChanging();
_scale = value;
_hasScale = true;
}
}
}
private bool ShouldSerializeScale()
{
return ShouldSerializeScale(_scale);
}
private bool ShouldSerializeScale(byte scale)
{
return _hasScale && ((0 != scale) || ShouldSerializePrecision());
}
// returns the count of bytes for the data (ColumnSize argument to SQLBindParameter)
private int GetColumnSize(object value, int offset, int ordinal)
{
if ((ODBC32.SQL_C.NUMERIC == _bindtype._sql_c) && (0 != _internalPrecision))
{
return Math.Min((int)_internalPrecision, ADP.DecimalMaxPrecision);
}
int cch = _bindtype._columnSize;
if (0 >= cch)
{
if (ODBC32.SQL_C.NUMERIC == _typemap._sql_c)
{
cch = 62; // (DecimalMaxPrecision+sign+terminator)*BytesPerUnicodeCharacter
}
else
{
cch = _internalSize;
if (!_internalShouldSerializeSize || 0x3fffffff <= cch || cch < 0)
{
Debug.Assert((ODBC32.SQL_C.WCHAR == _bindtype._sql_c) || (ODBC32.SQL_C.BINARY == _bindtype._sql_c), "not wchar or binary");
if (!_internalShouldSerializeSize && (0 != (ParameterDirection.Output & _internalDirection)))
{
throw ADP.UninitializedParameterSize(ordinal, _bindtype._type);
}
if ((null == value) || Convert.IsDBNull(value))
{
cch = 0;
}
else if (value is string)
{
cch = ((String)value).Length - offset;
if ((0 != (ParameterDirection.Output & _internalDirection)) && (0x3fffffff <= _internalSize))
{
// restrict output parameters when user set Size to Int32.MaxValue
// to the greater of input size or 8K
cch = Math.Max(cch, 4 * 1024); // MDAC 69224
}
// the following code causes failure against SQL 6.5
// ERROR [HY104] [Microsoft][ODBC SQL Server Driver]Invalid precision value
//
// the code causes failure if it is NOT there (remark added by [....])
// it causes failure with jet if it is there
//
// MDAC 76227: Code is required for japanese client/server tests.
// If this causes regressions with Jet please doc here including bug#. ([....])
//
if ((ODBC32.SQL_TYPE.CHAR == _bindtype._sql_type)
|| (ODBC32.SQL_TYPE.VARCHAR == _bindtype._sql_type)
|| (ODBC32.SQL_TYPE.LONGVARCHAR == _bindtype._sql_type))
{
cch = System.Text.Encoding.Default.GetMaxByteCount(cch);
}
}
else if (value is char[])
{
cch = ((char[])value).Length - offset;
if ((0 != (ParameterDirection.Output & _internalDirection)) && (0x3fffffff <= _internalSize))
{
cch = Math.Max(cch, 4 * 1024); // MDAC 69224
}
if ((ODBC32.SQL_TYPE.CHAR == _bindtype._sql_type)
|| (ODBC32.SQL_TYPE.VARCHAR == _bindtype._sql_type)
|| (ODBC32.SQL_TYPE.LONGVARCHAR == _bindtype._sql_type))
{
cch = System.Text.Encoding.Default.GetMaxByteCount(cch);
}
}
else if (value is byte[])
{
cch = ((byte[])value).Length - offset;
if ((0 != (ParameterDirection.Output & _internalDirection)) && (0x3fffffff <= _internalSize))
{
// restrict output parameters when user set Size to Int32.MaxValue
// to the greater of input size or 8K
cch = Math.Max(cch, 8 * 1024); // MDAC 69224
}
}
#if DEBUG
else { Debug.Fail("not expecting this"); }
#endif
// Note: ColumnSize should never be 0,
// this represents the size of the column on the backend.
//
// without the following adjustment the call fails with:
// ERROR [HY104] [Microsoft][ODBC Microsoft Access Driver]Invalid precision value
cch = Math.Max(2, cch);
}
}
}
Debug.Assert((0 <= cch) && (cch < 0x3fffffff), $"GetColumnSize: cch = {cch} out of range, _internalShouldSerializeSize = {_internalShouldSerializeSize}, _internalSize = {_internalSize}");
return cch;
}
// Return the count of bytes for the data (size in bytes for the native buffer)
//
private int GetValueSize(object value, int offset)
{
if ((ODBC32.SQL_C.NUMERIC == _bindtype._sql_c) && (0 != _internalPrecision))
{
return Math.Min((int)_internalPrecision, ADP.DecimalMaxPrecision);
}
int cch = _bindtype._columnSize;
if (0 >= cch)
{
bool twobytesperunit = false;
if (value is string)
{
cch = ((string)value).Length - offset;
twobytesperunit = true;
}
else if (value is char[])
{
cch = ((char[])value).Length - offset;
twobytesperunit = true;
}
else if (value is byte[])
{
cch = ((byte[])value).Length - offset;
}
else
{
cch = 0;
}
if (_internalShouldSerializeSize && (_internalSize >= 0) && (_internalSize < cch) && (_bindtype == _originalbindtype))
{
cch = _internalSize;
}
if (twobytesperunit)
{
cch *= 2;
}
}
Debug.Assert((0 <= cch) && (cch < 0x3fffffff), $"GetValueSize: cch = {cch} out of range, _internalShouldSerializeSize = {_internalShouldSerializeSize}, _internalSize = {_internalSize}");
return cch;
}
// return the count of bytes for the data, used for SQLBindParameter
//
private int GetParameterSize(object value, int offset, int ordinal)
{
int ccb = _bindtype._bufferSize;
if (0 >= ccb)
{
if (ODBC32.SQL_C.NUMERIC == _typemap._sql_c)
{
ccb = 518; // _bindtype would be VarChar ([0-9]?{255} + '-' + '.') * 2
}
else
{
ccb = _internalSize;
if (!_internalShouldSerializeSize || (0x3fffffff <= ccb) || (ccb < 0))
{
Debug.Assert((ODBC32.SQL_C.WCHAR == _bindtype._sql_c) || (ODBC32.SQL_C.BINARY == _bindtype._sql_c), "not wchar or binary");
if ((ccb <= 0) && (0 != (ParameterDirection.Output & _internalDirection)))
{
throw ADP.UninitializedParameterSize(ordinal, _bindtype._type);
}
if ((null == value) || Convert.IsDBNull(value))
{
if (_bindtype._sql_c == ODBC32.SQL_C.WCHAR)
{
ccb = 2; // allow for null termination
}
else
{
ccb = 0;
}
}
else if (value is string)
{
ccb = (((String)value).Length - offset) * 2 + 2;
}
else if (value is char[])
{
ccb = (((char[])value).Length - offset) * 2 + 2;
}
else if (value is byte[])
{
ccb = ((byte[])value).Length - offset;
}
#if DEBUG
else { Debug.Fail("not expecting this"); }
#endif
if ((0 != (ParameterDirection.Output & _internalDirection)) && (0x3fffffff <= _internalSize))
{
// restrict output parameters when the user set Size to Int32.MaxValue
// to the greater of the input size or 8K
ccb = Math.Max(ccb, 8 * 1024); // MDAC 69224
}
}
else if (ODBC32.SQL_C.WCHAR == _bindtype._sql_c)
{
if ((value is string) && (ccb < ((String)value).Length) && (_bindtype == _originalbindtype))
{
// silently truncate ... MDAC 84408 ... do not truncate upgraded values ... MDAC 84706
ccb = ((String)value).Length;
}
ccb = (ccb * 2) + 2; // allow for null termination
}
else if ((value is byte[]) && (ccb < ((byte[])value).Length) && (_bindtype == _originalbindtype))
{
// silently truncate ... MDAC 84408 ... do not truncate upgraded values ... MDAC 84706
ccb = ((byte[])value).Length;
}
}
}
Debug.Assert((0 <= ccb) && (ccb < 0x3fffffff), "GetParameterSize: out of range " + ccb);
return ccb;
}
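// Worked example (illustrative note, not part of the original source): binding the string
// "abcdefghij" (10 characters) with offset = 2 as WCHAR gives
// ccb = (10 - 2) * 2 + 2 = 18 bytes, i.e. 8 UTF-16 code units plus a two-byte null terminator,
// matching the (Length - offset) * 2 + 2 computation above.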
private byte GetParameterPrecision(object value)
{
if (0 != _internalPrecision && value is decimal)
{
// from qfe 762
if (_internalPrecision < 29)
{
// from SqlClient ...
if (_internalPrecision != 0)
{
// devnote: If the user-specified precision (_internalPrecision) is less than the actual value's precision
// we silently adjust the user-specified precision to the value's precision.
byte precision = ((SqlDecimal)(decimal)value).Precision;
_internalPrecision = Math.Max(_internalPrecision, precision); // silently adjust the precision
}
return _internalPrecision;
}
return ADP.DecimalMaxPrecision;
}
if ((null == value) || (value is decimal) || Convert.IsDBNull(value))
{ // MDAC 60882
return ADP.DecimalMaxPrecision28;
}
return 0;
}
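// Illustrative note (restating the logic above, not original documentation): if the user sets
// Precision = 3 but supplies the decimal 123.45m (actual precision 5), _internalPrecision is
// silently raised to 5 by the Math.Max call before being returned.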
private byte GetParameterScale(object value)
{
// For any value that is not a decimal, simply return the scale
//
if (!(value is decimal))
{
return _internalScale;
}
// Determine the value's scale.
// If the user specified a lower scale we return the user-specified scale,
// otherwise the value's scale.
//
byte s = (byte)((decimal.GetBits((decimal)value)[3] & 0x00ff0000) >> 0x10);
if ((_internalScale > 0) && (_internalScale < s))
{
return _internalScale;
}
return s;
}
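// Illustrative sketch (not part of the original source): the scale of a System.Decimal is stored
// in bits 16-23 of the fourth element returned by decimal.GetBits, e.g.
//   int[] bits = decimal.GetBits(1.250m);
//   byte scale = (byte)((bits[3] & 0x00ff0000) >> 16);   // scale == 3 (trailing zero is kept)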
//This is required for OdbcCommand.Clone to deep copy the parameters collection
object ICloneable.Clone()
{
return new OdbcParameter(this);
}
private void CopyParameterInternal()
{
_internalValue = Value;
// we should coerce the parameter value at this time.
_internalPrecision = ShouldSerializePrecision() ? PrecisionInternal : ValuePrecision(_internalValue);
_internalShouldSerializeSize = ShouldSerializeSize();
_internalSize = _internalShouldSerializeSize ? Size : ValueSize(_internalValue);
_internalDirection = Direction;
_internalScale = ShouldSerializeScale() ? ScaleInternal : ValueScale(_internalValue);
_internalOffset = Offset;
_internalUserSpecifiedType = UserSpecifiedType;
}
private void CloneHelper(OdbcParameter destination)
{
CloneHelperCore(destination);
destination._userSpecifiedType = _userSpecifiedType;
destination._typemap = _typemap;
destination._parameterName = _parameterName;
destination._precision = _precision;
destination._scale = _scale;
destination._hasScale = _hasScale;
}
internal void ClearBinding()
{
if (!_userSpecifiedType)
{
_typemap = null;
}
_bindtype = null;
}
internal void PrepareForBind(OdbcCommand command, short ordinal, ref int parameterBufferSize)
{
// make a snapshot of the current properties. Properties may change while we work on them
//
CopyParameterInternal();
object value = ProcessAndGetParameterValue();
int offset = _internalOffset;
int size = _internalSize;
ODBC32.SQL_C sql_c_type;
// offset validation based on the value's type
//
if (offset > 0)
{
if (value is string)
{
if (offset > ((string)value).Length)
{
throw ADP.OffsetOutOfRangeException();
}
}
else if (value is char[])
{
if (offset > ((char[])value).Length)
{
throw ADP.OffsetOutOfRangeException();
}
}
else if (value is byte[])
{
if (offset > ((byte[])value).Length)
{
throw ADP.OffsetOutOfRangeException();
}
}
else
{
// for all other types offset has no meaning
// this is important since we might upgrade some types to strings
offset = 0;
}
}
// type support verification for certain data types
//
switch (_bindtype._sql_type)
{
case ODBC32.SQL_TYPE.DECIMAL:
case ODBC32.SQL_TYPE.NUMERIC:
if (
!command.Connection.IsV3Driver // for non V3 driver we always do the conversion
|| !command.Connection.TestTypeSupport(ODBC32.SQL_TYPE.NUMERIC) // otherwise we convert if the driver does not support numeric
|| command.Connection.TestRestrictedSqlBindType(_bindtype._sql_type)// or the type is not supported
)
{
// No support for NUMERIC
// Change the type
_bindtype = TypeMap._VarChar;
if ((null != value) && !Convert.IsDBNull(value))
{
value = ((Decimal)value).ToString(CultureInfo.CurrentCulture);
size = ((string)value).Length;
offset = 0;
}
}
break;
case ODBC32.SQL_TYPE.BIGINT:
if (!command.Connection.IsV3Driver)
{
// No support for BIGINT
// Change the type
_bindtype = TypeMap._VarChar;
if ((null != value) && !Convert.IsDBNull(value))
{
value = ((Int64)value).ToString(CultureInfo.CurrentCulture);
size = ((string)value).Length;
offset = 0;
}
}
break;
case ODBC32.SQL_TYPE.WCHAR: // MDAC 68993
case ODBC32.SQL_TYPE.WVARCHAR:
case ODBC32.SQL_TYPE.WLONGVARCHAR:
if (value is char)
{
value = value.ToString();
size = ((string)value).Length;
offset = 0;
}
if (!command.Connection.TestTypeSupport(_bindtype._sql_type))
{
// No support for WCHAR, WVARCHAR or WLONGVARCHAR
// Change the type
if (ODBC32.SQL_TYPE.WCHAR == _bindtype._sql_type) { _bindtype = TypeMap._Char; }
else if (ODBC32.SQL_TYPE.WVARCHAR == _bindtype._sql_type) { _bindtype = TypeMap._VarChar; }
else if (ODBC32.SQL_TYPE.WLONGVARCHAR == _bindtype._sql_type)
{
_bindtype = TypeMap._Text;
}
}
break;
} // end switch
// Conversion from WCHAR to CHAR, VARCHAR or LONGVARCHAR (AnsiString) differs for some providers;
// we need to convert WCHAR to CHAR and bind as sql_c_type = CHAR
//
sql_c_type = _bindtype._sql_c;
if (!command.Connection.IsV3Driver)
{
if (sql_c_type == ODBC32.SQL_C.WCHAR)
{
sql_c_type = ODBC32.SQL_C.CHAR;
if (null != value)
{
if (!Convert.IsDBNull(value) && value is string)
{
int lcid = System.Globalization.CultureInfo.CurrentCulture.LCID;
CultureInfo culInfo = new CultureInfo(lcid);
Encoding cpe = System.Text.Encoding.GetEncoding(culInfo.TextInfo.ANSICodePage);
value = cpe.GetBytes(value.ToString());
size = ((byte[])value).Length;
}
}
}
}
int cbParameterSize = GetParameterSize(value, offset, ordinal); // count of bytes for the data, for SQLBindParameter
// Upgrade input value type if the size of input value is bigger than the max size of the input value type.
switch (_bindtype._sql_type)
{
case ODBC32.SQL_TYPE.VARBINARY:
// Max length of VARBINARY is 8,000 bytes.
if (size > 8000)
{
_bindtype = TypeMap._Image; // will change to LONGVARBINARY
}
break;
case ODBC32.SQL_TYPE.VARCHAR:
// Max length of VARCHAR is 8,000 non-Unicode characters.
if (size > 8000)
{
_bindtype = TypeMap._Text; // will change to LONGVARCHAR
}
break;
case ODBC32.SQL_TYPE.WVARCHAR:
// Max length of WVARCHAR (NVARCHAR) is 4,000 Unicode characters.
if (size > 4000)
{
_bindtype = TypeMap._NText; // will change to WLONGVARCHAR
}
break;
}
_prepared_Sql_C_Type = sql_c_type;
_preparedOffset = offset;
_preparedSize = size;
_preparedValue = value;
_preparedBufferSize = cbParameterSize;
_preparedIntOffset = parameterBufferSize;
_preparedValueOffset = _preparedIntOffset + IntPtr.Size;
parameterBufferSize += (cbParameterSize + IntPtr.Size);
}
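// Illustrative note (restating the upgrade rules above, not original documentation): a parameter
// typed as VarChar whose value is longer than 8,000 characters is rebound as LONGVARCHAR
// (TypeMap._Text); VarBinary over 8,000 bytes becomes LONGVARBINARY (TypeMap._Image) and
// NVarChar over 4,000 characters becomes WLONGVARCHAR (TypeMap._NText) before SQLBindParameter.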
internal void Bind(OdbcStatementHandle hstmt, OdbcCommand command, short ordinal, CNativeBuffer parameterBuffer, bool allowReentrance)
{
ODBC32.RetCode retcode;
ODBC32.SQL_C sql_c_type = _prepared_Sql_C_Type;
ODBC32.SQL_PARAM sqldirection = SqlDirectionFromParameterDirection();
int offset = _preparedOffset;
int size = _preparedSize;
object value = _preparedValue;
int cbValueSize = GetValueSize(value, offset); // count of bytes for the data
int cchSize = GetColumnSize(value, offset, ordinal); // count of bytes for the data, used to allocate the buffer length
byte precision = GetParameterPrecision(value);
byte scale = GetParameterScale(value);
int cbActual;
HandleRef valueBuffer = parameterBuffer.PtrOffset(_preparedValueOffset, _preparedBufferSize);
HandleRef intBuffer = parameterBuffer.PtrOffset(_preparedIntOffset, IntPtr.Size);
// for the numeric datatype we need to do some special case handling ...
//
if (ODBC32.SQL_C.NUMERIC == sql_c_type)
{
// for input/output parameters we need to adjust the scale of the input value since the convert function in
// sqlsrv32 takes this scale for the output parameter (possible bug in sqlsrv32?)
//
if ((ODBC32.SQL_PARAM.INPUT_OUTPUT == sqldirection) && (value is decimal))
{
if (scale < _internalScale)
{
while (scale < _internalScale)
{
value = ((decimal)value) * 10;
scale++;
}
}
}
SetInputValue(value, sql_c_type, cbValueSize, precision, 0, parameterBuffer);
// for output parameters we need to write precision and scale to the buffer since the convert function in
// sqlsrv32 expects these values there (possible bug in sqlsrv32?)
//
if (ODBC32.SQL_PARAM.INPUT != sqldirection)
{
parameterBuffer.WriteInt16(_preparedValueOffset, (short)(((ushort)scale << 8) | (ushort)precision));
}
}
else
{
SetInputValue(value, sql_c_type, cbValueSize, size, offset, parameterBuffer);
}
// Try to reuse existing bindings if:
//  - the binding is valid (we already went through binding all parameters),
//  - the parameter collection is already bound, and
//  - the bindtype ParameterType did not change (forced upgrade).
if (!_hasChanged
&& (_boundSqlCType == sql_c_type)
&& (_boundParameterType == _bindtype._sql_type)
&& (_boundSize == cchSize)
&& (_boundScale == scale)
&& (_boundBuffer == valueBuffer.Handle)
&& (_boundIntbuffer == intBuffer.Handle)
)
{
return;
}
//SQLBindParameter
retcode = hstmt.BindParameter(
ordinal, // Parameter Number
(short)sqldirection, // InputOutputType
sql_c_type, // ValueType
_bindtype._sql_type, // ParameterType
(IntPtr)cchSize, // ColumnSize
(IntPtr)scale, // DecimalDigits
valueBuffer, // ParameterValuePtr
(IntPtr)_preparedBufferSize,
intBuffer); // StrLen_or_IndPtr
if (ODBC32.RetCode.SUCCESS != retcode)
{
if ("07006" == command.GetDiagSqlState())
{
command.Connection.FlagRestrictedSqlBindType(_bindtype._sql_type);
if (allowReentrance)
{
this.Bind(hstmt, command, ordinal, parameterBuffer, false);
return;
}
}
command.Connection.HandleError(hstmt, retcode);
}
_hasChanged = false;
_boundSqlCType = sql_c_type;
_boundParameterType = _bindtype._sql_type;
_boundSize = cchSize;
_boundScale = scale;
_boundBuffer = valueBuffer.Handle;
_boundIntbuffer = intBuffer.Handle;
if (ODBC32.SQL_C.NUMERIC == sql_c_type)
{
OdbcDescriptorHandle hdesc = command.GetDescriptorHandle(ODBC32.SQL_ATTR.APP_PARAM_DESC);
// descriptor handle is cached on command wrapper, don't release it
// Set descriptor Type
//
//SQLSetDescField(hdesc, i+1, SQL_DESC_TYPE, (void *)SQL_C_NUMERIC, 0);
retcode = hdesc.SetDescriptionField1(ordinal, ODBC32.SQL_DESC.TYPE, (IntPtr)ODBC32.SQL_C.NUMERIC);
if (ODBC32.RetCode.SUCCESS != retcode)
{
command.Connection.HandleError(hstmt, retcode);
}
// Set precision
//
cbActual = (int)precision;
//SQLSetDescField(hdesc, i+1, SQL_DESC_PRECISION, (void *)precision, 0);
retcode = hdesc.SetDescriptionField1(ordinal, ODBC32.SQL_DESC.PRECISION, (IntPtr)cbActual);
if (ODBC32.RetCode.SUCCESS != retcode)
{
command.Connection.HandleError(hstmt, retcode);
}
// Set scale
//
// SQLSetDescField(hdesc, i+1, SQL_DESC_SCALE, (void *)llen, 0);
cbActual = (int)scale;
retcode = hdesc.SetDescriptionField1(ordinal, ODBC32.SQL_DESC.SCALE, (IntPtr)cbActual);
if (ODBC32.RetCode.SUCCESS != retcode)
{
command.Connection.HandleError(hstmt, retcode);
}
// Set data pointer
//
// SQLSetDescField(hdesc, i+1, SQL_DESC_DATA_PTR, (void *)&numeric, 0);
retcode = hdesc.SetDescriptionField2(ordinal, ODBC32.SQL_DESC.DATA_PTR, valueBuffer);
if (ODBC32.RetCode.SUCCESS != retcode)
{
command.Connection.HandleError(hstmt, retcode);
}
}
}
internal void GetOutputValue(CNativeBuffer parameterBuffer)
{ //Handle any output params
// No value is available if the user fiddles with the parameter's properties
//
if (_hasChanged) return;
if ((null != _bindtype) && (_internalDirection != ParameterDirection.Input))
{
TypeMap typemap = _bindtype;
_bindtype = null;
int cbActual = (int)parameterBuffer.ReadIntPtr(_preparedIntOffset);
if (ODBC32.SQL_NULL_DATA == cbActual)
{
Value = DBNull.Value;
}
else if ((0 <= cbActual) || (cbActual == ODBC32.SQL_NTS))
{ // safeguard
Value = parameterBuffer.MarshalToManaged(_preparedValueOffset, _boundSqlCType, cbActual);
if (_boundSqlCType == ODBC32.SQL_C.CHAR)
{
if ((null != Value) && !Convert.IsDBNull(Value))
{
int lcid = System.Globalization.CultureInfo.CurrentCulture.LCID;
CultureInfo culInfo = new CultureInfo(lcid);
Encoding cpe = System.Text.Encoding.GetEncoding(culInfo.TextInfo.ANSICodePage);
Value = cpe.GetString((byte[])Value);
}
}
if ((typemap != _typemap) && (null != Value) && !Convert.IsDBNull(Value) && (Value.GetType() != _typemap._type))
{
Debug.Assert(ODBC32.SQL_C.NUMERIC == _typemap._sql_c, "unexpected");
Value = decimal.Parse((string)Value, System.Globalization.CultureInfo.CurrentCulture);
}
}
}
}
private object ProcessAndGetParameterValue()
{
object value = _internalValue;
if (_internalUserSpecifiedType)
{
if ((null != value) && !Convert.IsDBNull(value))
{
Type valueType = value.GetType();
if (!valueType.IsArray)
{
if (valueType != _typemap._type)
{
try
{
value = Convert.ChangeType(value, _typemap._type, (System.IFormatProvider)null);
}
catch (Exception e)
{
// Don't know which exception to expect from ChangeType so we filter out the serious ones
//
if (!ADP.IsCatchableExceptionType(e))
{
throw;
}
throw ADP.ParameterConversionFailed(value, _typemap._type, e); // WebData 75433
}
}
}
else if (valueType == typeof(char[]))
{
value = new string((char[])value);
}
}
}
else if (null == _typemap)
{
if ((null == value) || Convert.IsDBNull(value))
{
_typemap = TypeMap._NVarChar; // default type
}
else
{
Type type = value.GetType();
_typemap = TypeMap.FromSystemType(type);
}
}
Debug.Assert(null != _typemap, "GetParameterValue: null _typemap");
_originalbindtype = _bindtype = _typemap;
return value;
}
private void PropertyChanging()
{
_hasChanged = true;
}
private void PropertyTypeChanging()
{
PropertyChanging();
//CoercedValue = null;
}
internal void SetInputValue(object value, ODBC32.SQL_C sql_c_type, int cbsize, int sizeorprecision, int offset, CNativeBuffer parameterBuffer)
{ //Handle any input params
if ((ParameterDirection.Input == _internalDirection) || (ParameterDirection.InputOutput == _internalDirection))
{
//Note: (lang) "null" means to use the server's default (not DBNull).
//We probably should just not have bound this parameter, period, but that
//would mess up the user's question marks, etc...
if ((null == value))
{
parameterBuffer.WriteIntPtr(_preparedIntOffset, (IntPtr)ODBC32.SQL_DEFAULT_PARAM);
}
else if (Convert.IsDBNull(value))
{
parameterBuffer.WriteIntPtr(_preparedIntOffset, (IntPtr)ODBC32.SQL_NULL_DATA);
}
else
{
switch (sql_c_type)
{
case ODBC32.SQL_C.CHAR:
case ODBC32.SQL_C.WCHAR:
case ODBC32.SQL_C.BINARY:
//StrLen_or_IndPtr is ignored except for Character or Binary data.
parameterBuffer.WriteIntPtr(_preparedIntOffset, (IntPtr)cbsize);
break;
default:
parameterBuffer.WriteIntPtr(_preparedIntOffset, IntPtr.Zero);
break;
}
//Place the input param value into the native buffer
parameterBuffer.MarshalToNative(_preparedValueOffset, value, sql_c_type, sizeorprecision, offset);
}
}
else
{
// always set output-only and return-value parameter values to null when executing
_internalValue = null;
//Always initialize the int buffer (for output params), since we need to know
//if/when the parameters are available for output (i.e. when the buffer is valid).
//if (_sqldirection != ODBC32.SQL_PARAM.INPUT)
parameterBuffer.WriteIntPtr(_preparedIntOffset, (IntPtr)ODBC32.SQL_NULL_DATA);
}
}
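// Usage note (illustrative, not part of the original source); from the caller's side, with a
// hypothetical OdbcParameter 'param':
//   param.Value = null;          // binds SQL_DEFAULT_PARAM - use the server's default value
//   param.Value = DBNull.Value;  // binds SQL_NULL_DATA - send an explicit NULL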
private ODBC32.SQL_PARAM SqlDirectionFromParameterDirection()
{
switch (_internalDirection)
{
case ParameterDirection.Input:
return ODBC32.SQL_PARAM.INPUT;
case ParameterDirection.Output:
case ParameterDirection.ReturnValue:
//ODBC doesn't seem to distinguish between output and return value
//as SQL_PARAM_RETURN_VALUE fails with "Invalid parameter type"
return ODBC32.SQL_PARAM.OUTPUT;
case ParameterDirection.InputOutput:
return ODBC32.SQL_PARAM.INPUT_OUTPUT;
default:
Debug.Fail("Unexpected Direction Property on Parameter");
return ODBC32.SQL_PARAM.INPUT;
}
}
public override object Value
{ // V1.2.3300, XXXParameter V1.0.3300
get
{
return _value;
}
set
{
_coercedValue = null;
_value = value;
}
}
private byte ValuePrecision(object value)
{
return ValuePrecisionCore(value);
}
private byte ValueScale(object value)
{
return ValueScaleCore(value);
}
private int ValueSize(object value)
{
return ValueSizeCore(value);
}
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Azure;
using Microsoft.Azure.Management.Sql.LegacySdk;
using Microsoft.Azure.Management.Sql.LegacySdk.Models;
namespace Microsoft.Azure.Management.Sql.LegacySdk
{
/// <summary>
/// The Windows Azure SQL Database management API provides a RESTful set of
/// web services that interact with Windows Azure SQL Database services to
/// manage your databases. The API enables users to create, retrieve,
/// update, and delete databases and servers.
/// </summary>
public static partial class ServerDisasterRecoveryConfigurationOperationsExtensions
{
/// <summary>
/// Begins creating a new or updating an existing Azure SQL Server
/// disaster recovery configuration. To determine the status of the
/// operation call
/// GetServerDisasterRecoveryConfigurationOperationStatus.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IServerDisasterRecoveryConfigurationOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the Azure SQL
/// Server belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Server.
/// </param>
/// <param name='serverDisasterRecoveryConfigurationName'>
/// Required. The name of the Azure SQL Server disaster recovery
/// configuration to be operated on (Updated or created).
/// </param>
/// <param name='parameters'>
/// Required. The required parameters for creating or updating a Server
/// disaster recovery configuration.
/// </param>
/// <returns>
/// Response for long running Azure Sql server disaster recovery
/// configuration operation.
/// </returns>
public static ServerDisasterRecoveryConfigurationCreateOrUpdateResponse BeginCreateOrUpdate(this IServerDisasterRecoveryConfigurationOperations operations, string resourceGroupName, string serverName, string serverDisasterRecoveryConfigurationName, ServerDisasterRecoveryConfigurationCreateOrUpdateParameters parameters)
{
return Task.Factory.StartNew((object s) =>
{
return ((IServerDisasterRecoveryConfigurationOperations)s).BeginCreateOrUpdateAsync(resourceGroupName, serverName, serverDisasterRecoveryConfigurationName, parameters);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Begins creating a new or updating an existing Azure SQL Server
/// disaster recovery configuration. To determine the status of the
/// operation call
/// GetServerDisasterRecoveryConfigurationOperationStatus.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IServerDisasterRecoveryConfigurationOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the Azure SQL
/// Server belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Server.
/// </param>
/// <param name='serverDisasterRecoveryConfigurationName'>
/// Required. The name of the Azure SQL Server disaster recovery
/// configuration to be operated on (Updated or created).
/// </param>
/// <param name='parameters'>
/// Required. The required parameters for creating or updating a Server
/// disaster recovery configuration.
/// </param>
/// <returns>
/// Response for long running Azure Sql server disaster recovery
/// configuration operation.
/// </returns>
public static Task<ServerDisasterRecoveryConfigurationCreateOrUpdateResponse> BeginCreateOrUpdateAsync(this IServerDisasterRecoveryConfigurationOperations operations, string resourceGroupName, string serverName, string serverDisasterRecoveryConfigurationName, ServerDisasterRecoveryConfigurationCreateOrUpdateParameters parameters)
{
return operations.BeginCreateOrUpdateAsync(resourceGroupName, serverName, serverDisasterRecoveryConfigurationName, parameters, CancellationToken.None);
}
/// <summary>
/// Creates a new or updates an existing Azure SQL Server disaster
/// recovery configuration.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IServerDisasterRecoveryConfigurationOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the Azure SQL
/// Database Server belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Server.
/// </param>
/// <param name='serverDisasterRecoveryConfigurationName'>
/// Required. The name of the Azure SQL Server disaster recovery
/// configuration to be operated on (Updated or created).
/// </param>
/// <param name='parameters'>
/// Required. The required parameters for creating or updating a Server
/// disaster recovery configuration.
/// </param>
/// <returns>
/// Response for long running Azure Sql server disaster recovery
/// configuration operation.
/// </returns>
public static ServerDisasterRecoveryConfigurationCreateOrUpdateResponse CreateOrUpdate(this IServerDisasterRecoveryConfigurationOperations operations, string resourceGroupName, string serverName, string serverDisasterRecoveryConfigurationName, ServerDisasterRecoveryConfigurationCreateOrUpdateParameters parameters)
{
return Task.Factory.StartNew((object s) =>
{
return ((IServerDisasterRecoveryConfigurationOperations)s).CreateOrUpdateAsync(resourceGroupName, serverName, serverDisasterRecoveryConfigurationName, parameters);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Creates a new or updates an existing Azure SQL Server disaster
/// recovery configuration.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IServerDisasterRecoveryConfigurationOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the Azure SQL
/// Database Server belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Server.
/// </param>
/// <param name='serverDisasterRecoveryConfigurationName'>
/// Required. The name of the Azure SQL Server disaster recovery
/// configuration to be operated on (Updated or created).
/// </param>
/// <param name='parameters'>
/// Required. The required parameters for creating or updating a Server
/// disaster recovery configuration.
/// </param>
/// <returns>
/// Response for long running Azure Sql server disaster recovery
/// configuration operation.
/// </returns>
public static Task<ServerDisasterRecoveryConfigurationCreateOrUpdateResponse> CreateOrUpdateAsync(this IServerDisasterRecoveryConfigurationOperations operations, string resourceGroupName, string serverName, string serverDisasterRecoveryConfigurationName, ServerDisasterRecoveryConfigurationCreateOrUpdateParameters parameters)
{
return operations.CreateOrUpdateAsync(resourceGroupName, serverName, serverDisasterRecoveryConfigurationName, parameters, CancellationToken.None);
}
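// Usage sketch (illustrative only; 'operations', the resource names and 'parameters' are
// hypothetical placeholders, not values defined in this file):
//   ServerDisasterRecoveryConfigurationCreateOrUpdateResponse response =
//       await operations.CreateOrUpdateAsync("myResourceGroup", "myServer", "myDrConfig", parameters);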
/// <summary>
/// Deletes the Azure SQL server disaster recovery configuration with
/// the given name.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IServerDisasterRecoveryConfigurationOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the Azure SQL
/// Server belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Server.
/// </param>
/// <param name='serverDisasterRecoveryConfigurationName'>
/// Required. The name of the Azure SQL server disaster recovery
/// configuration to be deleted.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static AzureOperationResponse Delete(this IServerDisasterRecoveryConfigurationOperations operations, string resourceGroupName, string serverName, string serverDisasterRecoveryConfigurationName)
{
return Task.Factory.StartNew((object s) =>
{
return ((IServerDisasterRecoveryConfigurationOperations)s).DeleteAsync(resourceGroupName, serverName, serverDisasterRecoveryConfigurationName);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Deletes the Azure SQL server disaster recovery configuration with
/// the given name.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IServerDisasterRecoveryConfigurationOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the Azure SQL
/// Server belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Server.
/// </param>
/// <param name='serverDisasterRecoveryConfigurationName'>
/// Required. The name of the Azure SQL server disaster recovery
/// configuration to be deleted.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static Task<AzureOperationResponse> DeleteAsync(this IServerDisasterRecoveryConfigurationOperations operations, string resourceGroupName, string serverName, string serverDisasterRecoveryConfigurationName)
{
return operations.DeleteAsync(resourceGroupName, serverName, serverDisasterRecoveryConfigurationName, CancellationToken.None);
}
/// <summary>
/// Begins failover for the Azure SQL server disaster recovery
/// configuration with the given name.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IServerDisasterRecoveryConfigurationOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the Azure SQL
/// Server belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Server.
/// </param>
/// <param name='serverDisasterRecoveryConfigurationName'>
/// Required. The name of the Azure SQL server disaster recovery
/// configuration to start failover.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static AzureOperationResponse Failover(this IServerDisasterRecoveryConfigurationOperations operations, string resourceGroupName, string serverName, string serverDisasterRecoveryConfigurationName)
{
return Task.Factory.StartNew((object s) =>
{
return ((IServerDisasterRecoveryConfigurationOperations)s).FailoverAsync(resourceGroupName, serverName, serverDisasterRecoveryConfigurationName);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Begins failover for the Azure SQL server disaster recovery
/// configuration with the given name.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IServerDisasterRecoveryConfigurationOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the Azure SQL
/// Server belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Server.
/// </param>
/// <param name='serverDisasterRecoveryConfigurationName'>
/// Required. The name of the Azure SQL server disaster recovery
/// configuration to start failover.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static Task<AzureOperationResponse> FailoverAsync(this IServerDisasterRecoveryConfigurationOperations operations, string resourceGroupName, string serverName, string serverDisasterRecoveryConfigurationName)
{
return operations.FailoverAsync(resourceGroupName, serverName, serverDisasterRecoveryConfigurationName, CancellationToken.None);
}
/// <summary>
/// Begins failover for the Azure SQL server disaster recovery
/// configuration with the given name.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IServerDisasterRecoveryConfigurationOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the Azure SQL
/// Server belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Server.
/// </param>
/// <param name='serverDisasterRecoveryConfigurationName'>
/// Required. The name of the Azure SQL server disaster recovery
/// configuration to start failover.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static AzureOperationResponse FailoverAllowDataLoss(this IServerDisasterRecoveryConfigurationOperations operations, string resourceGroupName, string serverName, string serverDisasterRecoveryConfigurationName)
{
return Task.Factory.StartNew((object s) =>
{
return ((IServerDisasterRecoveryConfigurationOperations)s).FailoverAllowDataLossAsync(resourceGroupName, serverName, serverDisasterRecoveryConfigurationName);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Begins failover for the Azure SQL server disaster recovery
/// configuration with the given name.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IServerDisasterRecoveryConfigurationOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the Azure SQL
/// Server belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Server.
/// </param>
/// <param name='serverDisasterRecoveryConfigurationName'>
/// Required. The name of the Azure SQL server disaster recovery
/// configuration to start failover.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static Task<AzureOperationResponse> FailoverAllowDataLossAsync(this IServerDisasterRecoveryConfigurationOperations operations, string resourceGroupName, string serverName, string serverDisasterRecoveryConfigurationName)
{
return operations.FailoverAllowDataLossAsync(resourceGroupName, serverName, serverDisasterRecoveryConfigurationName, CancellationToken.None);
}
/// <summary>
/// Returns information about an Azure SQL Server disaster recovery
/// configuration.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IServerDisasterRecoveryConfigurationOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the server
/// belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Server.
/// </param>
/// <param name='serverDisasterRecoveryConfigurationName'>
/// Required. The name of the Azure SQL server disaster recovery
/// configuration to be retrieved.
/// </param>
/// <returns>
/// Represents the response to a get server disaster recovery
/// configuration request.
/// </returns>
public static ServerDisasterRecoveryConfigurationGetResponse Get(this IServerDisasterRecoveryConfigurationOperations operations, string resourceGroupName, string serverName, string serverDisasterRecoveryConfigurationName)
{
return Task.Factory.StartNew((object s) =>
{
return ((IServerDisasterRecoveryConfigurationOperations)s).GetAsync(resourceGroupName, serverName, serverDisasterRecoveryConfigurationName);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Returns information about an Azure SQL Server disaster recovery
/// configuration.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IServerDisasterRecoveryConfigurationOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the server
/// belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Server.
/// </param>
/// <param name='serverDisasterRecoveryConfigurationName'>
/// Required. The name of the Azure SQL server disaster recovery
/// configuration to be retrieved.
/// </param>
/// <returns>
/// Represents the response to a get server disaster recovery
/// configuration request.
/// </returns>
public static Task<ServerDisasterRecoveryConfigurationGetResponse> GetAsync(this IServerDisasterRecoveryConfigurationOperations operations, string resourceGroupName, string serverName, string serverDisasterRecoveryConfigurationName)
{
return operations.GetAsync(resourceGroupName, serverName, serverDisasterRecoveryConfigurationName, CancellationToken.None);
}
/// <summary>
/// Gets the status of an Azure Sql Server disaster recovery
/// configuration create or update operation.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IServerDisasterRecoveryConfigurationOperations.
/// </param>
/// <param name='operationStatusLink'>
/// Required. Location value returned by the Begin operation
/// </param>
/// <returns>
/// Response for long running Azure Sql server disaster recovery
/// configuration operation.
/// </returns>
public static ServerDisasterRecoveryConfigurationCreateOrUpdateResponse GetServerDisasterRecoveryConfigurationOperationStatus(this IServerDisasterRecoveryConfigurationOperations operations, string operationStatusLink)
{
return Task.Factory.StartNew((object s) =>
{
return ((IServerDisasterRecoveryConfigurationOperations)s).GetServerDisasterRecoveryConfigurationOperationStatusAsync(operationStatusLink);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Gets the status of an Azure Sql Server disaster recovery
/// configuration create or update operation.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IServerDisasterRecoveryConfigurationOperations.
/// </param>
/// <param name='operationStatusLink'>
/// Required. Location value returned by the Begin operation
/// </param>
/// <returns>
/// Response for long running Azure Sql server disaster recovery
/// configuration operation.
/// </returns>
public static Task<ServerDisasterRecoveryConfigurationCreateOrUpdateResponse> GetServerDisasterRecoveryConfigurationOperationStatusAsync(this IServerDisasterRecoveryConfigurationOperations operations, string operationStatusLink)
{
return operations.GetServerDisasterRecoveryConfigurationOperationStatusAsync(operationStatusLink, CancellationToken.None);
}
/// <summary>
/// Returns information about Azure SQL Server disaster recovery
/// configurations.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IServerDisasterRecoveryConfigurationOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the Azure SQL
/// Server belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Server.
/// </param>
/// <returns>
/// Represents the response to a List Azure Sql Server disaster
/// recovery configuration request.
/// </returns>
public static ServerDisasterRecoveryConfigurationListResponse List(this IServerDisasterRecoveryConfigurationOperations operations, string resourceGroupName, string serverName)
{
return Task.Factory.StartNew((object s) =>
{
return ((IServerDisasterRecoveryConfigurationOperations)s).ListAsync(resourceGroupName, serverName);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// Returns information about Azure SQL Server disaster recovery
/// configurations.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.Azure.Management.Sql.LegacySdk.IServerDisasterRecoveryConfigurationOperations.
/// </param>
/// <param name='resourceGroupName'>
/// Required. The name of the Resource Group to which the Azure SQL
/// Server belongs.
/// </param>
/// <param name='serverName'>
/// Required. The name of the Azure SQL Server.
/// </param>
/// <returns>
/// Represents the response to a List Azure Sql Server disaster
/// recovery configuration request.
/// </returns>
public static Task<ServerDisasterRecoveryConfigurationListResponse> ListAsync(this IServerDisasterRecoveryConfigurationOperations operations, string resourceGroupName, string serverName)
{
return operations.ListAsync(resourceGroupName, serverName, CancellationToken.None);
}
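// Usage sketch (illustrative only; 'operations' stands for any IServerDisasterRecoveryConfigurationOperations
// implementation and the resource names are hypothetical placeholders):
//   ServerDisasterRecoveryConfigurationListResponse list =
//       await operations.ListAsync("myResourceGroup", "myServer");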
}
}
| |
/*
The MIT License (MIT)
Copyright (c) 2018 Helix Toolkit contributors
*/
using System;
using SharpDX.Direct3D11;
using System.Runtime.CompilerServices;
#if !NETFX_CORE
namespace HelixToolkit.Wpf.SharpDX
#else
#if CORE
namespace HelixToolkit.SharpDX.Core
#else
namespace HelixToolkit.UWP
#endif
#endif
{
namespace Core
{
using Utilities;
using Render;
using Shaders;
/// <summary>
///
/// </summary>
public abstract class GeometryRenderCore : RenderCore, IGeometryRenderCore
{
private RasterizerStateProxy rasterState = null;
/// <summary>
///
/// </summary>
public RasterizerStateProxy RasterState { get { return rasterState; } }
private RasterizerStateProxy invertCullModeState = null;
public RasterizerStateProxy InvertCullModeState { get { return invertCullModeState; } }
private IElementsBufferModel instanceBuffer = MatrixInstanceBufferModel.Empty;
/// <summary>
///
/// </summary>
public IElementsBufferModel InstanceBuffer
{
set
{
var old = instanceBuffer;
if(SetAffectsCanRenderFlag(ref instanceBuffer, value))
{
if (old != null)
{
old.ElementChanged -= OnElementChanged;
}
if (instanceBuffer != null)
{
instanceBuffer.ElementChanged += OnElementChanged;
}
else
{
instanceBuffer = MatrixInstanceBufferModel.Empty;
}
}
}
get
{
return instanceBuffer;
}
}
private IAttachableBufferModel geometryBuffer;
/// <summary>
///
/// </summary>
public IAttachableBufferModel GeometryBuffer
{
set
{
if(SetAffectsCanRenderFlag(ref geometryBuffer, value))
{
OnGeometryBufferChanged(value);
}
}
get { return geometryBuffer; }
}
private RasterizerStateDescription rasterDescription = new RasterizerStateDescription()
{
FillMode = FillMode.Solid,
CullMode = CullMode.None,
};
/// <summary>
///
/// </summary>
public RasterizerStateDescription RasterDescription
{
set
{
if(SetAffectsRender(ref rasterDescription, value) && IsAttached)
{
CreateRasterState(value, false);
}
}
get
{
return rasterDescription;
}
}
/// <summary>
/// Initializes a new instance of the <see cref="GeometryRenderCore"/> class.
/// </summary>
public GeometryRenderCore() : base(RenderType.Opaque) { }
/// <summary>
/// Initializes a new instance of the <see cref="GeometryRenderCore"/> class.
/// </summary>
/// <param name="renderType">Type of the render.</param>
public GeometryRenderCore(RenderType renderType) : base(renderType) { }
/// <summary>
///
/// </summary>
/// <param name="description"></param>
/// <param name="force"></param>
/// <returns></returns>
protected virtual bool CreateRasterState(RasterizerStateDescription description, bool force)
{
var newRasterState = EffectTechnique.EffectsManager.StateManager.Register(description);
var invCull = description;
if(description.CullMode != CullMode.None)
{
invCull.CullMode = description.CullMode == CullMode.Back ? CullMode.Front : CullMode.Back;
}
var newInvertCullModeState = EffectTechnique.EffectsManager.StateManager.Register(invCull);
RemoveAndDispose(ref rasterState);
RemoveAndDispose(ref invertCullModeState);
rasterState = Collect(newRasterState);
invertCullModeState = Collect(newInvertCullModeState);
return true;
}
/// <summary>
///
/// </summary>
/// <param name="technique"></param>
/// <returns></returns>
protected override bool OnAttach(IRenderTechnique technique)
{
CreateRasterState(rasterDescription, true);
return true;
}
protected override void OnDetach()
{
rasterState = null;
invertCullModeState = null;
base.OnDetach();
}
/// <summary>
/// Called when [geometry buffer changed].
/// </summary>
/// <param name="buffer">The buffer.</param>
protected virtual void OnGeometryBufferChanged(IAttachableBufferModel buffer) { }
/// <summary>
/// Set all necessary states and buffers
/// </summary>
/// <param name="context"></param>
/// <param name="isInvertCullMode"></param>
[MethodImpl(MethodImplOptions.AggressiveInlining)]
protected void OnBindRasterState(DeviceContextProxy context, bool isInvertCullMode)
{
context.SetRasterState(!isInvertCullMode ? rasterState : invertCullModeState);
}
/// <summary>
/// Attach vertex buffer routine
/// </summary>
/// <param name="context"></param>
/// <param name="vertStartSlot"></param>
protected virtual bool OnAttachBuffers(DeviceContextProxy context, ref int vertStartSlot)
{
if(GeometryBuffer.AttachBuffers(context, ref vertStartSlot, EffectTechnique.EffectsManager))
{
InstanceBuffer.AttachBuffer(context, ref vertStartSlot);
return true;
}
else
{
return false;
}
}
/// <summary>
/// Called when [update can render flag].
/// </summary>
/// <returns></returns>
protected override bool OnUpdateCanRenderFlag()
{
return base.OnUpdateCanRenderFlag() && GeometryBuffer != null;
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void DrawIndexed(DeviceContextProxy context, IElementsBufferProxy indexBuffer, IElementsBufferModel instanceModel)
{
if (!instanceModel.HasElements)
{
context.DrawIndexed(indexBuffer.ElementCount, 0, 0);
}
else
{
context.DrawIndexedInstanced(indexBuffer.ElementCount, instanceModel.Buffer.ElementCount, 0, 0, 0);
}
}
[MethodImpl(MethodImplOptions.AggressiveInlining)]
public static void DrawPoints(DeviceContextProxy context, IElementsBufferProxy vertexBuffer, IElementsBufferModel instanceModel)
{
if (!instanceModel.HasElements)
{
context.Draw(vertexBuffer.ElementCount, 0);
}
else
{
context.DrawInstanced(vertexBuffer.ElementCount, instanceModel.Buffer.ElementCount, 0, 0);
}
}
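// Usage sketch (illustrative only): a derived render core's OnRender override would typically
// call one of the helpers above, e.g.
//   DrawIndexed(deviceContext, indexBuffer, InstanceBuffer);
// where 'indexBuffer' is a hypothetical IElementsBufferProxy obtained from the attached
// geometry buffer and is not a member shown in this file.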
[MethodImpl(MethodImplOptions.AggressiveInlining)]
protected bool PreRender(RenderContext context, DeviceContextProxy deviceContext)
{
int vertStartSlot = 0;
if (!OnAttachBuffers(deviceContext, ref vertStartSlot))
{
return false;
}
OnBindRasterState(deviceContext, context.IsInvertCullMode);
return CanRenderFlag;
}
/// <summary>
/// Trigger OnRender function delegate if CanRender()==true
/// </summary>
/// <param name="context"></param>
/// <param name="deviceContext"></param>
public sealed override void Render(RenderContext context, DeviceContextProxy deviceContext)
{
if (PreRender(context, deviceContext))
{
OnRender(context, deviceContext);
}
}
public sealed override void RenderShadow(RenderContext context, DeviceContextProxy deviceContext)
{
if (PreRender(context, deviceContext))
{
OnRenderShadow(context, deviceContext);
}
}
public sealed override void RenderCustom(RenderContext context, DeviceContextProxy deviceContext)
{
if (PreRender(context, deviceContext))
{
OnRenderCustom(context, deviceContext);
}
}
public sealed override void RenderDepth(RenderContext context, DeviceContextProxy deviceContext, ShaderPass customPass)
{
if (PreRender(context, deviceContext))
{
OnRenderDepth(context, deviceContext, customPass);
}
}
/// <summary>
/// Called when [render].
/// </summary>
/// <param name="context">The context.</param>
/// <param name="deviceContext">The device context.</param>
protected abstract void OnRender(RenderContext context, DeviceContextProxy deviceContext);
/// <summary>
/// Render function for custom shader pass. Used to do special effects
/// </summary>
protected abstract void OnRenderCustom(RenderContext context, DeviceContextProxy deviceContext);
/// <summary>
/// Called when [render shadow].
/// </summary>
/// <param name="context">The context.</param>
/// <param name="deviceContext"></param>
protected abstract void OnRenderShadow(RenderContext context, DeviceContextProxy deviceContext);
/// <summary>
/// Called when [render depth].
/// </summary>
/// <param name="context">The context.</param>
/// <param name="deviceContext">The device context.</param>
/// <param name="customPass">Custom depth pass</param>
protected abstract void OnRenderDepth(RenderContext context, DeviceContextProxy deviceContext, ShaderPass customPass);
protected void OnElementChanged(object sender, EventArgs e)
{
UpdateCanRenderFlag();
RaiseInvalidateRender();
}
protected void OnInvalidateRendererEvent(object sender, EventArgs e)
{
RaiseInvalidateRender();
}
}
}
}
| |
// CodeContracts
//
// Copyright (c) Microsoft Corporation
//
// All rights reserved.
//
// MIT License
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// File System.Windows.Controls.InkCanvas.cs
// Automatically generated contract file.
using System.Collections.Generic;
using System.IO;
using System.Text;
using System.Diagnostics.Contracts;
using System;
// Disable the "this variable is not used" warning as every field would imply it.
#pragma warning disable 0414
// Disable the "this variable is never assigned to".
#pragma warning disable 0067
// Disable the "this event is never assigned to".
#pragma warning disable 0649
// Disable the "this variable is never used".
#pragma warning disable 0169
// Disable the "new keyword not required" warning.
#pragma warning disable 0109
// Disable the "extern without DllImport" warning.
#pragma warning disable 0626
// Disable the "could hide other member" warning, can happen on certain properties.
#pragma warning disable 0108
namespace System.Windows.Controls
{
public partial class InkCanvas : System.Windows.FrameworkElement, System.Windows.Markup.IAddChild
{
#region Methods and constructors
protected override System.Windows.Size ArrangeOverride(System.Windows.Size arrangeSize)
{
return default(System.Windows.Size);
}
public bool CanPaste()
{
return default(bool);
}
public void CopySelection()
{
}
public void CutSelection()
{
}
public static double GetBottom(System.Windows.UIElement element)
{
return default(double);
}
public System.Collections.ObjectModel.ReadOnlyCollection<System.Windows.Ink.ApplicationGesture> GetEnabledGestures()
{
Contract.Ensures(Contract.Result<System.Collections.ObjectModel.ReadOnlyCollection<System.Windows.Ink.ApplicationGesture>>() != null);
return default(System.Collections.ObjectModel.ReadOnlyCollection<System.Windows.Ink.ApplicationGesture>);
}
public static double GetLeft(System.Windows.UIElement element)
{
return default(double);
}
public static double GetRight(System.Windows.UIElement element)
{
return default(double);
}
public System.Collections.ObjectModel.ReadOnlyCollection<System.Windows.UIElement> GetSelectedElements()
{
return default(System.Collections.ObjectModel.ReadOnlyCollection<System.Windows.UIElement>);
}
public System.Windows.Ink.StrokeCollection GetSelectedStrokes()
{
Contract.Ensures(Contract.Result<System.Windows.Ink.StrokeCollection>() != null);
return default(System.Windows.Ink.StrokeCollection);
}
public System.Windows.Rect GetSelectionBounds()
{
return default(System.Windows.Rect);
}
public static double GetTop(System.Windows.UIElement element)
{
return default(double);
}
protected override System.Windows.Media.Visual GetVisualChild(int index)
{
return default(System.Windows.Media.Visual);
}
protected override System.Windows.Media.HitTestResult HitTestCore(System.Windows.Media.PointHitTestParameters hitTestParams)
{
return default(System.Windows.Media.HitTestResult);
}
public InkCanvasSelectionHitResult HitTestSelection(System.Windows.Point point)
{
return default(InkCanvasSelectionHitResult);
}
public InkCanvas()
{
}
protected override System.Windows.Size MeasureOverride(System.Windows.Size availableSize)
{
return default(System.Windows.Size);
}
protected virtual new void OnActiveEditingModeChanged(System.Windows.RoutedEventArgs e)
{
}
protected override System.Windows.Automation.Peers.AutomationPeer OnCreateAutomationPeer()
{
return default(System.Windows.Automation.Peers.AutomationPeer);
}
protected virtual new void OnDefaultDrawingAttributesReplaced(System.Windows.Ink.DrawingAttributesReplacedEventArgs e)
{
}
protected virtual new void OnEditingModeChanged(System.Windows.RoutedEventArgs e)
{
}
protected virtual new void OnEditingModeInvertedChanged(System.Windows.RoutedEventArgs e)
{
}
protected virtual new void OnGesture(InkCanvasGestureEventArgs e)
{
}
protected override void OnPropertyChanged(System.Windows.DependencyPropertyChangedEventArgs e)
{
}
protected virtual new void OnSelectionChanged(EventArgs e)
{
}
protected virtual new void OnSelectionChanging(InkCanvasSelectionChangingEventArgs e)
{
}
protected virtual new void OnSelectionMoved(EventArgs e)
{
}
protected virtual new void OnSelectionMoving(InkCanvasSelectionEditingEventArgs e)
{
}
protected virtual new void OnSelectionResized(EventArgs e)
{
}
protected virtual new void OnSelectionResizing(InkCanvasSelectionEditingEventArgs e)
{
}
protected virtual new void OnStrokeCollected(InkCanvasStrokeCollectedEventArgs e)
{
}
protected virtual new void OnStrokeErased(System.Windows.RoutedEventArgs e)
{
}
protected virtual new void OnStrokeErasing(InkCanvasStrokeErasingEventArgs e)
{
}
protected virtual new void OnStrokesReplaced(InkCanvasStrokesReplacedEventArgs e)
{
}
public void Paste()
{
}
public void Paste(System.Windows.Point point)
{
Contract.Ensures(!double.IsInfinity(point.X));
Contract.Ensures(!double.IsInfinity(point.Y));
}
public void Select(System.Windows.Ink.StrokeCollection selectedStrokes)
{
}
public void Select(IEnumerable<System.Windows.UIElement> selectedElements)
{
}
public void Select(System.Windows.Ink.StrokeCollection selectedStrokes, IEnumerable<System.Windows.UIElement> selectedElements)
{
}
public static void SetBottom(System.Windows.UIElement element, double length)
{
}
public void SetEnabledGestures(IEnumerable<System.Windows.Ink.ApplicationGesture> applicationGestures)
{
}
public static void SetLeft(System.Windows.UIElement element, double length)
{
}
public static void SetRight(System.Windows.UIElement element, double length)
{
}
public static void SetTop(System.Windows.UIElement element, double length)
{
}
void System.Windows.Markup.IAddChild.AddChild(Object value)
{
}
void System.Windows.Markup.IAddChild.AddText(string textData)
{
}
#endregion
#region Properties and indexers
public InkCanvasEditingMode ActiveEditingMode
{
get
{
return default(InkCanvasEditingMode);
}
}
public System.Windows.Media.Brush Background
{
get
{
return default(System.Windows.Media.Brush);
}
set
{
}
}
public UIElementCollection Children
{
get
{
return default(UIElementCollection);
}
}
public System.Windows.Ink.DrawingAttributes DefaultDrawingAttributes
{
get
{
return default(System.Windows.Ink.DrawingAttributes);
}
set
{
}
}
public System.Windows.Input.StylusPointDescription DefaultStylusPointDescription
{
get
{
return default(System.Windows.Input.StylusPointDescription);
}
set
{
}
}
protected System.Windows.Input.StylusPlugIns.DynamicRenderer DynamicRenderer
{
get
{
return default(System.Windows.Input.StylusPlugIns.DynamicRenderer);
}
set
{
}
}
public InkCanvasEditingMode EditingMode
{
get
{
return default(InkCanvasEditingMode);
}
set
{
}
}
public InkCanvasEditingMode EditingModeInverted
{
get
{
return default(InkCanvasEditingMode);
}
set
{
}
}
public System.Windows.Ink.StylusShape EraserShape
{
get
{
Contract.Ensures(Contract.Result<System.Windows.Ink.StylusShape>() != null);
return default(System.Windows.Ink.StylusShape);
}
set
{
Contract.Requires(value != null);
}
}
protected InkPresenter InkPresenter
{
get
{
Contract.Ensures(Contract.Result<System.Windows.Controls.InkPresenter>() != null);
return default(InkPresenter);
}
}
public bool IsGestureRecognizerAvailable
{
get
{
return default(bool);
}
}
internal protected override System.Collections.IEnumerator LogicalChildren
{
get
{
return default(System.Collections.IEnumerator);
}
}
public bool MoveEnabled
{
get
{
return default(bool);
}
set
{
}
}
public IEnumerable<InkCanvasClipboardFormat> PreferredPasteFormats
{
get
{
return default(IEnumerable<InkCanvasClipboardFormat>);
}
set
{
}
}
public bool ResizeEnabled
{
get
{
return default(bool);
}
set
{
}
}
public System.Windows.Ink.StrokeCollection Strokes
{
get
{
return default(System.Windows.Ink.StrokeCollection);
}
set
{
}
}
public bool UseCustomCursor
{
get
{
return default(bool);
}
set
{
}
}
protected override int VisualChildrenCount
{
get
{
return default(int);
}
}
#endregion
#region Events
public event System.Windows.RoutedEventHandler ActiveEditingModeChanged
{
add
{
}
remove
{
}
}
public event System.Windows.Ink.DrawingAttributesReplacedEventHandler DefaultDrawingAttributesReplaced
{
add
{
}
remove
{
}
}
public event System.Windows.RoutedEventHandler EditingModeChanged
{
add
{
}
remove
{
}
}
public event System.Windows.RoutedEventHandler EditingModeInvertedChanged
{
add
{
}
remove
{
}
}
public event InkCanvasGestureEventHandler Gesture
{
add
{
}
remove
{
}
}
public event EventHandler SelectionChanged
{
add
{
}
remove
{
}
}
public event InkCanvasSelectionChangingEventHandler SelectionChanging
{
add
{
}
remove
{
}
}
public event EventHandler SelectionMoved
{
add
{
}
remove
{
}
}
public event InkCanvasSelectionEditingEventHandler SelectionMoving
{
add
{
}
remove
{
}
}
public event EventHandler SelectionResized
{
add
{
}
remove
{
}
}
public event InkCanvasSelectionEditingEventHandler SelectionResizing
{
add
{
}
remove
{
}
}
public event InkCanvasStrokeCollectedEventHandler StrokeCollected
{
add
{
}
remove
{
}
}
public event System.Windows.RoutedEventHandler StrokeErased
{
add
{
}
remove
{
}
}
public event InkCanvasStrokeErasingEventHandler StrokeErasing
{
add
{
}
remove
{
}
}
public event InkCanvasStrokesReplacedEventHandler StrokesReplaced
{
add
{
}
remove
{
}
}
#endregion
#region Fields
public readonly static System.Windows.RoutedEvent ActiveEditingModeChangedEvent;
public readonly static System.Windows.DependencyProperty ActiveEditingModeProperty;
public readonly static System.Windows.DependencyProperty BackgroundProperty;
public readonly static System.Windows.DependencyProperty BottomProperty;
public readonly static System.Windows.DependencyProperty DefaultDrawingAttributesProperty;
public readonly static System.Windows.RoutedEvent EditingModeChangedEvent;
public readonly static System.Windows.RoutedEvent EditingModeInvertedChangedEvent;
public readonly static System.Windows.DependencyProperty EditingModeInvertedProperty;
public readonly static System.Windows.DependencyProperty EditingModeProperty;
public readonly static System.Windows.RoutedEvent GestureEvent;
public readonly static System.Windows.DependencyProperty LeftProperty;
public readonly static System.Windows.DependencyProperty RightProperty;
public readonly static System.Windows.RoutedEvent StrokeCollectedEvent;
public readonly static System.Windows.RoutedEvent StrokeErasedEvent;
public readonly static System.Windows.DependencyProperty StrokesProperty;
public readonly static System.Windows.DependencyProperty TopProperty;
#endregion
}
}
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
// URLString
//
//
// Implementation of membership condition for zones
//
namespace System.Security.Util {
using System;
using System.Collections;
using System.Collections.Generic;
using System.Runtime.CompilerServices;
using System.Runtime.InteropServices;
using System.Runtime.Versioning;
using System.Runtime.Serialization;
using System.Globalization;
using System.Text;
using System.IO;
using System.Diagnostics.Contracts;
internal sealed class URLString : SiteString
{
private String m_protocol;
[OptionalField(VersionAdded = 2)]
private String m_userpass;
private SiteString m_siteString;
private int m_port;
#if !PLATFORM_UNIX
private LocalSiteString m_localSite;
#endif // !PLATFORM_UNIX
private DirectoryString m_directory;
private const String m_defaultProtocol = "file";
[OptionalField(VersionAdded = 2)]
private bool m_parseDeferred;
[OptionalField(VersionAdded = 2)]
private String m_urlOriginal;
[OptionalField(VersionAdded = 2)]
private bool m_parsedOriginal;
[OptionalField(VersionAdded = 3)]
private bool m_isUncShare;
// legacy field from v1.x, not used in v2 and beyond. Retained purely for serialization compatibility.
private String m_fullurl;
[OnDeserialized]
public void OnDeserialized(StreamingContext ctx)
{
if (m_urlOriginal == null)
{
// pre-v2 deserialization. Need to fix-up fields here
m_parseDeferred = false;
m_parsedOriginal = false; // Don't care what this value is - never used
m_userpass = "";
m_urlOriginal = m_fullurl;
m_fullurl = null;
}
}
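// When serializing to a persisted/remote context (anything other than a pure Clone or
// CrossAppDomain context), force a full parse and copy the original URL into the legacy
// m_fullurl field so that pre-v2 consumers can still deserialize this instance.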
[OnSerializing]
private void OnSerializing(StreamingContext ctx)
{
if ((ctx.State & ~(StreamingContextStates.Clone|StreamingContextStates.CrossAppDomain)) != 0)
{
DoDeferredParse();
m_fullurl = m_urlOriginal;
}
}
[OnSerialized]
private void OnSerialized(StreamingContext ctx)
{
if ((ctx.State & ~(StreamingContextStates.Clone|StreamingContextStates.CrossAppDomain)) != 0)
{
m_fullurl = null;
}
}
public URLString()
{
m_protocol = "";
m_userpass = "";
m_siteString = new SiteString();
m_port = -1;
#if !PLATFORM_UNIX
m_localSite = null;
#endif // !PLATFORM_UNIX
m_directory = new DirectoryString();
m_parseDeferred = false;
}
private void DoDeferredParse()
{
if (m_parseDeferred)
{
ParseString(m_urlOriginal, m_parsedOriginal);
m_parseDeferred = false;
}
}
public URLString(string url) : this(url, false, false) {}
public URLString(string url, bool parsed) : this(url, parsed, false) {}
internal URLString(string url, bool parsed, bool doDeferredParsing)
{
m_port = -1;
m_userpass = "";
DoFastChecks(url);
m_urlOriginal = url;
m_parsedOriginal = parsed;
m_parseDeferred = true;
if (doDeferredParsing)
DoDeferredParse();
}
// Converts %XX and %uYYYY to the actual characters (i.e. unescapes any escape characters present in the URL)
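// e.g. (illustrative): "%20" decodes to ' ' and "%u0041" decodes to 'A'; a '%' that falls
// inside an IPv6 literal such as "[fe80::1%25eth0]" is copied through untouched.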
private String UnescapeURL(String url)
{
StringBuilder intermediate = StringBuilderCache.Acquire(url.Length);
int Rindex = 0; // index into temp that gives the rest of the string to be processed
int index;
int braIndex = -1;
int ketIndex = -1;
braIndex = url.IndexOf('[',Rindex);
if (braIndex != -1)
ketIndex = url.IndexOf(']', braIndex);
do
{
index = url.IndexOf( '%', Rindex);
if (index == -1)
{
intermediate = intermediate.Append(url, Rindex, (url.Length - Rindex));
break;
}
// if we hit a '%' in the middle of an IPv6 address, don't process it
if (index > braIndex && index < ketIndex)
{
intermediate = intermediate.Append(url, Rindex, (ketIndex - Rindex+1));
Rindex = ketIndex+1;
continue;
}
if (url.Length - index < 2) // Check that there is at least 1 char after the '%'
throw new ArgumentException( Environment.GetResourceString( "Argument_InvalidUrl" ) );
if (url[index+1] == 'u' || url[index+1] == 'U')
{
if (url.Length - index < 6) // example: "%u004d" is 6 chars long
throw new ArgumentException( Environment.GetResourceString( "Argument_InvalidUrl" ) );
// We have a unicode character specified in hex
try
{
char c = (char)(Hex.ConvertHexDigit( url[index+2] ) << 12 |
Hex.ConvertHexDigit( url[index+3] ) << 8 |
Hex.ConvertHexDigit( url[index+4] ) << 4 |
Hex.ConvertHexDigit( url[index+5] ));
intermediate = intermediate.Append(url, Rindex, index - Rindex);
intermediate = intermediate.Append(c);
}
catch(ArgumentException) // Hex.ConvertHexDigit can throw an "out of range" ArgumentException
{
throw new ArgumentException( Environment.GetResourceString( "Argument_InvalidUrl" ) );
}
Rindex = index + 6 ; //update the 'seen' length
}
else
{
// we have a hex character.
if (url.Length - index < 3) // example: "%4d" is 3 chars long
throw new ArgumentException( Environment.GetResourceString( "Argument_InvalidUrl" ) );
try
{
char c = (char)(Hex.ConvertHexDigit( url[index+1] ) << 4 | Hex.ConvertHexDigit( url[index+2] ));
intermediate = intermediate.Append(url, Rindex, index - Rindex);
intermediate = intermediate.Append(c);
}
catch(ArgumentException) // Hex.ConvertHexDigit can throw an "out of range" ArgumentException
{
throw new ArgumentException( Environment.GetResourceString( "Argument_InvalidUrl" ) );
}
Rindex = index + 3; // update the 'seen' length
}
}
while (true);
return StringBuilderCache.GetStringAndRelease(intermediate);
}
// Helper Function for ParseString:
// Search for the end of the protocol info and grab the actual protocol string
// ex. http://www.microsoft.com/complus would have a protocol string of http
private String ParseProtocol(String url)
{
String temp;
int index = url.IndexOf( ':' );
if (index == 0)
{
throw new ArgumentException( Environment.GetResourceString( "Argument_InvalidUrl" ) );
}
else if (index == -1)
{
m_protocol = m_defaultProtocol;
temp = url;
}
else if (url.Length > index + 1)
{
if (index == m_defaultProtocol.Length &&
String.Compare(url, 0, m_defaultProtocol, 0, index, StringComparison.OrdinalIgnoreCase) == 0)
{
m_protocol = m_defaultProtocol;
temp = url.Substring( index + 1 );
// Since an explicit file:// URL could be immediately followed by a host name, we will be
// conservative and assume that it is on a share rather than a potentially relative local
// URL.
m_isUncShare = true;
}
else if (url[index+1] != '\\')
{
#if !PLATFORM_UNIX
if (url.Length > index + 2 &&
url[index+1] == '/' &&
url[index+2] == '/')
#else
if (url.Length > index + 1 &&
url[index+1] == '/' ) // UNIX style "file:/home/me" is allowed, so account for that
#endif // !PLATFORM_UNIX
{
m_protocol = url.Substring( 0, index );
for (int i = 0; i < m_protocol.Length; ++i)
{
char c = m_protocol[i];
if ((c >= 'a' && c <= 'z') ||
(c >= 'A' && c <= 'Z') ||
(c >= '0' && c <= '9') ||
(c == '+') ||
(c == '.') ||
(c == '-'))
{
continue;
}
else
{
throw new ArgumentException( Environment.GetResourceString( "Argument_InvalidUrl" ) );
}
}
#if !PLATFORM_UNIX
temp = url.Substring( index + 3 );
#else
// In UNIX, we don't know how many characters we'll have to skip past.
// Skip past \, /, and :
//
for ( int j=index ; j<url.Length ; j++ )
{
if ( url[j] != '\\' && url[j] != '/' && url[j] != ':' )
{
index = j;
break;
}
}
temp = url.Substring( index );
#endif // !PLATFORM_UNIX
}
else
{
throw new ArgumentException( Environment.GetResourceString( "Argument_InvalidUrl" ) );
}
}
else
{
m_protocol = m_defaultProtocol;
temp = url;
}
}
else
{
throw new ArgumentException( Environment.GetResourceString( "Argument_InvalidUrl" ) );
}
return temp;
}
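// Helper Function for ParseString:
// Extract an optional "user:pass@" prefix into m_userpass and an optional ":port" suffix
// into m_port, skipping any bracketed IPv6 literal when looking for the port separator.
// Returns the URL with the user/pass and port information stripped out.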
private String ParsePort(String url)
{
String temp = url;
char[] separators = new char[] { ':', '/' };
int Rindex = 0;
int userpassIndex = temp.IndexOf('@');
if (userpassIndex != -1) {
if (temp.IndexOf('/',0,userpassIndex) == -1) {
// this is a user:pass type of string
m_userpass = temp.Substring(0,userpassIndex);
Rindex = userpassIndex + 1;
}
}
int braIndex = -1;
int ketIndex = -1;
int portIndex = -1;
braIndex = url.IndexOf('[',Rindex);
if (braIndex != -1)
ketIndex = url.IndexOf(']', braIndex);
if (ketIndex != -1)
{
// IPv6 address...ignore the IPv6 block when searching for the port
portIndex = temp.IndexOfAny(separators,ketIndex);
}
else
{
portIndex = temp.IndexOfAny(separators,Rindex);
}
if (portIndex != -1 && temp[portIndex] == ':')
{
// make sure it really is a port, and has a number after the :
if ( temp[portIndex+1] >= '0' && temp[portIndex+1] <= '9' )
{
int tempIndex = temp.IndexOf( '/', Rindex);
if (tempIndex == -1)
{
m_port = Int32.Parse( temp.Substring(portIndex + 1), CultureInfo.InvariantCulture );
if (m_port < 0)
throw new ArgumentException( Environment.GetResourceString( "Argument_InvalidUrl" ) );
temp = temp.Substring( Rindex, portIndex - Rindex );
}
else if (tempIndex > portIndex)
{
m_port = Int32.Parse( temp.Substring(portIndex + 1, tempIndex - portIndex - 1), CultureInfo.InvariantCulture );
temp = temp.Substring( Rindex, portIndex - Rindex ) + temp.Substring( tempIndex );
}
else
throw new ArgumentException( Environment.GetResourceString( "Argument_InvalidUrl" ) );
}
else
throw new ArgumentException( Environment.GetResourceString( "Argument_InvalidUrl" ) );
}
else {
// Chop off the user/pass portion, if any
temp = temp.Substring(Rindex);
}
return temp;
}
// This does three things:
// 1. It makes the following modifications to the start of the string:
// a. \\?\ and \\?/ => <empty>
// b. \\.\ and \\./ => <empty>
// 2. If isFileUrl is true, converts all slashes to front slashes and strips leading
// front slashes. See comment by code.
// 3. Throws a PathTooLongException if the length of the resulting URL is >= MAX_PATH.
// This is done to prevent security issues due to canonicalization truncations.
// Remove this method when the Path class supports "\\?\"
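// e.g. (illustrative): PreProcessForExtendedPathRemoval(@"\\?\C:\temp\a.dll", isFileUrl: true)
// yields "C:/temp/a.dll" - the extended-path prefix is removed and backslashes become '/'.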
internal static string PreProcessForExtendedPathRemoval(string url, bool isFileUrl)
{
return PreProcessForExtendedPathRemoval(checkPathLength: true, url: url, isFileUrl: isFileUrl);
}
internal static string PreProcessForExtendedPathRemoval(bool checkPathLength, string url, bool isFileUrl)
{
bool isUncShare = false;
return PreProcessForExtendedPathRemoval(checkPathLength: checkPathLength, url: url, isFileUrl: isFileUrl, isUncShare: ref isUncShare);
}
// Keeping this signature to avoid reflection breaks
private static string PreProcessForExtendedPathRemoval(string url, bool isFileUrl, ref bool isUncShare)
{
return PreProcessForExtendedPathRemoval(checkPathLength: true, url: url, isFileUrl: isFileUrl, isUncShare: ref isUncShare);
}
private static string PreProcessForExtendedPathRemoval(bool checkPathLength, string url, bool isFileUrl, ref bool isUncShare)
{
// This is the modified URL that we will return
StringBuilder modifiedUrl = new StringBuilder(url);
// ITEM 1 - remove extended path characters.
{
// Keep track of where we are in both the comparison and altered strings.
int curCmpIdx = 0;
int curModIdx = 0;
// If all the '\' have already been converted to '/', just check for //?/ or //./
if ((url.Length - curCmpIdx) >= 4 &&
(String.Compare(url, curCmpIdx, "//?/", 0, 4, StringComparison.OrdinalIgnoreCase) == 0 ||
String.Compare(url, curCmpIdx, "//./", 0, 4, StringComparison.OrdinalIgnoreCase) == 0))
{
modifiedUrl.Remove(curModIdx, 4);
curCmpIdx += 4;
}
else
{
if (isFileUrl) {
// We need to handle an indefinite number of leading front slashes for file URLs since we could
// get something like:
// file://\\?\
// file:/\\?\
// file:\\?\
// etc...
while (url[curCmpIdx] == '/')
{
curCmpIdx++;
curModIdx++;
}
}
// Remove the extended path characters
if ((url.Length - curCmpIdx) >= 4 &&
(String.Compare(url, curCmpIdx, "\\\\?\\", 0, 4, StringComparison.OrdinalIgnoreCase) == 0 ||
String.Compare(url, curCmpIdx, "\\\\?/", 0, 4, StringComparison.OrdinalIgnoreCase) == 0 ||
String.Compare(url, curCmpIdx, "\\\\.\\", 0, 4, StringComparison.OrdinalIgnoreCase) == 0 ||
String.Compare(url, curCmpIdx, "\\\\./", 0, 4, StringComparison.OrdinalIgnoreCase) == 0))
{
modifiedUrl.Remove(curModIdx, 4);
curCmpIdx += 4;
}
}
}
// ITEM 2 - convert all slashes to forward slashes, and strip leading slashes.
if (isFileUrl)
{
int slashCount = 0;
bool seenFirstBackslash = false;
while (slashCount < modifiedUrl.Length && (modifiedUrl[slashCount] == '/' || modifiedUrl[slashCount] == '\\'))
{
// Look for sets of consecutive backslashes. We can't just look for these at the start
// of the string, since file:// might come first. Instead, once we see the first \, look
// for a second one following it.
if (!seenFirstBackslash && modifiedUrl[slashCount] == '\\')
{
seenFirstBackslash = true;
if (slashCount + 1 < modifiedUrl.Length && modifiedUrl[slashCount + 1] == '\\')
isUncShare = true;
}
slashCount++;
}
modifiedUrl.Remove(0, slashCount);
modifiedUrl.Replace('\\', '/');
}
// ITEM 3 - If the resulting path length is greater than or equal to MAX_PATH (due to the terminating NULL in Windows), we throw.
if (checkPathLength)
{
// This needs to be a separate method to avoid hitting the static constructor on AppContextSwitches
CheckPathTooLong(modifiedUrl);
}
// Create the result string from the StringBuilder
return modifiedUrl.ToString();
}
[MethodImpl(MethodImplOptions.NoInlining)]
private static void CheckPathTooLong(StringBuilder path)
{
if (path.Length >= (
#if PLATFORM_UNIX
Interop.Sys.MaxPath))
#else
PathInternal.MaxLongPath))
#endif
{
throw new PathTooLongException(Environment.GetResourceString("IO.PathTooLong"));
}
}
// Do any misc massaging of data in the URL
private String PreProcessURL(String url, bool isFileURL)
{
#if !PLATFORM_UNIX
if (isFileURL) {
// Remove when the Path class supports "\\?\"
url = PreProcessForExtendedPathRemoval(url, true, ref m_isUncShare);
}
else {
url = url.Replace('\\', '/');
}
return url;
#else
// Remove superfluous '/'
// For UNIX, the file path would look something like:
// file:///home/johndoe/here
// file:/home/johndoe/here
// file:../johndoe/here
// file:~/johndoe/here
String temp = url;
int nbSlashes = 0;
while(nbSlashes<temp.Length && '/'==temp[nbSlashes])
nbSlashes++;
// if we get a path like file:///directory/name we need to convert
// this to /directory/name.
if(nbSlashes > 2)
temp = temp.Substring(nbSlashes-1, temp.Length - (nbSlashes-1));
else if (2 == nbSlashes) /* it's a relative path */
temp = temp.Substring(nbSlashes, temp.Length - nbSlashes);
return temp;
#endif // !PLATFORM_UNIX
}
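// Parse the remainder of a file URL. On Windows the text up to the relevant '/' becomes the
// local site (a drive letter such as "C:" or a UNC "computername/share") stored in
// m_localSite and the rest becomes m_directory; on PLATFORM_UNIX the whole path becomes
// m_directory. For file URLs m_siteString is always set to null.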
private void ParseFileURL(String url)
{
String temp = url;
#if !PLATFORM_UNIX
int index = temp.IndexOf( '/');
if (index != -1 &&
((index == 2 &&
temp[index-1] != ':' &&
temp[index-1] != '|') ||
index != 2) &&
index != temp.Length - 1)
{
// Also, if it is a UNC share, we want m_localSite to
// be of the form "computername/share", so if the first
// fileEnd character found is a slash, do some more parsing
// to find the proper end character.
int tempIndex = temp.IndexOf( '/', index+1);
if (tempIndex != -1)
index = tempIndex;
else
index = -1;
}
String localSite;
if (index == -1)
localSite = temp;
else
localSite = temp.Substring(0,index);
if (localSite.Length == 0)
throw new ArgumentException( Environment.GetResourceString( "Argument_InvalidUrl" ) );
int i;
bool spacesAllowed;
if (localSite[0] == '\\' && localSite[1] == '\\')
{
spacesAllowed = true;
i = 2;
}
else
{
i = 0;
spacesAllowed = false;
}
bool useSmallCharToUpper = true;
for (; i < localSite.Length; ++i)
{
char c = localSite[i];
if ((c >= 'A' && c <= 'Z') ||
(c >= 'a' && c <= 'z') ||
(c >= '0' && c <= '9') ||
(c == '-') || (c == '/') ||
(c == ':') || (c == '|') ||
(c == '.') || (c == '*') ||
(c == '$') || (spacesAllowed && c == ' '))
{
continue;
}
else
{
useSmallCharToUpper = false;
break;
}
}
if (useSmallCharToUpper)
localSite = String.SmallCharToUpper( localSite );
else
localSite = localSite.ToUpper(CultureInfo.InvariantCulture);
m_localSite = new LocalSiteString( localSite );
if (index == -1)
{
if (localSite[localSite.Length-1] == '*')
m_directory = new DirectoryString( "*", false );
else
m_directory = new DirectoryString();
}
else
{
String directoryString = temp.Substring( index + 1 );
if (directoryString.Length == 0)
{
m_directory = new DirectoryString();
}
else
{
m_directory = new DirectoryString( directoryString, true);
}
}
#else // !PLATFORM_UNIX
m_directory = new DirectoryString( temp, true);
#endif // !PLATFORM_UNIX
m_siteString = null;
return;
}
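// Parse the remainder of a non-file URL: everything before the first '/' becomes the
// SiteString (host) and everything after it becomes the DirectoryString.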
private void ParseNonFileURL(String url)
{
String temp = url;
int index = temp.IndexOf('/');
if (index == -1)
{
#if !PLATFORM_UNIX
m_localSite = null; // for drive letter
#endif // !PLATFORM_UNIX
m_siteString = new SiteString( temp );
m_directory = new DirectoryString();
}
else
{
#if !PLATFORM_UNIX
String site = temp.Substring( 0, index );
m_localSite = null;
m_siteString = new SiteString( site );
String directoryString = temp.Substring( index + 1 );
if (directoryString.Length == 0)
{
m_directory = new DirectoryString();
}
else
{
m_directory = new DirectoryString( directoryString, false );
}
#else
String directoryString = temp.Substring( index + 1 );
String site = temp.Substring( 0, index );
m_directory = new DirectoryString( directoryString, false );
m_siteString = new SiteString( site );
#endif //!PLATFORM_UNIX
}
return;
}
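// Cheap up-front validation that does not require parsing: reject null and empty URLs
// immediately so that the real parse can safely be deferred.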
void DoFastChecks( String url )
{
if (url == null)
{
throw new ArgumentNullException( nameof(url) );
}
Contract.EndContractBlock();
if (url.Length == 0)
{
throw new FormatException(Environment.GetResourceString("Format_StringZeroLength"));
}
}
// NOTE:
// 1. We support URLs that follow the common Internet scheme syntax
// (<scheme>://user:pass@<host>:<port>/<url-path>) and all Windows file URLs.
// 2. In the general case we parse off the site and create a SiteString out of it
// (which supports our wildcarding scheme). In the case of files we don't support
// wildcarding and furthermore SiteString doesn't like ':' and '|' which can appear
// in file urls so we just keep that info in a separate string and set the
// SiteString to null.
//
// ex. http://www.microsoft.com/complus -> m_siteString = "www.microsoft.com" m_localSite = null
// ex. file:///c:/complus/mscorlib.dll -> m_siteString = null m_localSite = "c:"
// ex. file:///c|/complus/mscorlib.dll -> m_siteString = null m_localSite = "c:"
void ParseString( String url, bool parsed )
{
// If there are any escaped hex or unicode characters in the url, translate those
// into the proper character.
if (!parsed)
{
url = UnescapeURL(url);
}
// Identify the protocol and strip the protocol info from the string, if present.
String temp = ParseProtocol(url);
bool fileProtocol = (String.Compare( m_protocol, "file", StringComparison.OrdinalIgnoreCase) == 0);
// handle any special processing...removing extra characters, etc.
temp = PreProcessURL(temp, fileProtocol);
if (fileProtocol)
{
ParseFileURL(temp);
}
else
{
// Check if there is a port number and parse that out.
temp = ParsePort(temp);
ParseNonFileURL(temp);
// Note that we allow DNS and NetBIOS names for non-file protocols (since SiteString will check
// that the hostname satisfies these two formats). DNS-only checking can theoretically be added
// here but that would break all the programs that use '_' (which is fairly common, yet illegal).
// If this needs to be done at any point, add a call to m_siteString.IsLegalDNSName().
}
}
public String Scheme
{
get
{
DoDeferredParse();
return m_protocol;
}
}
public String Host
{
get
{
DoDeferredParse();
if (m_siteString != null)
{
return m_siteString.ToString();
}
else
{
#if !PLATFORM_UNIX
return m_localSite.ToString();
#else
return "";
#endif // !PLATFORM_UNIX
}
}
}
public String Port
{
get
{
DoDeferredParse();
if (m_port == -1)
return null;
else
return m_port.ToString(CultureInfo.InvariantCulture);
}
}
public String Directory
{
get
{
DoDeferredParse();
return m_directory.ToString();
}
}
/// <summary>
/// Make a best guess at determining if this is URL refers to a file with a relative path. Since
/// this is a guess to help out users of UrlMembershipCondition who may accidentally supply a
/// relative URL, we'd rather err on the side of absolute than relative. (We'd rather accept some
/// meaningless membership conditions rather than reject meaningful ones).
///
/// In order to be a relative file URL, the URL needs to have a protocol of file, and not be on a
/// UNC share.
///
/// If both of the above are true, then the heuristics we'll use to detect an absolute URL are:
/// 1. A host name which is:
/// a. greater than one character and ends in a colon (representing the drive letter) OR
/// b. ends with a * (so we match any file with the given prefix if any)
/// 2. Has a directory name (cannot be simply file://c:)
/// </summary>
public bool IsRelativeFileUrl
{
get
{
DoDeferredParse();
if (String.Equals(m_protocol, "file", StringComparison.OrdinalIgnoreCase) && !m_isUncShare)
{
#if !PLATFORM_UNIX
string host = m_localSite != null ? m_localSite.ToString() : null;
// If the host name ends with the * character, treat this as an absolute URL since the *
// could represent the rest of the full path.
if (host.EndsWith('*'))
return false;
#endif // !PLATFORM_UNIX
string directory = m_directory != null ? m_directory.ToString() : null;
#if !PLATFORM_UNIX
return host == null || host.Length < 2 || !host.EndsWith(':') ||
String.IsNullOrEmpty(directory);
#else
return String.IsNullOrEmpty(directory);
#endif // !PLATFORM_UNIX
}
// Since this is not a local URL, it cannot be relative
return false;
}
}
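// For file URLs, reconstruct a Windows-style path ("C:\dir\file" or "\\server\share\dir\file")
// from the host and directory portions (on PLATFORM_UNIX the Directory already holds the full
// path). Returns null for non-file URLs.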
public String GetFileName()
{
DoDeferredParse();
#if !PLATFORM_UNIX
if (String.Compare( m_protocol, "file", StringComparison.OrdinalIgnoreCase) != 0)
return null;
String intermediateDirectory = this.Directory.Replace( '/', '\\' );
String directory = this.Host.Replace( '/', '\\' );
int directorySlashIndex = directory.IndexOf( '\\' );
if (directorySlashIndex == -1)
{
if (directory.Length != 2 ||
!(directory[1] == ':' || directory[1] == '|'))
{
directory = "\\\\" + directory;
}
}
else if (directorySlashIndex != 2 ||
(directorySlashIndex == 2 && directory[1] != ':' && directory[1] != '|'))
{
directory = "\\\\" + directory;
}
directory += "\\" + intermediateDirectory;
return directory;
#else
// In Unix, directory contains the full pathname
// (this is what we get in Win32)
if (String.Compare( m_protocol, "file", StringComparison.OrdinalIgnoreCase ) != 0)
return null;
return this.Directory;
#endif // !PLATFORM_UNIX
}
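// Like GetFileName, but strips the final path component so that only the containing
// directory is returned. Returns null for non-file URLs.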
public String GetDirectoryName()
{
DoDeferredParse();
#if !PLATFORM_UNIX
if (String.Compare( m_protocol, "file", StringComparison.OrdinalIgnoreCase ) != 0)
return null;
String intermediateDirectory = this.Directory.Replace( '/', '\\' );
int slashIndex = 0;
for (int i = intermediateDirectory.Length; i > 0; i--)
{
if (intermediateDirectory[i-1] == '\\')
{
slashIndex = i;
break;
}
}
String directory = this.Host.Replace( '/', '\\' );
int directorySlashIndex = directory.IndexOf( '\\' );
if (directorySlashIndex == -1)
{
if (directory.Length != 2 ||
!(directory[1] == ':' || directory[1] == '|'))
{
directory = "\\\\" + directory;
}
}
else if (directorySlashIndex > 2 ||
(directorySlashIndex == 2 && directory[1] != ':' && directory[1] != '|'))
{
directory = "\\\\" + directory;
}
directory += "\\";
if (slashIndex > 0)
{
directory += intermediateDirectory.Substring( 0, slashIndex );
}
return directory;
#else
if (String.Compare( m_protocol, "file", StringComparison.OrdinalIgnoreCase) != 0)
return null;
String directory = this.Directory.ToString();
int slashIndex = 0;
for (int i = directory.Length; i > 0; i--)
{
if (directory[i-1] == '/')
{
slashIndex = i;
break;
}
}
if (slashIndex > 0)
{
directory = directory.Substring( 0, slashIndex );
}
return directory;
#endif // !PLATFORM_UNIX
}
public override SiteString Copy()
{
return new URLString( m_urlOriginal, m_parsedOriginal );
}
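// A URLString is a subset of another when their protocols match case-insensitively, its
// directory is a subset of the other's directory and, for file URLs, its local site is a
// subset of the other's local site (for non-file URLs the ports must match and the site
// strings must be subsets).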
public override bool IsSubsetOf( SiteString site )
{
if (site == null)
{
return false;
}
URLString url = site as URLString;
if (url == null)
{
return false;
}
DoDeferredParse();
url.DoDeferredParse();
URLString normalUrl1 = this.SpecialNormalizeUrl();
URLString normalUrl2 = url.SpecialNormalizeUrl();
if (String.Compare( normalUrl1.m_protocol, normalUrl2.m_protocol, StringComparison.OrdinalIgnoreCase) == 0 &&
normalUrl1.m_directory.IsSubsetOf( normalUrl2.m_directory ))
{
#if !PLATFORM_UNIX
if (normalUrl1.m_localSite != null)
{
// We do a little extra processing in here for local files since we allow
// both <drive_letter>: and <drive_letter>| forms of urls.
return normalUrl1.m_localSite.IsSubsetOf( normalUrl2.m_localSite );
}
else
#endif // !PLATFORM_UNIX
{
if (normalUrl1.m_port != normalUrl2.m_port)
return false;
return normalUrl2.m_siteString != null && normalUrl1.m_siteString.IsSubsetOf( normalUrl2.m_siteString );
}
}
else
{
return false;
}
}
public override String ToString()
{
return m_urlOriginal;
}
public override bool Equals(Object o)
{
DoDeferredParse();
if (o == null || !(o is URLString))
return false;
else
return this.Equals( (URLString)o );
}
public override int GetHashCode()
{
DoDeferredParse();
TextInfo info = CultureInfo.InvariantCulture.TextInfo;
int accumulator = 0;
if (this.m_protocol != null)
accumulator = info.GetCaseInsensitiveHashCode( this.m_protocol );
#if !PLATFORM_UNIX
if (this.m_localSite != null)
{
accumulator = accumulator ^ this.m_localSite.GetHashCode();
}
else
{
accumulator = accumulator ^ this.m_siteString.GetHashCode();
}
accumulator = accumulator ^ this.m_directory.GetHashCode();
#else
accumulator = accumulator ^ info.GetCaseInsensitiveHashCode(this.m_urlOriginal);
#endif // !PLATFORM_UNIX
return accumulator;
}
public bool Equals( URLString url )
{
return CompareUrls( this, url );
}
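// Structural equality for two URLStrings: protocols must match case-insensitively; file URLs
// compare their local sites, other URLs compare user/pass, site string and port; finally the
// directory portions must be mutual subsets of each other.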
public static bool CompareUrls( URLString url1, URLString url2 )
{
if (url1 == null && url2 == null)
return true;
if (url1 == null || url2 == null)
return false;
url1.DoDeferredParse();
url2.DoDeferredParse();
URLString normalUrl1 = url1.SpecialNormalizeUrl();
URLString normalUrl2 = url2.SpecialNormalizeUrl();
// Compare protocol (case insensitive)
if (String.Compare( normalUrl1.m_protocol, normalUrl2.m_protocol, StringComparison.OrdinalIgnoreCase) != 0)
return false;
// Do special processing for file urls
if (String.Compare( normalUrl1.m_protocol, "file", StringComparison.OrdinalIgnoreCase) == 0)
{
#if !PLATFORM_UNIX
if (!normalUrl1.m_localSite.IsSubsetOf( normalUrl2.m_localSite ) ||
!normalUrl2.m_localSite.IsSubsetOf( normalUrl1.m_localSite ))
return false;
#else
return url1.IsSubsetOf( url2 ) &&
url2.IsSubsetOf( url1 );
#endif // !PLATFORM_UNIX
}
else
{
if (String.Compare( normalUrl1.m_userpass, normalUrl2.m_userpass, StringComparison.Ordinal) != 0)
return false;
if (!normalUrl1.m_siteString.IsSubsetOf( normalUrl2.m_siteString ) ||
!normalUrl2.m_siteString.IsSubsetOf( normalUrl1.m_siteString ))
return false;
if (url1.m_port != url2.m_port)
return false;
}
if (!normalUrl1.m_directory.IsSubsetOf( normalUrl2.m_directory ) ||
!normalUrl2.m_directory.IsSubsetOf( normalUrl1.m_directory ))
return false;
return true;
}
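// Build a canonical, upper-cased representation of the URL ("FILE:///<site>/<dir>" for file
// URLs, "<protocol>://<user:pass><site><port>/<dir>" otherwise) suitable for comparisons.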
internal String NormalizeUrl()
{
DoDeferredParse();
StringBuilder builtUrl = StringBuilderCache.Acquire();
if (String.Compare( m_protocol, "file", StringComparison.OrdinalIgnoreCase) == 0)
{
#if !PLATFORM_UNIX
builtUrl = builtUrl.AppendFormat("FILE:///{0}/{1}", m_localSite.ToString(), m_directory.ToString());
#else
builtUrl = builtUrl.AppendFormat("FILE:///{0}", m_directory.ToString());
#endif // !PLATFORM_UNIX
}
else
{
builtUrl = builtUrl.AppendFormat("{0}://{1}{2}", m_protocol, m_userpass, m_siteString.ToString());
if (m_port != -1)
builtUrl = builtUrl.AppendFormat("{0}",m_port);
builtUrl = builtUrl.AppendFormat("/{0}", m_directory.ToString());
}
return StringBuilderCache.GetStringAndRelease(builtUrl).ToUpper(CultureInfo.InvariantCulture);
}
#if !PLATFORM_UNIX
internal URLString SpecialNormalizeUrl()
{
// Under WinXP, file protocol urls can be mapped to
// drives that aren't actually file protocol underneath
// due to drive mounting. This code attempts to figure
// out what a drive is mounted to and create the
// url it maps to.
DoDeferredParse();
if (String.Compare( m_protocol, "file", StringComparison.OrdinalIgnoreCase) != 0)
{
return this;
}
else
{
String localSite = m_localSite.ToString();
if (localSite.Length == 2 &&
(localSite[1] == '|' ||
localSite[1] == ':'))
{
String deviceName = null;
GetDeviceName(localSite, JitHelpers.GetStringHandleOnStack(ref deviceName));
if (deviceName != null)
{
if (deviceName.IndexOf( "://", StringComparison.Ordinal ) != -1)
{
URLString u = new URLString( deviceName + "/" + this.m_directory.ToString() );
u.DoDeferredParse(); // Presumably the caller of SpecialNormalizeUrl wants a fully parsed URL
return u;
}
else
{
URLString u = new URLString( "file://" + deviceName + "/" + this.m_directory.ToString() );
u.DoDeferredParse();// Presumably the caller of SpecialNormalizeUrl wants a fully parsed URL
return u;
}
}
else
return this;
}
else
{
return this;
}
}
}
[DllImport(JitHelpers.QCall, CharSet = CharSet.Unicode)]
[SuppressUnmanagedCodeSecurity]
private static extern void GetDeviceName( String driveLetter, StringHandleOnStack retDeviceName );
#else
internal URLString SpecialNormalizeUrl()
{
return this;
}
#endif // !PLATFORM_UNIX
}
[Serializable]
internal class DirectoryString : SiteString
{
private bool m_checkForIllegalChars;
// From KB #Q177506, file/folder illegal characters are \ / : * ? " < > |
protected static char[] m_illegalDirectoryCharacters = { '\\', ':', '*', '?', '"', '<', '>', '|' };
public DirectoryString()
{
m_site = "";
m_separatedSite = new ArrayList();
}
public DirectoryString( String directory, bool checkForIllegalChars )
{
m_site = directory;
m_checkForIllegalChars = checkForIllegalChars;
m_separatedSite = CreateSeparatedString(directory);
}
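// Split the directory on '/' into its path segments, ignoring empty segments, allowing a
// "*" wildcard only as the last segment and (optionally) rejecting segments that contain
// characters illegal in file or folder names.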
private ArrayList CreateSeparatedString(String directory)
{
if (directory == null || directory.Length == 0)
{
throw new ArgumentException(Environment.GetResourceString("Argument_InvalidDirectoryOnUrl"));
}
Contract.EndContractBlock();
ArrayList list = new ArrayList();
String[] separatedArray = directory.Split('/');
for (int index = 0; index < separatedArray.Length; ++index)
{
if (separatedArray[index] == null || separatedArray[index].Equals( "" ))
{
// this case is fine, we just ignore the extra separators.
}
else if (separatedArray[index].Equals( "*" ))
{
if (index != separatedArray.Length-1)
{
throw new ArgumentException(Environment.GetResourceString("Argument_InvalidDirectoryOnUrl"));
}
list.Add( separatedArray[index] );
}
else if (m_checkForIllegalChars && separatedArray[index].IndexOfAny( m_illegalDirectoryCharacters ) != -1)
{
throw new ArgumentException(Environment.GetResourceString("Argument_InvalidDirectoryOnUrl"));
}
else
{
list.Add( separatedArray[index] );
}
}
return list;
}
public virtual bool IsSubsetOf( DirectoryString operand )
{
return this.IsSubsetOf( operand, true );
}
public virtual bool IsSubsetOf( DirectoryString operand, bool ignoreCase )
{
if (operand == null)
{
return false;
}
else if (operand.m_separatedSite.Count == 0)
{
return this.m_separatedSite.Count == 0 || this.m_separatedSite.Count > 0 && String.Compare((String)this.m_separatedSite[0], "*", StringComparison.Ordinal) == 0;
}
else if (this.m_separatedSite.Count == 0)
{
return String.Compare((String)operand.m_separatedSite[0], "*", StringComparison.Ordinal) == 0;
}
else
{
return base.IsSubsetOf( operand, ignoreCase );
}
}
}
#if !PLATFORM_UNIX
[Serializable]
internal class LocalSiteString : SiteString
{
public LocalSiteString( String site )
{
m_site = site.Replace( '|', ':');
if (m_site.Length > 2 && m_site.IndexOf( ':' ) != -1)
throw new ArgumentException(Environment.GetResourceString("Argument_InvalidDirectoryOnUrl"));
m_separatedSite = CreateSeparatedString(m_site);
}
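// Split the local site on '/', with special handling for the leading slashes of UNC-style
// paths and a trailing "*" wildcard; empty segments elsewhere (other than a trailing one)
// are rejected.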
private ArrayList CreateSeparatedString(String directory)
{
if (directory == null || directory.Length == 0)
{
throw new ArgumentException(Environment.GetResourceString("Argument_InvalidDirectoryOnUrl"));
}
Contract.EndContractBlock();
ArrayList list = new ArrayList();
String[] separatedArray = directory.Split('/');
for (int index = 0; index < separatedArray.Length; ++index)
{
if (separatedArray[index] == null || separatedArray[index].Equals( "" ))
{
if (index < 2 &&
directory[index] == '/')
{
list.Add( "//" );
}
else if (index != separatedArray.Length-1)
{
throw new ArgumentException(Environment.GetResourceString("Argument_InvalidDirectoryOnUrl"));
}
}
else if (separatedArray[index].Equals( "*" ))
{
if (index != separatedArray.Length-1)
{
throw new ArgumentException(Environment.GetResourceString("Argument_InvalidDirectoryOnUrl"));
}
list.Add( separatedArray[index] );
}
else
{
list.Add( separatedArray[index] );
}
}
return list;
}
public virtual bool IsSubsetOf( LocalSiteString operand )
{
return this.IsSubsetOf( operand, true );
}
public virtual bool IsSubsetOf( LocalSiteString operand, bool ignoreCase )
{
if (operand == null)
{
return false;
}
else if (operand.m_separatedSite.Count == 0)
{
return this.m_separatedSite.Count == 0 || this.m_separatedSite.Count > 0 && String.Compare((String)this.m_separatedSite[0], "*", StringComparison.Ordinal) == 0;
}
else if (this.m_separatedSite.Count == 0)
{
return String.Compare((String)operand.m_separatedSite[0], "*", StringComparison.Ordinal) == 0;
}
else
{
return base.IsSubsetOf( operand, ignoreCase );
}
}
}
#endif // !PLATFORM_UNIX
}
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gagr = Google.Api.Gax.ResourceNames;
using gcav = Google.Cloud.AIPlatform.V1;
namespace Google.Cloud.AIPlatform.V1
{
public partial class CreateCustomJobRequest
{
/// <summary>
/// <see cref="gagr::LocationName"/>-typed view over the <see cref="Parent"/> resource name property.
/// </summary>
public gagr::LocationName ParentAsLocationName
{
get => string.IsNullOrEmpty(Parent) ? null : gagr::LocationName.Parse(Parent, allowUnparsed: true);
set => Parent = value?.ToString() ?? "";
}
}
public partial class GetCustomJobRequest
{
/// <summary>
/// <see cref="gcav::CustomJobName"/>-typed view over the <see cref="Name"/> resource name property.
/// </summary>
public gcav::CustomJobName CustomJobName
{
get => string.IsNullOrEmpty(Name) ? null : gcav::CustomJobName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
public partial class ListCustomJobsRequest
{
/// <summary>
/// <see cref="gagr::LocationName"/>-typed view over the <see cref="Parent"/> resource name property.
/// </summary>
public gagr::LocationName ParentAsLocationName
{
get => string.IsNullOrEmpty(Parent) ? null : gagr::LocationName.Parse(Parent, allowUnparsed: true);
set => Parent = value?.ToString() ?? "";
}
}
public partial class DeleteCustomJobRequest
{
/// <summary>
/// <see cref="gcav::CustomJobName"/>-typed view over the <see cref="Name"/> resource name property.
/// </summary>
public gcav::CustomJobName CustomJobName
{
get => string.IsNullOrEmpty(Name) ? null : gcav::CustomJobName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
public partial class CancelCustomJobRequest
{
/// <summary>
/// <see cref="gcav::CustomJobName"/>-typed view over the <see cref="Name"/> resource name property.
/// </summary>
public gcav::CustomJobName CustomJobName
{
get => string.IsNullOrEmpty(Name) ? null : gcav::CustomJobName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
public partial class CreateDataLabelingJobRequest
{
/// <summary>
/// <see cref="gagr::LocationName"/>-typed view over the <see cref="Parent"/> resource name property.
/// </summary>
public gagr::LocationName ParentAsLocationName
{
get => string.IsNullOrEmpty(Parent) ? null : gagr::LocationName.Parse(Parent, allowUnparsed: true);
set => Parent = value?.ToString() ?? "";
}
}
public partial class GetDataLabelingJobRequest
{
/// <summary>
/// <see cref="gcav::DataLabelingJobName"/>-typed view over the <see cref="Name"/> resource name property.
/// </summary>
public gcav::DataLabelingJobName DataLabelingJobName
{
get => string.IsNullOrEmpty(Name) ? null : gcav::DataLabelingJobName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
public partial class ListDataLabelingJobsRequest
{
/// <summary>
/// <see cref="gagr::LocationName"/>-typed view over the <see cref="Parent"/> resource name property.
/// </summary>
public gagr::LocationName ParentAsLocationName
{
get => string.IsNullOrEmpty(Parent) ? null : gagr::LocationName.Parse(Parent, allowUnparsed: true);
set => Parent = value?.ToString() ?? "";
}
}
public partial class DeleteDataLabelingJobRequest
{
/// <summary>
/// <see cref="gcav::DataLabelingJobName"/>-typed view over the <see cref="Name"/> resource name property.
/// </summary>
public gcav::DataLabelingJobName DataLabelingJobName
{
get => string.IsNullOrEmpty(Name) ? null : gcav::DataLabelingJobName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
public partial class CancelDataLabelingJobRequest
{
/// <summary>
/// <see cref="gcav::DataLabelingJobName"/>-typed view over the <see cref="Name"/> resource name property.
/// </summary>
public gcav::DataLabelingJobName DataLabelingJobName
{
get => string.IsNullOrEmpty(Name) ? null : gcav::DataLabelingJobName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
public partial class CreateHyperparameterTuningJobRequest
{
/// <summary>
/// <see cref="gagr::LocationName"/>-typed view over the <see cref="Parent"/> resource name property.
/// </summary>
public gagr::LocationName ParentAsLocationName
{
get => string.IsNullOrEmpty(Parent) ? null : gagr::LocationName.Parse(Parent, allowUnparsed: true);
set => Parent = value?.ToString() ?? "";
}
}
public partial class GetHyperparameterTuningJobRequest
{
/// <summary>
/// <see cref="gcav::HyperparameterTuningJobName"/>-typed view over the <see cref="Name"/> resource name
/// property.
/// </summary>
public gcav::HyperparameterTuningJobName HyperparameterTuningJobName
{
get => string.IsNullOrEmpty(Name) ? null : gcav::HyperparameterTuningJobName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
public partial class ListHyperparameterTuningJobsRequest
{
/// <summary>
/// <see cref="gagr::LocationName"/>-typed view over the <see cref="Parent"/> resource name property.
/// </summary>
public gagr::LocationName ParentAsLocationName
{
get => string.IsNullOrEmpty(Parent) ? null : gagr::LocationName.Parse(Parent, allowUnparsed: true);
set => Parent = value?.ToString() ?? "";
}
}
public partial class DeleteHyperparameterTuningJobRequest
{
/// <summary>
/// <see cref="gcav::HyperparameterTuningJobName"/>-typed view over the <see cref="Name"/> resource name
/// property.
/// </summary>
public gcav::HyperparameterTuningJobName HyperparameterTuningJobName
{
get => string.IsNullOrEmpty(Name) ? null : gcav::HyperparameterTuningJobName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
public partial class CancelHyperparameterTuningJobRequest
{
/// <summary>
/// <see cref="gcav::HyperparameterTuningJobName"/>-typed view over the <see cref="Name"/> resource name
/// property.
/// </summary>
public gcav::HyperparameterTuningJobName HyperparameterTuningJobName
{
get => string.IsNullOrEmpty(Name) ? null : gcav::HyperparameterTuningJobName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
public partial class CreateBatchPredictionJobRequest
{
/// <summary>
/// <see cref="gagr::LocationName"/>-typed view over the <see cref="Parent"/> resource name property.
/// </summary>
public gagr::LocationName ParentAsLocationName
{
get => string.IsNullOrEmpty(Parent) ? null : gagr::LocationName.Parse(Parent, allowUnparsed: true);
set => Parent = value?.ToString() ?? "";
}
}
public partial class GetBatchPredictionJobRequest
{
/// <summary>
/// <see cref="gcav::BatchPredictionJobName"/>-typed view over the <see cref="Name"/> resource name property.
/// </summary>
public gcav::BatchPredictionJobName BatchPredictionJobName
{
get => string.IsNullOrEmpty(Name) ? null : gcav::BatchPredictionJobName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
public partial class ListBatchPredictionJobsRequest
{
/// <summary>
/// <see cref="gagr::LocationName"/>-typed view over the <see cref="Parent"/> resource name property.
/// </summary>
public gagr::LocationName ParentAsLocationName
{
get => string.IsNullOrEmpty(Parent) ? null : gagr::LocationName.Parse(Parent, allowUnparsed: true);
set => Parent = value?.ToString() ?? "";
}
}
public partial class DeleteBatchPredictionJobRequest
{
/// <summary>
/// <see cref="gcav::BatchPredictionJobName"/>-typed view over the <see cref="Name"/> resource name property.
/// </summary>
public gcav::BatchPredictionJobName BatchPredictionJobName
{
get => string.IsNullOrEmpty(Name) ? null : gcav::BatchPredictionJobName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
public partial class CancelBatchPredictionJobRequest
{
/// <summary>
/// <see cref="gcav::BatchPredictionJobName"/>-typed view over the <see cref="Name"/> resource name property.
/// </summary>
public gcav::BatchPredictionJobName BatchPredictionJobName
{
get => string.IsNullOrEmpty(Name) ? null : gcav::BatchPredictionJobName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
public partial class CreateModelDeploymentMonitoringJobRequest
{
/// <summary>
/// <see cref="gagr::LocationName"/>-typed view over the <see cref="Parent"/> resource name property.
/// </summary>
public gagr::LocationName ParentAsLocationName
{
get => string.IsNullOrEmpty(Parent) ? null : gagr::LocationName.Parse(Parent, allowUnparsed: true);
set => Parent = value?.ToString() ?? "";
}
}
public partial class SearchModelDeploymentMonitoringStatsAnomaliesRequest
{
/// <summary>
/// <see cref="ModelDeploymentMonitoringJobName"/>-typed view over the
/// <see cref="ModelDeploymentMonitoringJob"/> resource name property.
/// </summary>
public ModelDeploymentMonitoringJobName ModelDeploymentMonitoringJobAsModelDeploymentMonitoringJobName
{
get => string.IsNullOrEmpty(ModelDeploymentMonitoringJob) ? null : ModelDeploymentMonitoringJobName.Parse(ModelDeploymentMonitoringJob, allowUnparsed: true);
set => ModelDeploymentMonitoringJob = value?.ToString() ?? "";
}
}
public partial class GetModelDeploymentMonitoringJobRequest
{
/// <summary>
/// <see cref="gcav::ModelDeploymentMonitoringJobName"/>-typed view over the <see cref="Name"/> resource name
/// property.
/// </summary>
public gcav::ModelDeploymentMonitoringJobName ModelDeploymentMonitoringJobName
{
get => string.IsNullOrEmpty(Name) ? null : gcav::ModelDeploymentMonitoringJobName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
public partial class ListModelDeploymentMonitoringJobsRequest
{
/// <summary>
/// <see cref="gagr::LocationName"/>-typed view over the <see cref="Parent"/> resource name property.
/// </summary>
public gagr::LocationName ParentAsLocationName
{
get => string.IsNullOrEmpty(Parent) ? null : gagr::LocationName.Parse(Parent, allowUnparsed: true);
set => Parent = value?.ToString() ?? "";
}
}
public partial class DeleteModelDeploymentMonitoringJobRequest
{
/// <summary>
/// <see cref="gcav::ModelDeploymentMonitoringJobName"/>-typed view over the <see cref="Name"/> resource name
/// property.
/// </summary>
public gcav::ModelDeploymentMonitoringJobName ModelDeploymentMonitoringJobName
{
get => string.IsNullOrEmpty(Name) ? null : gcav::ModelDeploymentMonitoringJobName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
public partial class PauseModelDeploymentMonitoringJobRequest
{
/// <summary>
/// <see cref="gcav::ModelDeploymentMonitoringJobName"/>-typed view over the <see cref="Name"/> resource name
/// property.
/// </summary>
public gcav::ModelDeploymentMonitoringJobName ModelDeploymentMonitoringJobName
{
get => string.IsNullOrEmpty(Name) ? null : gcav::ModelDeploymentMonitoringJobName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
public partial class ResumeModelDeploymentMonitoringJobRequest
{
/// <summary>
/// <see cref="gcav::ModelDeploymentMonitoringJobName"/>-typed view over the <see cref="Name"/> resource name
/// property.
/// </summary>
public gcav::ModelDeploymentMonitoringJobName ModelDeploymentMonitoringJobName
{
get => string.IsNullOrEmpty(Name) ? null : gcav::ModelDeploymentMonitoringJobName.Parse(Name, allowUnparsed: true);
set => Name = value?.ToString() ?? "";
}
}
}
//
// Copyright (c) 2004-2021 Jaroslaw Kowalski <[email protected]>, Kim Christensen, Julian Verdurmen
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions
// are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// * Neither the name of Jaroslaw Kowalski nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF
// THE POSSIBILITY OF SUCH DAMAGE.
//
using System;
using System.IO;
using System.Linq;
using NLog.Config;
using NLog.Layouts;
using NLog.Targets;
using Xunit;
namespace NLog.UnitTests.LayoutRenderers
{
public class VariableLayoutRendererTests : NLogTestBase
{
[Fact]
public void Var_from_xml()
{
// Arrange
var logFactory = CreateConfigFromXml();
var logger = logFactory.GetLogger("A");
// Act
logger.Debug("msg");
// Assert
logFactory.AssertDebugLastMessage("msg and admin=realgoodpassword");
Assert.Equal(2, logFactory.Configuration.Variables.Count);
Assert.Equal(2, logFactory.Configuration.Variables.Keys.Count);
Assert.Equal(2, logFactory.Configuration.Variables.Values.Count);
Assert.True(logFactory.Configuration.Variables.ContainsKey("uSeR"));
Assert.True(logFactory.Configuration.Variables.TryGetValue("passWORD", out var _));
}
[Fact]
public void Var_from_xml_and_edit()
{
// Arrange
var logFactory = CreateConfigFromXml();
var logger = logFactory.GetLogger("A");
// Act
logFactory.Configuration.Variables["password"] = "123";
logger.Debug("msg");
// Assert
logFactory.AssertDebugLastMessage("msg and admin=123");
}
[Fact]
public void Var_from_xml_and_clear()
{
// Arrange
var logFactory = CreateConfigFromXml();
var logger = logFactory.GetLogger("A");
// Act
logFactory.Configuration.Variables.Clear();
logger.Debug("msg");
// Assert
logFactory.AssertDebugLastMessage("msg and =");
}
[Fact]
public void Var_with_layout_renderers()
{
LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog throwExceptions='true'>
<variable name='user' value='logger=${logger}' />
<variable name='password' value='realgoodpassword' />
<targets>
<target name='debug' type='Debug' layout= '${message} and ${var:user}=${var:password}' /></targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
LogManager.Configuration.Variables["password"] = "123";
var logger = LogManager.GetLogger("A");
logger.Debug("msg");
var lastMessage = GetDebugLastMessage("debug");
Assert.Equal("msg and logger=A=123", lastMessage);
}
[Theory]
[InlineData("myJson", "${MyJson}")]
[InlineData("myJson", "${var:myJSON}")]
public void Var_with_layout(string variableName, string layoutStyle)
{
LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString($@"
<nlog throwExceptions='true'>
<variable name='{variableName}' >
<layout type='JsonLayout'>
<attribute name='short date' layout='${{level}}' />
<attribute name='message' layout='${{message}}' />
</layout>
</variable>
<targets>
<target name='debug' type='Debug' layout='{layoutStyle}' /></targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
var logger = LogManager.GetLogger("A");
logger.Debug("msg");
var lastMessage = GetDebugLastMessage("debug");
Assert.Equal("{ \"short date\": \"Debug\", \"message\": \"msg\" }", lastMessage);
}
[Fact]
public void Var_in_file_target()
{
string tempPath = Path.Combine(Path.GetTempPath(), Guid.NewGuid().ToString());
string logFilePath = Path.Combine(tempPath, "test.log");
LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString($@"
<nlog>
<variable name='dir' value='{tempPath}' />
<targets>
<target name='f' type='file' fileName='${{var:dir}}/test.log' layout='${{message}}' lineEnding='LF' />
</targets>
<rules>
<logger name='*' writeTo='f' />
</rules>
</nlog>");
try
{
LogManager.GetLogger("A").Debug("msg");
Assert.True(File.Exists(logFilePath), "Log file was not created at expected file path.");
AssertFileContents(logFilePath, "msg\n", System.Text.Encoding.UTF8);
}
finally
{
File.Delete(logFilePath);
}
}
[Fact]
public void Var_Layout_Target_CallSite()
{
var logFactory = new LogFactory().Setup()
.LoadConfigurationFromXml(@"<nlog throwExceptions='true'>
<variable name='myvar' value='${callsite}' />
<targets>
<target name='debug' type='Debug' layout='${var:myvar}' />
</targets>
<rules>
<logger name='*' minLevel='Debug' writeTo='debug' />
</rules>
</nlog>").LogFactory;
// Act
logFactory.GetCurrentClassLogger().Info("Hello");
// Assert
logFactory.AssertDebugLastMessage(GetType().ToString() + "." + nameof(Var_Layout_Target_CallSite));
}
[Fact]
public void Var_with_other_var()
{
LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog throwExceptions='true'>
<variable name='user' value='${var:password}=' />
<variable name='password' value='realgoodpassword' />
<targets>
<target name='debug' type='Debug' layout= '${message} and ${var:user}=${var:password}' /></targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
LogManager.Configuration.Variables["password"] = "123";
var logger = LogManager.GetLogger("A");
// LogManager.ReconfigExistingLoggers();
logger.Debug("msg");
var lastMessage = GetDebugLastMessage("debug");
Assert.Equal("msg and 123==123", lastMessage);
}
[Fact]
public void Var_from_api()
{
LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog throwExceptions='true'>
<targets>
<target name='debug' type='Debug' layout= '${message} and ${var:user}=${var:password}' /></targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
LogManager.Configuration.Variables["user"] = "admin";
LogManager.Configuration.Variables["password"] = "123";
var logger = LogManager.GetLogger("A");
logger.Debug("msg");
var lastMessage = GetDebugLastMessage("debug");
Assert.Equal("msg and admin=123", lastMessage);
}
[Fact]
public void Var_default()
{
LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog throwExceptions='true'>
<variable name='user' value='admin' />
<targets>
<target name='debug' type='Debug' layout= '${message} and ${var:user}=${var:password:default=unknown}' /></targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
var logger = LogManager.GetLogger("A");
logger.Debug("msg");
var lastMessage = GetDebugLastMessage("debug");
Assert.Equal("msg and admin=unknown", lastMessage);
}
[Fact]
public void Var_default_after_clear()
{
LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog throwExceptions='true'>
<variable name='user' value='admin' />
<variable name='password' value='realgoodpassword' />
<targets>
<target name='debug' type='Debug' layout= '${message} and ${var:user}=${var:password:default=unknown}' /></targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
var logger = LogManager.GetLogger("A");
LogManager.Configuration.Variables.Remove("password");
logger.Debug("msg");
var lastMessage = GetDebugLastMessage("debug");
Assert.Equal("msg and admin=unknown", lastMessage);
}
[Fact]
public void Var_default_after_set_null()
{
// Arrange
var logFactory = CreateConfigFromXml();
var logger = logFactory.GetLogger("A");
// Act
logFactory.Configuration.Variables["password"] = null;
logger.Debug("msg");
// Assert
logFactory.AssertDebugLastMessage("msg and admin=");
}
[Fact]
public void Var_default_after_set_emptyString()
{
// Arrange
var logFactory = CreateConfigFromXml();
var logger = logFactory.GetLogger("A");
// Act
logFactory.Configuration.Variables["password"] = "";
logger.Debug("msg");
// Assert
logFactory.AssertDebugLastMessage("msg and admin=");
}
[Fact]
public void Var_default_after_xml_emptyString()
{
LogManager.Configuration = XmlLoggingConfiguration.CreateFromXmlString(@"
<nlog throwExceptions='true'>
<variable name='user' value='admin' />
<variable name='password' value='' />
<targets>
<target name='debug' type='Debug' layout= '${message} and ${var:user}=${var:password}' /></targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>");
var logger = LogManager.GetLogger("A");
logger.Debug("msg");
var lastMessage = GetDebugLastMessage("debug");
Assert.Equal("msg and admin=", lastMessage);
}
[Fact]
public void null_should_be_ok()
{
Layout l = "${var:var1}";
var config = new LoggingConfiguration();
config.Variables["var1"] = null;
l.Initialize(config);
var result = l.Render(LogEventInfo.CreateNullEvent());
Assert.Equal("", result);
}
[Fact]
public void null_should_not_use_default()
{
Layout l = "${var:var1:default=x}";
var config = new LoggingConfiguration();
config.Variables["var1"] = null;
l.Initialize(config);
var result = l.Render(LogEventInfo.CreateNullEvent());
Assert.Equal("", result);
}
[Fact]
public void notset_should_use_default()
{
Layout l = "${var:var1:default=x}";
var config = new LoggingConfiguration();
l.Initialize(config);
var result = l.Render(LogEventInfo.CreateNullEvent());
Assert.Equal("x", result);
}
[Fact]
public void test_with_mockLogManager()
{
LogFactory logFactory = new LogFactory();
var logConfig = new LoggingConfiguration();
var debugTarget = new DebugTarget
{
Name = "t1",
Layout = "${message}|${var:var1:default=x}"
};
logConfig.AddRuleForAllLevels(debugTarget);
logConfig.Variables["var1"] = "my-mocking-manager";
logFactory.Configuration = logConfig;
var logger = logFactory.GetLogger("A");
logger.Debug("msg");
Assert.Equal("msg|my-mocking-manager", debugTarget.LastMessage);
}
private LogFactory CreateConfigFromXml()
{
return new LogFactory().Setup().LoadConfigurationFromXml(@"
<nlog throwExceptions='true'>
<variable name='user' value='admin' />
<variable name='password' value='realgoodpassword' />
<targets>
<target name='debug' type='Debug' layout= '${message} and ${var:user}=${var:password}' /></targets>
<rules>
<logger name='*' minlevel='Debug' writeTo='debug' />
</rules>
</nlog>").LogFactory;
}
}
}
using System;
namespace PalmeralGenNHibernate.EN.Default_
{
public partial class ClienteEN
{
/**
*
*/
private string nif;
/**
*
*/
private string nombre;
/**
*
*/
private string descripcion;
/**
*
*/
private string email;
/**
*
*/
private string localidad;
/**
*
*/
private string provincia;
/**
*
*/
private string pais;
/**
*
*/
private string direccion;
/**
*
*/
private string codigoPostal;
/**
*
*/
private string telefono;
/**
*
*/
private System.Collections.Generic.IList<PalmeralGenNHibernate.EN.Default_.InstalacionEN> instalaciones;
public virtual string Nif {
get { return nif; } set { nif = value; }
}
public virtual string Nombre {
get { return nombre; } set { nombre = value; }
}
public virtual string Descripcion {
get { return descripcion; } set { descripcion = value; }
}
public virtual string Email {
get { return email; } set { email = value; }
}
public virtual string Localidad {
get { return localidad; } set { localidad = value; }
}
public virtual string Provincia {
get { return provincia; } set { provincia = value; }
}
public virtual string Pais {
get { return pais; } set { pais = value; }
}
public virtual string Direccion {
get { return direccion; } set { direccion = value; }
}
public virtual string CodigoPostal {
get { return codigoPostal; } set { codigoPostal = value; }
}
public virtual string Telefono {
get { return telefono; } set { telefono = value; }
}
public virtual System.Collections.Generic.IList<PalmeralGenNHibernate.EN.Default_.InstalacionEN> Instalaciones {
get { return instalaciones; } set { instalaciones = value; }
}
public ClienteEN()
{
instalaciones = new System.Collections.Generic.List<PalmeralGenNHibernate.EN.Default_.InstalacionEN>();
}
public ClienteEN(string nif, string nombre, string descripcion, string email, string localidad, string provincia, string pais, string direccion, string codigoPostal, string telefono, System.Collections.Generic.IList<PalmeralGenNHibernate.EN.Default_.InstalacionEN> instalaciones)
{
this.init (nif, nombre, descripcion, email, localidad, provincia, pais, direccion, codigoPostal, telefono, instalaciones);
}
public ClienteEN(ClienteEN cliente)
{
this.init (cliente.Nif, cliente.Nombre, cliente.Descripcion, cliente.Email, cliente.Localidad, cliente.Provincia, cliente.Pais, cliente.Direccion, cliente.CodigoPostal, cliente.Telefono, cliente.Instalaciones);
}
private void init (string nif, string nombre, string descripcion, string email, string localidad, string provincia, string pais, string direccion, string codigoPostal, string telefono, System.Collections.Generic.IList<PalmeralGenNHibernate.EN.Default_.InstalacionEN> instalaciones)
{
this.Nif = nif;
this.Nombre = nombre;
this.Descripcion = descripcion;
this.Email = email;
this.Localidad = localidad;
this.Provincia = provincia;
this.Pais = pais;
this.Direccion = direccion;
this.CodigoPostal = codigoPostal;
this.Telefono = telefono;
this.Instalaciones = instalaciones;
}
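        // Entity identity is based solely on Nif (the tax id); Equals and GetHashCode
        // below assume Nif is non-null and unique per client.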
        public override bool Equals (object obj)
        {
                ClienteEN t = obj as ClienteEN;
                if (t == null)
                        return false;
                return Nif.Equals (t.Nif);
        }
public override int GetHashCode ()
{
int hash = 13;
hash += this.Nif.GetHashCode ();
return hash;
}
}
}
| |
using UnityEngine;
using System.Collections;
public delegate void CubeAtlasUVManagerPropertyChange(string propertyName);
// Works only with Unity's built-in cube mesh.
[ExecuteInEditMode]
public class CubeAtlasUVManager : MonoBehaviour {
public CubeAtlasUVManagerPropertyChange onCubeAtlasUVManagerPropertyChange;
[SerializeField]
private int _rowCount=4;
[SerializeField]
private int _columnCount=4;
[SerializeField]
private UVPosition _front=new UVPosition(0,0);
[SerializeField]
private UVPosition _back=new UVPosition(0,0);
[SerializeField]
private UVPosition _left=new UVPosition(0,0);
[SerializeField]
private UVPosition _right=new UVPosition(0,0);
[SerializeField]
private UVPosition _top=new UVPosition(0,0);
[SerializeField]
private UVPosition _bottom=new UVPosition(0,0);
public UVPosition back {
get {
return this._back;
}
set {
value.onUVPositionPropertyChange +=broadcastPropetyChange;
_back = value;
}
}
public UVPosition bottom {
get {
return this._bottom;
}
set {
value.onUVPositionPropertyChange+=broadcastPropetyChange;
_bottom = value;
}
}
public UVPosition front {
get {
return this._front;
}
set {
value.onUVPositionPropertyChange+=broadcastPropetyChange;
_front = value;
}
}
public UVPosition left {
get {
return this._left;
}
set {
value.onUVPositionPropertyChange+=broadcastPropetyChange;
_left = value;
}
}
public UVPosition right {
get {
return this._right;
}
set {
value.onUVPositionPropertyChange+=broadcastPropetyChange;
_right = value;
}
}
public UVPosition top {
get {
return this._top;
}
set {
value.onUVPositionPropertyChange+=broadcastPropetyChange;
_top = value;
}
}
	public int rowCount {
		get {
			return this._rowCount;
		}
		set {
			if (value<=0){
				Debug.LogWarning("rowCount cannot be less than 1; assigning 1 instead");
				value=1;
			}
			_rowCount = value;
			broadcastPropetyChange("rowCount");
		}
	}
	public int columnCount {
		get {
			return this._columnCount;
		}
		set {
			if (value<=0){
				Debug.LogWarning("columnCount cannot be less than 1; assigning 1 instead");
				value=1;
			}
			_columnCount = value;
			broadcastPropetyChange("columnCount");
		}
	}
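	// Triangle vertex indices for each face of Unity's built-in cube mesh
	// (two triangles per face, referencing the mesh's 24 vertices).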
public static int[] backIndexes=new int[]{1,0,3,3,0,2};
public static int[] topIndexes=new int[]{9,8,5,5,8,4};
public static int[] frontIndexes=new int[]{11,10,7,7,10,6};
public static int[] bottomIndexes=new int[]{14,12,13,13,12,15};
public static int[] leftIndexes=new int[]{18,16,17,17,16,19};
public static int[] rightIndexes=new int[]{22,20,21,21,20,23};
public float uvScaleX{
get{
return 1f/_columnCount;
}
}
public float uvScaleY{
get{
return 1f/_rowCount;
}
}
void Awake(){
onCubeAtlasUVManagerPropertyChange+=onPropertyChangeListener;
}
public void randomizeFaces(int atlasIdLimit){
UVPosition[] positions=new UVPosition[]{_front,_back,_left,_right,_top,_bottom};
for (int i=0;i<positions.Length;i++){
int face=Random.Range(0,atlasIdLimit);
int rotation=Random.Range(0,4);
positions[i].position=face;
positions[i].rotation=rotation;
}
updateMesh();
}
private void onPropertyChangeListener(string propertyName){
updateMesh();
}
public void updateMesh ()
{
Mesh mesh = getMesh ();
Vector2[] uvs = mesh.uv;
updateUVs(ref uvs,backIndexes,back);
updateUVs(ref uvs,topIndexes,top);
updateUVs(ref uvs,frontIndexes,front);
updateUVs(ref uvs,bottomIndexes,bottom);
updateUVs(ref uvs,leftIndexes,left);
updateUVs(ref uvs,rightIndexes,right);
mesh.uv=uvs;
}
Mesh getMesh ()
{
Mesh result=new Mesh();
if (Application.isPlaying){
result = gameObject.GetComponent<MeshFilter>().mesh;
}else{
result=gameObject.GetComponent<MeshFilter>().sharedMesh;
}
return result;
}
private void updateUVs(ref Vector2[] uv,int[] indexes, UVPosition uvPosition){
int y=rowCount - uvPosition.position/columnCount-1;
int x=uvPosition.position%columnCount;
Vector2 botLeft=new Vector2(x*uvScaleX,(y+1)*uvScaleY);
Vector2 botRight=new Vector2((x+1)*uvScaleX,(y+1)*uvScaleY);
Vector2 topRight=new Vector2((x+1)*uvScaleX,y*uvScaleY);
Vector2 topLeft=new Vector2(x*uvScaleX,y*uvScaleY);
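		// Duplicate the four corners so the rotation value (0-3) can be applied
		// as a simple index offset without wrapping.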
Vector2[] rotationArray=new Vector2[8];
rotationArray[0]=topLeft;
rotationArray[1]=topRight;
rotationArray[2]=botRight;
rotationArray[3]=botLeft;
rotationArray[4]=topLeft;
rotationArray[5]=topRight;
rotationArray[6]=botRight;
rotationArray[7]=botLeft;
topLeft=rotationArray[uvPosition.rotation];
topRight=rotationArray[uvPosition.rotation+1];
botRight=rotationArray[uvPosition.rotation+2];
botLeft=rotationArray[uvPosition.rotation+3];
uv[indexes[0]]=botLeft;
uv[indexes[1]]=botRight;
uv[indexes[2]]=topLeft;
uv[indexes[3]]=topLeft;
uv[indexes[4]]=botRight;
uv[indexes[5]]=topRight;
}
void broadcastPropetyChange(string propertyName){
if(onCubeAtlasUVManagerPropertyChange!=null)
onCubeAtlasUVManagerPropertyChange(propertyName);
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Orleans;
using Orleans.GrainDirectory;
using Orleans.Runtime;
using TestGrainInterfaces;
using Orleans.Runtime.Configuration;
using Orleans.TestingHost;
using Xunit;
using Xunit.Abstractions;
using Tester;
// ReSharper disable InconsistentNaming
namespace Tests.GeoClusterTests
{
    // We use ClientWrapper to load a client object in a new app domain.
    // This allows us to create multiple clients that are connected to different silos.
[TestCategory("GeoCluster")]
public class GlobalSingleInstanceClusterTests : TestingClusterHost
{
public GlobalSingleInstanceClusterTests(ITestOutputHelper output) : base(output)
{
}
/// <summary>
/// Run all tests on a small configuration (two clusters, one silo each, one client each)
/// </summary>
/// <returns></returns>
[SkippableFact(Skip="https://github.com/dotnet/orleans/issues/4281"), TestCategory("Functional")]
public async Task All_Small()
{
await Setup_Clusters(false);
numGrains = 600;
await RunWithTimeout("IndependentCreation", 5000, IndependentCreation);
await RunWithTimeout("CreationRace", 10000, CreationRace);
await RunWithTimeout("ConflictResolution", 40000, ConflictResolution);
}
/// <summary>
/// Run all tests on a larger configuration (two clusters with 3 or 4 silos, respectively, and two clients each)
/// </summary>
/// <returns></returns>
[SkippableFact]
public async Task All_Large()
{
await Setup_Clusters(true);
numGrains = 2000;
await RunWithTimeout("IndependentCreation", 20000, IndependentCreation);
await RunWithTimeout("CreationRace", 60000, CreationRace);
await RunWithTimeout("ConflictResolution", 120000, ConflictResolution);
}
public class ClientWrapper : ClientWrapperBase
{
            public static readonly Func<string, int, string, Action<ClientConfiguration>, Action<IClientBuilder>, ClientWrapper> Factory =
                (name, gwPort, clusterId, configUpdater, clientConfigurator) => new ClientWrapper(name, gwPort, clusterId, configUpdater, clientConfigurator);
public ClientWrapper(string name, int gatewayport, string clusterId, Action<ClientConfiguration> customizer, Action<IClientBuilder> clientConfigurator) : base(name, gatewayport, clusterId, customizer, clientConfigurator)
{
this.systemManagement = this.GrainFactory.GetGrain<IManagementGrain>(0);
}
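            // Calls the test grain; SayHelloAsync returns a per-activation counter,
            // which the tests use to detect whether calls landed on the same activation.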
public int CallGrain(int i)
{
var grainRef = this.GrainFactory.GetGrain<IClusterTestGrain>(i);
Task<int> toWait = grainRef.SayHelloAsync();
toWait.Wait();
return toWait.GetResult();
}
public void InjectMultiClusterConf(params string[] args)
{
systemManagement.InjectMultiClusterConfiguration(args).GetResult();
}
IManagementGrain systemManagement;
}
private Random random = new Random();
private int numGrains;
private string cluster0;
private string cluster1;
private ClientWrapper[] clients;
private async Task Setup_Clusters(bool largesetup)
{
await RunWithTimeout("Setup_Clusters", largesetup ? 120000 : 60000, async () =>
{
// use a random global service id for testing purposes
var globalserviceid = Guid.NewGuid();
Action<ClusterConfiguration> configurationcustomizer = (ClusterConfiguration c) =>
{
// run the retry process every 5 seconds to keep this test shorter
c.Globals.GlobalSingleInstanceRetryInterval = TimeSpan.FromSeconds(5);
};
// Create two clusters, each with a single silo.
cluster0 = "cluster0";
cluster1 = "cluster1";
NewGeoCluster(globalserviceid, cluster0, (short)(largesetup ? 3 : 1), configurationcustomizer);
NewGeoCluster(globalserviceid, cluster1, (short)(largesetup ? 4 : 1), configurationcustomizer);
if (!largesetup)
{
// Create one client per cluster
clients = new ClientWrapper[]
{
NewClient<ClientWrapper>(cluster0, 0, ClientWrapper.Factory),
NewClient<ClientWrapper>(cluster1, 0, ClientWrapper.Factory),
};
}
else
{
clients = new ClientWrapper[]
{
NewClient<ClientWrapper>(cluster0, 0, ClientWrapper.Factory),
NewClient<ClientWrapper>(cluster1, 0, ClientWrapper.Factory),
NewClient<ClientWrapper>(cluster0, 1, ClientWrapper.Factory),
NewClient<ClientWrapper>(cluster1, 1, ClientWrapper.Factory),
};
}
await WaitForLivenessToStabilizeAsync();
// Configure multicluster
clients[0].InjectMultiClusterConf(cluster0, cluster1);
await WaitForMultiClusterGossipToStabilizeAsync(false);
});
}
private Task IndependentCreation()
{
int offset = random.Next();
int base_own0 = GetGrainsInClusterWithStatus(cluster0, GrainDirectoryEntryStatus.Owned).Count;
int base_own1 = GetGrainsInClusterWithStatus(cluster1, GrainDirectoryEntryStatus.Owned).Count;
int base_requested0 = GetGrainsInClusterWithStatus(cluster0, GrainDirectoryEntryStatus.RequestedOwnership).Count;
int base_requested1 = GetGrainsInClusterWithStatus(cluster1, GrainDirectoryEntryStatus.RequestedOwnership).Count;
int base_doubtful0 = GetGrainsInClusterWithStatus(cluster0, GrainDirectoryEntryStatus.Doubtful).Count;
int base_doubtful1 = GetGrainsInClusterWithStatus(cluster1, GrainDirectoryEntryStatus.Doubtful).Count;
WriteLog("Counts: Cluster 0 => Owned={0} Requested={1} Doubtful={2}", base_own0, base_requested0, base_doubtful0);
WriteLog("Counts: Cluster 1 => Owned={0} Requested={1} Doubtful={2}", base_own1, base_requested1, base_doubtful1);
WriteLog("Starting parallel creation of {0} grains", numGrains);
// Create grains on both clusters, using clients round-robin.
Parallel.For(0, numGrains, paralleloptions, i =>
{
int val = clients[i % clients.Count()].CallGrain(offset + i);
Assert.Equal(1, val);
});
// We expect all requests to resolve, and all created activations are in state OWNED
int own0 = GetGrainsInClusterWithStatus(cluster0, GrainDirectoryEntryStatus.Owned).Count;
int own1 = GetGrainsInClusterWithStatus(cluster1, GrainDirectoryEntryStatus.Owned).Count;
int doubtful0 = GetGrainsInClusterWithStatus(cluster0, GrainDirectoryEntryStatus.Doubtful).Count;
int doubtful1 = GetGrainsInClusterWithStatus(cluster1, GrainDirectoryEntryStatus.Doubtful).Count;
int requested0 = GetGrainsInClusterWithStatus(cluster0, GrainDirectoryEntryStatus.RequestedOwnership).Count;
int requested1 = GetGrainsInClusterWithStatus(cluster1, GrainDirectoryEntryStatus.RequestedOwnership).Count;
WriteLog("Counts: Cluster 0 => Owned={0} Requested={1} Doubtful={2}", own0, requested0, doubtful0);
WriteLog("Counts: Cluster 1 => Owned={0} Requested={1} Doubtful={2}", own1, requested1, doubtful1);
// Assert that all grains are in owned state
Assert.Equal(numGrains / 2, own0 - base_own0);
Assert.Equal(numGrains / 2, own1 - base_own1);
Assert.Equal(doubtful0, base_doubtful0);
Assert.Equal(doubtful1, base_doubtful1);
Assert.Equal(requested0, base_requested0);
Assert.Equal(requested1, base_requested1);
return Task.CompletedTask;
}
// This test is for the case where two different clusters are racing,
// trying to activate the same grain.
// This function takes two arguments, a list of client configurations, and an integer. The list of client configurations is used to
// create multiple clients that concurrently call the grains in range [0, numGrains). We run the experiment in a series of barriers.
// The clients all invoke grain "g", in parallel, and then wait on a signal by the main thread (this function). The main thread, then
// wakes up the clients, after which they invoke "g+1", and so on.
private Task CreationRace()
{
WriteLog("Starting ConcurrentCreation");
var offset = random.Next();
// take inventory now so we can exclude pre-existing entries from the validation
var baseline = GetGrainActivations();
// We use two objects to coordinate client threads and the main thread. coordWakeup is an object that is used to signal the coordinator
// thread. toWait is used to signal client threads.
var coordWakeup = new object();
var toWait = new object();
// We keep a list of client threads.
var clientThreads = new List<Tuple<Thread, ClientThreadArgs>>();
var rand = new Random();
var results = new List<Tuple<int, int>>[clients.Length];
threadsDone = results.Length;
int index = 0;
// Create a client thread corresponding to each client
// The client thread will execute ThreadFunc function.
foreach (var client in clients)
{
                // A client thread takes a list of Tuple<int, int> as argument. The list is an ordered sequence of grains to invoke. tuple.Item2
                // is the grainId. tuple.Item1 is never used (this should probably be cleaned up, but I don't want to break anything :).
var args = new List<Tuple<int, int>>();
for (int j = 0; j < numGrains; ++j)
{
var waitTime = rand.Next(16, 100);
args.Add(Tuple.Create(waitTime, j));
}
                // Each client already runs in its own app domain (created in Setup_Clusters); here we create a thread on which that client's calls will run.
// The thread takes a "ClientThreadArgs" as argument.
var thread = new Thread(ThreadFunc)
{
IsBackground = true,
Name = $"{this.GetType()}.{nameof(CreationRace)}"
};
var threadFuncArgs = new ClientThreadArgs
{
client = client,
args = args,
resultIndex = index,
numThreads = clients.Length,
coordWakeup = coordWakeup,
toWait = toWait,
results = results,
offset = offset
};
clientThreads.Add(Tuple.Create(thread, threadFuncArgs));
index += 1;
}
// Go through the list of client threads, and start each of the threads with the appropriate arguments.
foreach (var threadNArg in clientThreads)
{
var thread = threadNArg.Item1;
var arg = threadNArg.Item2;
thread.Start(arg);
}
// We run numGrains iterations of the experiment. The coordinator thread calls the function "WaitForWorkers" in order to wait
// for the client threads to finish concurrent calls to a single grain.
for (int i = 0; i < numGrains; ++i)
{
WaitForWorkers(clients.Length, coordWakeup, toWait);
}
// Once the clients threads have finished calling the grain the appropriate number of times, we wait for them to write out their results.
foreach (var threadNArg in clientThreads)
{
var thread = threadNArg.Item1;
thread.Join();
}
var grains = GetGrainActivations(baseline);
ValidateClusterRaceResults(results, grains);
return Task.CompletedTask;
}
private volatile int threadsDone;
private void ValidateClusterRaceResults(List<Tuple<int, int>>[] results, Dictionary<GrainId, List<IActivationInfo>> grains)
{
WriteLog("Validating cluster race results");
// there should be the right number of grains
AssertEqual(numGrains, grains.Count, "number of grains in directory does not match");
// each grain should have one activation per cluster
foreach (var kvp in grains)
{
GrainId key = kvp.Key;
List<IActivationInfo> activations = kvp.Value;
Action error = () =>
{
Assert.True(false, string.Format("grain {0} has wrong activations {1}",
key, string.Join(",", activations.Select(x =>
string.Format("{0}={1}", x.SiloAddress, x.RegistrationStatus)))));
};
// each grain has one activation per cluster
if (activations.Count != 2)
error();
// one should be owned and the other cached
switch (activations[0].RegistrationStatus)
{
case GrainDirectoryEntryStatus.Owned:
if (activations[1].RegistrationStatus != GrainDirectoryEntryStatus.Cached)
error();
break;
case GrainDirectoryEntryStatus.Cached:
if (activations[1].RegistrationStatus != GrainDirectoryEntryStatus.Owned)
error();
break;
default:
error();
break;
}
}
// For each of the results that get, ensure that we see a sequence of values.
foreach (var list in results)
Assert.Equal(numGrains, list.Count);
for (int i = 0; i < numGrains; ++i)
{
var vals = new List<int>();
foreach (var list in results)
vals.Add(list[i].Item2);
vals.Sort();
for (int x = 0; x < results.Length; x++)
AssertEqual(x + 1, vals[x], "expect sequence of results, but got " + string.Join(",", vals));
}
}
// This test is used to test the case where two different clusters are racing,
// trying to activate the same grain, but inter-cluster communication is blocked
// so they both activate an instance
// and one of them deactivated once communication is unblocked
private async Task ConflictResolution()
{
int offset = random.Next();
int base_own0 = GetGrainsInClusterWithStatus(cluster0, GrainDirectoryEntryStatus.Owned).Count;
int base_own1 = GetGrainsInClusterWithStatus(cluster1, GrainDirectoryEntryStatus.Owned).Count;
int base_requested0 = GetGrainsInClusterWithStatus(cluster0, GrainDirectoryEntryStatus.RequestedOwnership).Count;
int base_requested1 = GetGrainsInClusterWithStatus(cluster1, GrainDirectoryEntryStatus.RequestedOwnership).Count;
int base_doubtful0 = GetGrainsInClusterWithStatus(cluster0, GrainDirectoryEntryStatus.Doubtful).Count;
int base_doubtful1 = GetGrainsInClusterWithStatus(cluster1, GrainDirectoryEntryStatus.Doubtful).Count;
int base_cached0 = GetGrainsInClusterWithStatus(cluster0, GrainDirectoryEntryStatus.Cached).Count;
int base_cached1 = GetGrainsInClusterWithStatus(cluster1, GrainDirectoryEntryStatus.Cached).Count;
WriteLog("Counts: Cluster 0 => Owned={0} Requested={1} Doubtful={2} Cached={3}", base_own0, base_requested0, base_doubtful0, base_cached0);
WriteLog("Counts: Cluster 1 => Owned={0} Requested={1} Doubtful={2} Cached={3}", base_own1, base_requested1, base_doubtful1, base_cached1);
// take inventory now so we can exclude pre-existing entries from the validation
var baseline = GetGrainActivations();
// Turn off intercluster messaging to simulate a partition.
BlockAllClusterCommunication(cluster0, cluster1);
BlockAllClusterCommunication(cluster1, cluster0);
WriteLog("Starting creation of {0} grains on isolated clusters", numGrains);
Parallel.For(0, numGrains, paralleloptions, i =>
{
int res0, res1, res2, res3;
if (i % 2 == 1)
{
res0 = clients[0].CallGrain(offset + i);
res1 = clients[1].CallGrain(offset + i);
res2 = clients[2 % clients.Length].CallGrain(offset + i);
res3 = clients[3 % clients.Length].CallGrain(offset + i);
}
else
{
res0 = clients[1].CallGrain(offset + i);
res1 = clients[0].CallGrain(offset + i);
res2 = clients[0].CallGrain(offset + i);
res3 = clients[1].CallGrain(offset + i);
}
Assert.Equal(1, res0);
Assert.Equal(1, res1);
Assert.Equal(2, res2);
Assert.Equal(2, res3);
});
// Validate that all the created grains are in DOUBTFUL, one activation in each cluster.
Assert.True(GetGrainsInClusterWithStatus(cluster0, GrainDirectoryEntryStatus.Doubtful).Count == numGrains);
Assert.True(GetGrainsInClusterWithStatus(cluster1, GrainDirectoryEntryStatus.Doubtful).Count == numGrains);
WriteLog("Restoring inter-cluster communication");
// Turn on intercluster messaging and wait for the resolution to kick in.
UnblockAllClusterCommunication(cluster0);
UnblockAllClusterCommunication(cluster1);
// Wait for anti-entropy to kick in.
// One of the DOUBTFUL activations must be killed, and the other must be converted to OWNED.
await Task.Delay(TimeSpan.FromSeconds(7));
WriteLog("Validation of conflict resolution");
int own0 = GetGrainsInClusterWithStatus(cluster0, GrainDirectoryEntryStatus.Owned).Count;
int own1 = GetGrainsInClusterWithStatus(cluster1, GrainDirectoryEntryStatus.Owned).Count;
int doubtful0 = GetGrainsInClusterWithStatus(cluster0, GrainDirectoryEntryStatus.Doubtful).Count;
int doubtful1 = GetGrainsInClusterWithStatus(cluster1, GrainDirectoryEntryStatus.Doubtful).Count;
int requested0 = GetGrainsInClusterWithStatus(cluster0, GrainDirectoryEntryStatus.RequestedOwnership).Count;
int requested1 = GetGrainsInClusterWithStatus(cluster1, GrainDirectoryEntryStatus.RequestedOwnership).Count;
int cached0 = GetGrainsInClusterWithStatus(cluster0, GrainDirectoryEntryStatus.Cached).Count;
int cached1 = GetGrainsInClusterWithStatus(cluster1, GrainDirectoryEntryStatus.Cached).Count;
WriteLog("Counts: Cluster 0 => Owned={0} Requested={1} Doubtful={2} Cached={3}", own0, requested0, doubtful0, cached0);
WriteLog("Counts: Cluster 1 => Owned={0} Requested={1} Doubtful={2} Cached={3}", own1, requested1, doubtful1, cached1);
AssertEqual(numGrains + base_own0 + base_own1, own0 + own1, "Expecting All are now Owned");
AssertEqual(numGrains, cached0 + cached1 - base_cached0 - base_cached1, "Expecting All Owned have a cached in the other cluster");
AssertEqual(0, doubtful0 + doubtful1 - base_doubtful0 - base_doubtful1, "Expecting No Doubtful");
Assert.Equal(requested0, base_requested0);
Assert.Equal(requested1, base_requested1);
// each grain should have one activation per cluster
var grains = GetGrainActivations(baseline);
foreach (var kvp in grains)
{
GrainId key = kvp.Key;
List<IActivationInfo> activations = kvp.Value;
Action error = () =>
{
Assert.True(false, string.Format("grain {0} has wrong activations {1}",
key, string.Join(",", activations.Select(x =>
string.Format("{0}={1}", x.SiloAddress, x.RegistrationStatus)))));
};
// each grain has one activation per cluster
if (activations.Count != 2)
error();
// one should be owned and the other cached
switch (activations[0].RegistrationStatus)
{
case GrainDirectoryEntryStatus.Owned:
if (activations[1].RegistrationStatus != GrainDirectoryEntryStatus.Cached)
error();
break;
case GrainDirectoryEntryStatus.Cached:
if (activations[1].RegistrationStatus != GrainDirectoryEntryStatus.Owned)
error();
break;
default:
error();
break;
}
}
// ensure that the grain whose DOUBTFUL activation was killed,
// now refers to the 'real' remote OWNED activation.
for (int i = 0; i < numGrains; i++)
{
int res0, res1, res2, res3;
if (i % 2 == 1)
{
res0 = clients[0].CallGrain(offset + i);
res1 = clients[1].CallGrain(offset + i);
res2 = clients[2 % clients.Length].CallGrain(offset + i);
res3 = clients[3 % clients.Length].CallGrain(offset + i);
}
else
{
res0 = clients[1].CallGrain(offset + i);
res1 = clients[0].CallGrain(offset + i);
res2 = clients[0].CallGrain(offset + i);
res3 = clients[1].CallGrain(offset + i);
}
//From the previous grain calls, the last value of the counter in each grain was 2.
//So here should be sequenced from 3.
Assert.Equal(3, res0);
Assert.Equal(4, res1);
Assert.Equal(5, res2);
Assert.Equal(6, res3);
}
}
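        // Enumerates the grain directory of every active silo in the given cluster and returns the ids of
        // ClusterTestGrain entries whose registration status matches 'status' (or all entries when status is null).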
private List<GrainId> GetGrainsInClusterWithStatus(string clusterId, GrainDirectoryEntryStatus? status = null)
{
List<GrainId> grains = new List<GrainId>();
var silos = Clusters[clusterId].Cluster.GetActiveSilos();
int totalSoFar = 0;
foreach (var silo in silos)
{
var hooks = ((AppDomainSiloHandle)silo).AppDomainTestHook;
var dir = hooks.GetDirectoryForTypeNamesContaining("ClusterTestGrain");
foreach (var grainKeyValue in dir)
{
GrainId grainId = grainKeyValue.Key;
IGrainInfo grainInfo = grainKeyValue.Value;
ActivationId actId = grainInfo.Instances.First().Key;
IActivationInfo actInfo = grainInfo.Instances[actId];
if (grainId.IsSystemTarget || grainId.IsClient || !grainId.IsGrain)
{
// Skip system grains, system targets and clients
// which never go through cluster-single-instance registration process
continue;
}
if (!status.HasValue || actInfo.RegistrationStatus == status)
{
grains.Add(grainId);
}
}
WriteLog("Returning: Silo {0} State = {1} Count = {2}", silo.SiloAddress, status.HasValue ? status.Value.ToString() : "ANY", (grains.Count - totalSoFar));
totalSoFar = grains.Count;
}
WriteLog("Returning: Cluster {0} State = {1} Count = {2}", clusterId, status.HasValue ? status.Value.ToString() : "ANY", grains.Count);
return grains;
}
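        // Collects all ClusterTestGrain directory entries across every cluster, keyed by grain id,
        // optionally excluding grains already present in the 'exclude' baseline.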
private Dictionary<GrainId, List<IActivationInfo>> GetGrainActivations(Dictionary<GrainId, List<IActivationInfo>> exclude = null)
{
var grains = new Dictionary<GrainId, List<IActivationInfo>>();
int instanceCount = 0;
foreach (var kvp in Clusters)
{
foreach (var silo in kvp.Value.Silos)
{
var hooks = ((AppDomainSiloHandle) silo).AppDomainTestHook;
var dir = hooks.GetDirectoryForTypeNamesContaining("ClusterTestGrain");
foreach (var grainKeyValue in dir)
{
GrainId grainId = grainKeyValue.Key;
IGrainInfo grainInfo = grainKeyValue.Value;
if (exclude != null && exclude.ContainsKey(grainId))
continue;
if (!grains.TryGetValue(grainId, out var activations))
grains[grainId] = activations = new List<IActivationInfo>();
foreach (var instanceInfo in grainInfo.Instances)
{
activations.Add(instanceInfo.Value);
instanceCount++;
}
}
}
}
WriteLog("Returning: {0} instances for {1} grains", instanceCount, grains.Count());
return grains;
}
// This is a helper function which is used to run the race condition tests. This function waits for all client threads trying to create the
// same activation to finish. The last client thread to finish will wakeup the coordinator thread.
private void WaitForCoordinator(int numThreads, object coordWakeup, object toWait)
{
Monitor.Enter(coordWakeup);
Monitor.Enter(toWait);
threadsDone -= 1;
if (threadsDone == 0)
{
Monitor.Pulse(coordWakeup);
}
Monitor.Exit(coordWakeup);
Monitor.Wait(toWait);
Monitor.Exit(toWait);
}
// This is a helper function which is used to signal the worker client threads to run another iteration of our concurrent experiment.
private void WaitForWorkers(int numThreads, object coordWakeup, object toWait)
{
Monitor.Enter(coordWakeup);
while (threadsDone != 0)
{
Monitor.Wait(coordWakeup);
}
threadsDone = numThreads;
Monitor.Exit(coordWakeup);
Monitor.Enter(toWait);
Monitor.PulseAll(toWait);
Monitor.Exit(toWait);
}
// ClientThreadArgs is a set of arguments which is used by a client thread which is concurrently running with other client threads. We
// use client threads in order to simulate race conditions.
private class ClientThreadArgs
{
public ClientWrapper client;
public IEnumerable<Tuple<int, int>> args;
public int resultIndex;
public int numThreads;
public object coordWakeup;
public object toWait;
public List<Tuple<int, int>>[] results;
public int offset;
}
// Each client thread which is concurrently trying to create a sequence of grains with other clients runs this function.
private void ThreadFunc(object obj)
{
var threadArg = (ClientThreadArgs)obj;
var resultList = new List<Tuple<int, int>>();
// Go through the sequence of arguments one by one.
foreach (var arg in threadArg.args)
{
try
{
// Call the appropriate grain.
var grainId = arg.Item2;
int ret = threadArg.client.CallGrain(threadArg.offset + grainId);
Debug.WriteLine("*** Result = {0}", ret);
// Keep the result in resultList.
resultList.Add(Tuple.Create(grainId, ret));
}
catch (Exception e)
{
WriteLog("Caught exception: {0}", e);
}
// Finally, wait for the coordinator to kick-off another round of the test.
WaitForCoordinator(threadArg.numThreads, threadArg.coordWakeup, threadArg.toWait);
}
// Track the results for validation.
lock (threadArg.results)
{
threadArg.results[threadArg.resultIndex] = resultList;
}
}
}
}
| |
/*
* Copyright (c) 2015, Wisconsin Robotics
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of Wisconsin Robotics nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL WISCONSIN ROBOTICS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Threading;
using System.Collections.ObjectModel;
using BadgerJaus.Messages;
using BadgerJaus.Messages.LocalWaypointDriver;
using BadgerJaus.Messages.ListManager;
using BadgerJaus.Messages.LocalVectorDriver;
using BadgerJaus.Messages.Driver;
using BadgerJaus.Services.Core;
using BadgerJaus.Util;
namespace BadgerJaus.Services.Mobility
{
public class LocalWaypointListDriver : BaseService
{
public const string SERVICE_NAME = "LocalWaypointListDriver";
public const String SERVICE_VERSION = "1.0";
public const String PARENT_SERVICE = "ListManager";
protected LinkedList<WaypointElement> waypointList = new LinkedList<WaypointElement>();
protected WaypointElement currentWaypointElement = null;
protected int waypointIndex = -1;
private JausByte requestID = new JausByte();
protected LocalPoseSensor localPoseSensor = null;
protected double xOrigin = 0;
protected double yOrigin = 0;
protected double zOrigin = 0;
protected double rollOrigin = 0;
protected double pitchOrigin = 0;
protected double yawOrigin = 0;
protected double speed;
protected double desiredSpeed;
public LocalWaypointListDriver(LocalPoseSensor localPoseSensor)
{
this.localPoseSensor = localPoseSensor;
}
protected override string OVERRIDE_SERVICE_NAME
{
get { return SERVICE_NAME; }
}
protected override string OVERRIDE_SERVICE_FAMILY
{
get { return MOBILITY_SERVICE; }
}
public override bool IsSupported(int commandCode)
{
// TODO Auto-generated method stub
return false;
}
public override bool ImplementsAndHandledMessage(Message message, Component component)
{
Message returnMessage = null;
bool handled = true;
switch (message.GetCommandCode())
{
case JausCommandCode.SET_LOCAL_WAYPOINT:
SetLocalWaypoint setLocalWaypoint = new SetLocalWaypoint();
setLocalWaypoint.SetFromJausMessage(message);
return HandleSetLocalWaypoint(setLocalWaypoint);
case JausCommandCode.SET_ELEMENT:
SetElement setElement = new SetElement();
setElement.SetFromJausMessage(message);
handled = HandleSetElement(setElement);
break;
case JausCommandCode.REPORT_ELEMENT_LIST:
ReportElementList reportElementList = new ReportElementList();
reportElementList.SetRequestID((int)requestID.Value);
returnMessage = reportElementList;
break;
case JausCommandCode.QUERY_ACTIVE_ELEMENT:
return HandleQueryActiveElement(message);
case JausCommandCode.QUERY_ELEMENT_COUNT:
ReportElementCount reportElementCount = new ReportElementCount();
reportElementCount.SetElementCount(waypointList.Count);
returnMessage = reportElementCount;
break;
case JausCommandCode.EXECUTE_LIST:
if (!ExecuteList()) //Must be overridden somehow
return true;
waypointIndex = 0;
currentWaypointElement = waypointList.First.Value;
break;
case JausCommandCode.QUERY_LOCAL_WAYPOINT:
QueryLocalWaypoint queryLocalWaypoint = new QueryLocalWaypoint();
queryLocalWaypoint.SetFromJausMessage(message);
return HandleQueryLocalWaypoint(queryLocalWaypoint);
case JausCommandCode.QUERY_TRAVEL_SPEED:
return HandleQueryTravelSpeed(message);
case JausCommandCode.SET_TRAVEL_SPEED:
SetTravelSpeed setTravelSpeed = new SetTravelSpeed();
setTravelSpeed.SetFromJausMessage(message);
return HandleSetTravelSpeed(setTravelSpeed);
default:
handled = false;
break;
}
if (returnMessage != null)
{
returnMessage.SetSource(message.GetDestination());
returnMessage.SetDestination(message.GetSource());
Transport.SendMessage(returnMessage);
}
return handled;
}
public bool HandleSetLocalWaypoint(SetLocalWaypoint message)
{
if (message.IsFieldSet(SetLocalWaypoint.X_BIT))
xOrigin = message.GetX();
if (message.IsFieldSet(SetLocalWaypoint.Y_BIT))
yOrigin = message.GetY();
if (message.IsFieldSet(SetLocalWaypoint.Z_BIT))
                zOrigin = message.GetZ();
if (message.IsFieldSet(SetLocalWaypoint.ROLL_BIT))
rollOrigin = message.GetRoll();
if (message.IsFieldSet(SetLocalWaypoint.PITCH_BIT))
pitchOrigin = message.GetPitch();
if (message.IsFieldSet(SetLocalWaypoint.YAW_BIT))
                yawOrigin = message.GetYaw();
return true;
}
public bool HandleSetElement(SetElement message)
{
#warning Completely broken
requestID.Value = message.GetRequestID();
List<JausElement> elements = message.GetElements();
JausUnsignedShort elementCommandCode = new JausUnsignedShort();
foreach (JausElement element in elements)
{
byte[] data = element.getElementData();
//elementCommandCode.Deserialize(data);
if (!(elementCommandCode.Value == JausCommandCode.SET_LOCAL_WAYPOINT))
continue;
//Commented out DUE to visibility modifier
//SetLocalWaypoint setLocalWaypoint = new SetLocalWaypoint();
//setLocalWaypoint.setPayloadFromJausBuffer(data, JausBaseType.SHORT_BYTE_SIZE);
//WaypointElement waypoint = new WaypointElement(element, setLocalWaypoint);
//waypointList.AddLast(waypoint);
//Console.WriteLine("Adding element waypoint, current count: " + waypointList.Count);
}
ConfirmElementRequest confirmElementRequest = new ConfirmElementRequest();
confirmElementRequest.SetSource(message.GetDestination());
confirmElementRequest.SetDestination(message.GetSource());
confirmElementRequest.SetRequestID(message.GetRequestID());
Transport.SendMessage(confirmElementRequest);
return true;
}
public bool HandleQueryLocalWaypoint(QueryLocalWaypoint message)
{
ReportLocalWaypoint reportLocalWaypoint = new ReportLocalWaypoint();
reportLocalWaypoint.SetDestination(message.GetSource());
reportLocalWaypoint.SetSource(message.GetDestination());
WaypointElement waypointElement = currentWaypointElement;
if (waypointElement == null) return true;
if (message.IsFieldSet(ReportLocalWaypoint.X_BIT))
reportLocalWaypoint.SetX(waypointElement.GetX());
if (message.IsFieldSet(ReportLocalWaypoint.Y_BIT))
reportLocalWaypoint.SetY(waypointElement.GetY());
/*
if(message.IsFieldSet(ReportLocalWaypoint.Z_BIT))
reportLocalWaypoint.SetZ(z);
if(message.IsFieldSet(ReportLocalWaypoint.ROLL_BIT))
reportLocalWaypoint.SetRoll(roll);
if(message.IsFieldSet(ReportLocalWaypoint.PITCH_BIT))
reportLocalWaypoint.SetPitch(pitch);
*/
if (message.IsFieldSet(ReportLocalWaypoint.YAW_BIT))
reportLocalWaypoint.SetYaw(waypointElement.GetYaw());
Transport.SendMessage(reportLocalWaypoint);
return true;
}
public bool HandleQueryTravelSpeed(Message message)
{
ReportTravelSpeed reportTravelSpeed = new ReportTravelSpeed();
reportTravelSpeed.SetDestination(message.GetSource());
reportTravelSpeed.SetSource(message.GetDestination());
reportTravelSpeed.SetSpeed(speed);
Transport.SendMessage(reportTravelSpeed);
return true;
}
public bool HandleSetTravelSpeed(SetTravelSpeed message)
{
desiredSpeed = message.GetSpeed();
return true;
}
        public bool HandleQueryActiveElement(Message message)
        {
            ReportActiveElement reportActiveElement = new ReportActiveElement();
            reportActiveElement.SetDestination(message.GetSource());
            reportActiveElement.SetSource(message.GetDestination());
            if (currentWaypointElement != null)
                reportActiveElement.SetElementID(currentWaypointElement.element.getElementUID());
            Transport.SendMessage(reportActiveElement);
            return true;
        }
public bool ExecuteList()
{
waypointIndex = 0;
currentWaypointElement = waypointList.First.Value;
return true;
}
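        // Atomically advances to the next waypoint in the list and returns it,
        // or null once the end of the list has been reached.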
public WaypointElement IncrementList()
{
int currentIndex = Interlocked.Increment(ref waypointIndex);
if (currentIndex >= waypointList.Count)
return null;
LinkedList<WaypointElement>.Enumerator itr = waypointList.GetEnumerator();
int count = 0;
            while (count <= currentIndex)
{
count++;
itr.MoveNext();
}
WaypointElement currentWaypoint = itr.Current;
currentWaypointElement = currentWaypoint;
return currentWaypoint;
}
public void SetActiveWaypoint(int index)
{
waypointIndex = index;
LinkedList<WaypointElement>.Enumerator itr = waypointList.GetEnumerator();
int count = 0;
            while (count <= index)
{
count++;
itr.MoveNext();
}
currentWaypointElement = itr.Current;
}
public ReadOnlyCollection<WaypointElement> GetWaypoints()
{
//return Collections.unmodifiableList(waypointList);
List<WaypointElement> li = new List<WaypointElement>();
LinkedList<WaypointElement>.Enumerator itr = waypointList.GetEnumerator();
            while (itr.MoveNext()) li.Add(itr.Current);
return li.AsReadOnly();
}
public WaypointElement GetCurrentWaypoint()
{
return currentWaypointElement;
}
public override string ToString()
{
return "Local Waypoint List Driver";
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using Xunit;
using System;
using System.Reflection;
using System.Collections.Generic;
#pragma warning disable 0414
namespace System.Reflection.Tests
{
public class PropertyInfoGetValueTests
{
//Verify PropertyInfo.GetValue(Object obj , object[] index) returns correct value for static property.
[Fact]
public static void TestGetValue1()
{
string propertyName = "PropertyA";
PropertyInfo pi = GetProperty(typeof(MyCoVariantTest), propertyName);
Assert.Equal(pi.Name, propertyName);
var value = pi.GetValue(typeof(MyCoVariantTest), (Object[])null);
Assert.NotNull(value);
}
//Verify PropertyInfo.GetValue(Object obj , object[] index) returns correct value for static property.
[Fact]
public static void TestGetValue2()
{
string propertyName = "PropertyA";
PropertyInfo pi = GetProperty(typeof(MyCoVariantTest), propertyName);
Assert.Equal(pi.Name, propertyName);
String[] strs = new String[1];
strs[0] = "hello";
//set value
pi.SetValue(null, strs, (Object[])null);
var value = pi.GetValue(typeof(MyCoVariantTest), (Object[])null);
String[] strs2 = (String[])value;
Assert.Equal(strs2[0], strs[0]);
}
//Verify PropertyInfo.GetValue(Object obj , object[] index) returns correct value for non-static property.
[Fact]
public static void TestGetValue3()
{
string propertyName = "PropertyB";
Object obj = (MyCoVariantTest)new MyCoVariantTest();
PropertyInfo pi = GetProperty(typeof(MyCoVariantTest), propertyName);
Assert.Equal(pi.Name, propertyName);
var value = pi.GetValue(obj, (Object[])null);
Assert.Null(value);
}
//Verify PropertyInfo.GetValue(Object obj , object[] index) returns value for non-static property.
[Fact]
public static void TestGetValue4()
{
string propertyName = "PropertyB";
Object obj = (MyCoVariantTest)new MyCoVariantTest();
PropertyInfo pi = GetProperty(typeof(MyCoVariantTest), propertyName);
Assert.Equal(pi.Name, propertyName);
String[] strs = new String[1];
strs[0] = "hello";
//set value
pi.SetValue(obj, strs, (Object[])null);
var value = pi.GetValue(obj, (Object[])null);
String[] strs2 = (String[])value;
Assert.Equal(strs2[0], strs[0]);
}
//Verify PropertyInfo.GetValue(Object obj , object[] index)returns correct value for Interface property
[Fact]
public static void TestGetValue5()
{
string propertyName = "Name";
Object obj = (InterfacePropertyImpl)new InterfacePropertyImpl();
PropertyInfo pi = GetProperty(typeof(InterfacePropertyImpl), propertyName);
Assert.Equal(pi.Name, propertyName);
var value = pi.GetValue(obj, (Object[])null);
Assert.Null(value);
}
//Verify PropertyInfo.GetValue(Object obj , object[] index) returns correct value for Interface property
[Fact]
public static void TestGetValue6()
{
string propertyName = "Name";
Object obj = (InterfacePropertyImpl)new InterfacePropertyImpl();
PropertyInfo pi = GetProperty(typeof(InterfacePropertyImpl), propertyName);
Assert.Equal(pi.Name, propertyName);
//set value
String strs1 = "hello";
pi.SetValue(obj, strs1, (Object[])null);
var value = pi.GetValue(obj, (Object[])null);
String strs2 = (String)value;
Assert.Equal(strs2, strs1);
}
//Verify PropertyInfo.GetValue(Object obj , object[] index) returns correct value for property
[Fact]
public static void TestGetValue7()
{
string propertyName = "PropertyC";
Object obj = Activator.CreateInstance(typeof(MyCoVariantTest));
PropertyInfo pi = GetProperty(typeof(MyCoVariantTest), propertyName);
Assert.Equal(pi.Name, propertyName);
var value = pi.GetValue(obj, new Object[] { 1, "2" });
Assert.Null(value);
}
//Verify PropertyInfo.GetValue(Object obj , object[] index) returns correct value for property
[Fact]
public static void TestGetValue8()
{
string propertyName = "PropertyC";
Object obj = Activator.CreateInstance(typeof(MyCoVariantTest));
PropertyInfo pi = GetProperty(typeof(MyCoVariantTest), propertyName);
Assert.Equal(pi.Name, propertyName);
var value = pi.GetValue(obj, new Object[] { 1, "2" });
Assert.Null(value);
}
//
// Negative Tests for PropertyInfo
//
//Verify PropertyInfo.GetValue throws ParameterCountException
[Fact]
public static void TestGetValue9()
{
string propertyName = "PropertyC";
Object obj = Activator.CreateInstance(typeof(MyCoVariantTest));
PropertyInfo pi = GetProperty(typeof(MyCoVariantTest), propertyName);
Assert.Equal(pi.Name, propertyName);
Assert.Throws<TargetParameterCountException>(() =>
{
var value = pi.GetValue(obj, new Object[] { 1, "2", 3 });
});
}
//Verify PropertyInfo.GetValue throws ParameterCountException
[Fact]
public static void TestGetValue10()
{
string propertyName = "PropertyC";
Object obj = Activator.CreateInstance(typeof(MyCoVariantTest));
PropertyInfo pi = GetProperty(typeof(MyCoVariantTest), propertyName);
Assert.Equal(pi.Name, propertyName);
Assert.Throws<TargetParameterCountException>(() =>
{
var value = pi.GetValue(obj, null);
});
}
//Verify PropertyInfo.GetValue throws ArgumentException
[Fact]
public static void TestGetValue11()
{
string propertyName = "PropertyC";
Object obj = Activator.CreateInstance(typeof(MyCoVariantTest));
PropertyInfo pi = GetProperty(typeof(MyCoVariantTest), propertyName);
Assert.Equal(pi.Name, propertyName);
Assert.Throws<ArgumentException>(() =>
{
                var value = pi.GetValue(obj, new Object[] { "1", "2" });
});
}
//Verify PropertyInfo.GetValue throws TargetException
[Fact]
public static void TestGetValue12()
{
string propertyName = "PropertyC";
Object obj = Activator.CreateInstance(typeof(MyCoVariantTest));
PropertyInfo pi = GetProperty(typeof(MyCoVariantTest), propertyName);
Assert.Equal(pi.Name, propertyName);
            // On Win8p, a generic Exception is thrown instead of TargetException.
// Refer http://msdn.microsoft.com/en-us/library/b05d59ty.aspx
Assert.ThrowsAny<Exception>(() => pi.GetValue(null, new Object[] { "1", "2" }));
}
//Verify PropertyInfo.GetValue throws ArgumentException
[Fact]
public static void TestGetValue13()
{
string propertyName = "Property1";
Object obj = new LaterClass();
PropertyInfo pi = GetProperty(typeof(LaterClass), propertyName);
Assert.Equal(pi.Name, propertyName);
Assert.Throws<ArgumentException>(() =>
{
                var value = pi.GetValue(obj, null);
});
}
//Verify PropertyInfo.GetValue() returns hardcoded value
[Fact]
public static void TestGetValue14()
{
string propertyName = "Property2";
Object obj = new LaterClass();
PropertyInfo pi = GetProperty(typeof(LaterClass), propertyName);
Assert.Equal(pi.Name, propertyName);
int value = (int)pi.GetValue(obj);
Assert.Equal(value, 100);
}
//Verify PropertyInfo.GetValue(Object obj) returns correct value for Interface property
[Fact]
public static void TestGetValue15()
{
string propertyName = "Name";
Object obj = (InterfacePropertyImpl)new InterfacePropertyImpl();
PropertyInfo pi = GetProperty(typeof(InterfacePropertyImpl), propertyName);
Assert.Equal(pi.Name, propertyName);
//set value
String strs1 = "hello";
pi.SetValue(obj, strs1);
var value = pi.GetValue(obj);
String strs2 = (String)value;
Assert.Equal(strs2, strs1);
}
// Gets PropertyInfo object from current class
public static PropertyInfo getProperty(string property)
{
return GetProperty(typeof(PropertyInfoGetValueTests), property);
}
//Gets PropertyInfo object from a Type
public static PropertyInfo GetProperty(Type t, string property)
{
TypeInfo ti = t.GetTypeInfo();
IEnumerator<PropertyInfo> allproperties = ti.DeclaredProperties.GetEnumerator();
PropertyInfo pi = null;
while (allproperties.MoveNext())
{
if (allproperties.Current.Name.Equals(property))
{
//found property
pi = allproperties.Current;
break;
}
}
return pi;
}
}
//Reflection Metadata
public class MyCoVariantTest
{
public static Object[] objArr = new Object[1];
public Object[] objArr2;
public static Object[] PropertyA
{
get { return objArr; }
set { objArr = value; }
}
public Object[] PropertyB
{
get { return objArr2; }
set { objArr2 = value; }
}
        [System.Runtime.CompilerServices.IndexerNameAttribute("PropertyC")] // makes the indexer's property name PropertyC instead of the default Item
public Object[] this[int index, String s]
{
get { return objArr2; }
set { objArr2 = value; }
}
}
public interface InterfaceProperty
{
String Name
{
get;
set;
}
}
public class InterfacePropertyImpl : InterfaceProperty
{
private String _name = null;
public String Name
{
get { return _name; }
set { _name = value; }
}
}
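    // LaterClass exercises edge cases used by the tests above: Property1 has no getter
    // (so GetValue throws ArgumentException) and Property2 has a private getter that is
    // still reachable through reflection.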
public class LaterClass
{
public int Property1
{
set { }
}
public int Property2
{
private get { return 100; }
set { }
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
using Directory = Lucene.Net.Store.Directory;
using IndexOutput = Lucene.Net.Store.IndexOutput;
using UnicodeUtil = Lucene.Net.Util.UnicodeUtil;
namespace Lucene.Net.Index
{
/// <summary>This stores a monotonically increasing set of <Term, TermInfo> pairs in a
/// Directory. A TermInfos can be written once, in order.
/// </summary>
public sealed class TermInfosWriter
{
/// <summary>The file format version, a negative number. </summary>
public const int FORMAT = - 3;
// changed strings to true utf8 with length in bytes not length in chars
public const int FORMAT_VERSION_UTF8_LENGTH_IN_BYTES = -4;
		// NOTE: always change this if you switch to a new format!
public static readonly int FORMAT_CURRENT = FORMAT_VERSION_UTF8_LENGTH_IN_BYTES;
private FieldInfos fieldInfos;
private IndexOutput output;
private TermInfo lastTi = new TermInfo();
private long size;
// TODO: the default values for these two parameters should be settable from
// IndexWriter. However, once that's done, folks will start setting them to
// ridiculous values and complaining that things don't work well, as with
// mergeFactor. So, let's wait until a number of folks find that alternate
// values work better. Note that both of these values are stored in the
// segment, so that it's safe to change these w/o rebuilding all indexes.
/// <summary>Expert: The fraction of terms in the "dictionary" which should be stored
/// in RAM. Smaller values use more memory, but make searching slightly
/// faster, while larger values use less memory and make searching slightly
/// slower. Searching is typically not dominated by dictionary lookup, so
/// tweaking this is rarely useful.
/// </summary>
internal int indexInterval = 128;
/// <summary>Expert: The fraction of {@link TermDocs} entries stored in skip tables,
		/// used to accelerate {@link TermDocs#SkipTo(int)}. Larger values result in
/// smaller indexes, greater acceleration, but fewer accelerable cases, while
/// smaller values result in bigger indexes, less acceleration and more
/// accelerable cases. More detailed experiments would be useful here.
/// </summary>
internal int skipInterval = 16;
/// <summary>Expert: The maximum number of skip levels. Smaller values result in
/// slightly smaller indexes, but slower skipping in big posting lists.
/// </summary>
internal int maxSkipLevels = 10;
private long lastIndexPointer;
private bool isIndex;
private byte[] lastTermBytes = new byte[10];
private int lastTermBytesLength = 0;
private int lastFieldNumber = -1;
private TermInfosWriter other;
private UnicodeUtil.UTF8Result utf8Result = new UnicodeUtil.UTF8Result();
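		// The public constructor creates a paired writer: this instance writes the main
		// term dictionary (.tis) while 'other' writes the sparser term index (.tii),
		// receiving one entry every indexInterval terms; each holds a reference to the other.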
public TermInfosWriter(Directory directory, System.String segment, FieldInfos fis, int interval)
{
Initialize(directory, segment, fis, interval, false);
other = new TermInfosWriter(directory, segment, fis, interval, true);
other.other = this;
}
private TermInfosWriter(Directory directory, System.String segment, FieldInfos fis, int interval, bool isIndex)
{
Initialize(directory, segment, fis, interval, isIndex);
}
private void Initialize(Directory directory, System.String segment, FieldInfos fis, int interval, bool isi)
{
indexInterval = interval;
fieldInfos = fis;
isIndex = isi;
output = directory.CreateOutput(segment + (isIndex ? ".tii" : ".tis"));
output.WriteInt(FORMAT_CURRENT); // write format
output.WriteLong(0); // leave space for size
output.WriteInt(indexInterval); // write indexInterval
output.WriteInt(skipInterval); // write skipInterval
output.WriteInt(maxSkipLevels); // write maxSkipLevels
System.Diagnostics.Debug.Assert(InitUTF16Results());
}
internal void Add(Term term, TermInfo ti)
{
UnicodeUtil.UTF16toUTF8(term.text, 0, term.text.Length, utf8Result);
Add(fieldInfos.FieldNumber(term.field), utf8Result.result, utf8Result.length, ti);
}
// currently used only by assert statements
UnicodeUtil.UTF16Result utf16Result1;
UnicodeUtil.UTF16Result utf16Result2;
// currently used only by assert statements
private bool InitUTF16Results()
{
utf16Result1 = new UnicodeUtil.UTF16Result();
utf16Result2 = new UnicodeUtil.UTF16Result();
return true;
}
// Currently used only by assert statement
private int CompareToLastTerm(int fieldNumber, byte[] termBytes, int termBytesLength)
{
if (lastFieldNumber != fieldNumber)
{
int cmp = String.CompareOrdinal(fieldInfos.FieldName(lastFieldNumber), fieldInfos.FieldName(fieldNumber));
// If there is a field named "" (empty string) then we
// will get 0 on this comparison, yet, it's "OK". But
// it's not OK if two different field numbers map to
// the same name.
if (cmp != 0 || lastFieldNumber != - 1)
return cmp;
}
UnicodeUtil.UTF8toUTF16(lastTermBytes, 0, lastTermBytesLength, utf16Result1);
UnicodeUtil.UTF8toUTF16(termBytes, 0, termBytesLength, utf16Result2);
int len;
if (utf16Result1.length < utf16Result2.length)
len = utf16Result1.length;
else
len = utf16Result2.length;
for (int i = 0; i < len; i++)
{
char ch1 = utf16Result1.result[i];
char ch2 = utf16Result2.result[i];
if (ch1 != ch2)
return ch1 - ch2;
}
return utf16Result1.length - utf16Result2.length;
}
/// <summary>Adds a new <<fieldNumber, termBytes>, TermInfo> pair to the set.
/// Term must be lexicographically greater than all previous Terms added.
/// TermInfo pointers must be positive and greater than all previous.
/// </summary>
internal void Add(int fieldNumber, byte[] termBytes, int termBytesLength, TermInfo ti)
{
System.Diagnostics.Debug.Assert(CompareToLastTerm(fieldNumber, termBytes, termBytesLength) < 0 ||
(isIndex && termBytesLength == 0 && lastTermBytesLength == 0),
"Terms are out of order: field=" + fieldInfos.FieldName(fieldNumber) + "(number " + fieldNumber + ")" +
" lastField=" + fieldInfos.FieldName(lastFieldNumber) + " (number " + lastFieldNumber + ")" +
" text=" + System.Text.Encoding.UTF8.GetString(termBytes, 0, termBytesLength) + " lastText=" + System.Text.Encoding.UTF8.GetString(lastTermBytes, 0, lastTermBytesLength));
System.Diagnostics.Debug.Assert(ti.freqPointer >= lastTi.freqPointer, "freqPointer out of order (" + ti.freqPointer + " < " + lastTi.freqPointer + ")");
System.Diagnostics.Debug.Assert(ti.proxPointer >= lastTi.proxPointer, "proxPointer out of order (" + ti.proxPointer + " < " + lastTi.proxPointer + ")");
if (!isIndex && size % indexInterval == 0)
other.Add(lastFieldNumber, lastTermBytes, lastTermBytesLength, lastTi); // add an index term
WriteTerm(fieldNumber, termBytes, termBytesLength); // write term
output.WriteVInt(ti.docFreq); // write doc freq
output.WriteVLong(ti.freqPointer - lastTi.freqPointer); // write pointers
output.WriteVLong(ti.proxPointer - lastTi.proxPointer);
if (ti.docFreq >= skipInterval)
{
output.WriteVInt(ti.skipOffset);
}
if (isIndex)
{
output.WriteVLong(other.output.GetFilePointer() - lastIndexPointer);
lastIndexPointer = other.output.GetFilePointer(); // write pointer
}
lastFieldNumber = fieldNumber;
lastTi.Set(ti);
size++;
}
private void WriteTerm(int fieldNumber, byte[] termBytes, int termBytesLength)
{
// TODO: UTF16toUTF8 could tell us this prefix
// Compute prefix in common with last term:
int start = 0;
int limit = termBytesLength < lastTermBytesLength ? termBytesLength : lastTermBytesLength;
while (start < limit)
{
if (termBytes[start] != lastTermBytes[start])
break;
start++;
}
int length = termBytesLength - start;
output.WriteVInt(start); // write shared prefix length
output.WriteVInt(length); // write delta length
output.WriteBytes(termBytes, start, length); // write delta chars
output.WriteVInt(fieldNumber); // write field num
if (lastTermBytes.Length < termBytesLength)
{
byte[] newArray = new byte[(int)(termBytesLength*1.5)];
Array.Copy(lastTermBytes, 0, newArray, 0, start);
lastTermBytes = newArray;
}
Array.Copy(termBytes, start, lastTermBytes, start, length);
lastTermBytesLength = termBytesLength;
}
/// <summary>Called to complete TermInfos creation. </summary>
internal void Close()
{
output.Seek(4); // write size after format
output.WriteLong(size);
output.Close();
if (!isIndex)
other.Close();
}
}
}
| |
// <copyright file="Cookie.cs" company="WebDriver Committers">
// Copyright 2007-2011 WebDriver committers
// Copyright 2007-2011 Google Inc.
// Portions copyright 2011 Software Freedom Conservancy
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// </copyright>
using System;
using System.Collections.Generic;
using System.Globalization;
using Newtonsoft.Json;
using OpenQA.Selenium.Internal;
namespace OpenQA.Selenium
{
/// <summary>
/// Represents a cookie in the browser.
/// </summary>
[Serializable]
[JsonObject(MemberSerialization = MemberSerialization.OptIn)]
public class Cookie
{
private string cookieName;
private string cookieValue;
private string cookiePath;
private string cookieDomain;
private DateTime? cookieExpiry;
/// <summary>
/// Initializes a new instance of the <see cref="Cookie"/> class with a specific name,
/// value, domain, path and expiration date.
/// </summary>
/// <param name="name">The name of the cookie.</param>
/// <param name="value">The value of the cookie.</param>
/// <param name="domain">The domain of the cookie.</param>
/// <param name="path">The path of the cookie.</param>
/// <param name="expiry">The expiration date of the cookie.</param>
/// <exception cref="ArgumentException">If the name is <see langword="null"/> or an empty string,
/// or if it contains a semi-colon.</exception>
/// <exception cref="ArgumentNullException">If the value is <see langword="null"/>.</exception>
public Cookie(string name, string value, string domain, string path, DateTime? expiry)
{
if (string.IsNullOrEmpty(name))
{
throw new ArgumentException("Cookie name cannot be null or empty string", "name");
}
if (value == null)
{
throw new ArgumentNullException("value", "Cookie value cannot be null");
}
if (name.IndexOf(';') != -1)
{
throw new ArgumentException("Cookie names cannot contain a ';': " + name, "name");
}
this.cookieName = name;
this.cookieValue = value;
if (!string.IsNullOrEmpty(path))
{
this.cookiePath = path;
}
this.cookieDomain = StripPort(domain);
if (expiry != null)
{
this.cookieExpiry = expiry;
}
}
/// <summary>
/// Initializes a new instance of the <see cref="Cookie"/> class with a specific name,
/// value, path and expiration date.
/// </summary>
/// <param name="name">The name of the cookie.</param>
/// <param name="value">The value of the cookie.</param>
/// <param name="path">The path of the cookie.</param>
/// <param name="expiry">The expiration date of the cookie.</param>
/// <exception cref="ArgumentException">If the name is <see langword="null"/> or an empty string,
/// or if it contains a semi-colon.</exception>
/// <exception cref="ArgumentNullException">If the value is <see langword="null"/>.</exception>
public Cookie(string name, string value, string path, DateTime? expiry)
: this(name, value, null, path, expiry)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="Cookie"/> class with a specific name,
/// value, and path.
/// </summary>
/// <param name="name">The name of the cookie.</param>
/// <param name="value">The value of the cookie.</param>
/// <param name="path">The path of the cookie.</param>
/// <exception cref="ArgumentException">If the name is <see langword="null"/> or an empty string,
/// or if it contains a semi-colon.</exception>
/// <exception cref="ArgumentNullException">If the value is <see langword="null"/>.</exception>
public Cookie(string name, string value, string path)
: this(name, value, path, null)
{
}
/// <summary>
/// Initializes a new instance of the <see cref="Cookie"/> class with a specific name and value.
/// </summary>
/// <param name="name">The name of the cookie.</param>
/// <param name="value">The value of the cookie.</param>
/// <exception cref="ArgumentException">If the name is <see langword="null"/> or an empty string,
/// or if it contains a semi-colon.</exception>
/// <exception cref="ArgumentNullException">If the value is <see langword="null"/>.</exception>
public Cookie(string name, string value)
: this(name, value, null, null)
{
}
/// <summary>
/// Gets the name of the cookie.
/// </summary>
[JsonProperty("name")]
public string Name
{
get { return this.cookieName; }
}
/// <summary>
/// Gets the value of the cookie.
/// </summary>
[JsonProperty("value")]
public string Value
{
get { return this.cookieValue; }
}
/// <summary>
/// Gets the domain of the cookie.
/// </summary>
[JsonProperty("domain", NullValueHandling = NullValueHandling.Ignore)]
public string Domain
{
get { return this.cookieDomain; }
}
/// <summary>
/// Gets the path of the cookie.
/// </summary>
[JsonProperty("path", NullValueHandling = NullValueHandling.Ignore)]
public virtual string Path
{
get { return this.cookiePath; }
}
/// <summary>
/// Gets a value indicating whether the cookie is secure.
/// </summary>
[JsonProperty("secure")]
public virtual bool Secure
{
get { return false; }
}
/// <summary>
/// Gets the expiration date of the cookie.
/// </summary>
public DateTime? Expiry
{
get { return this.cookieExpiry; }
}
/// <summary>
/// Gets the cookie expiration date in seconds from the defined zero date (01 January 1970 00:00:00 UTC).
/// </summary>
/// <remarks>This property only exists so that the JSON serializer can serialize a
/// cookie without resorting to a custom converter.</remarks>
[JsonProperty("expiry", NullValueHandling = NullValueHandling.Ignore)]
internal long? ExpirySeconds
{
get
{
if (this.cookieExpiry == null)
{
return null;
}
DateTime zeroDate = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);
TimeSpan span = this.cookieExpiry.Value.ToUniversalTime().Subtract(zeroDate);
return Convert.ToInt64(span.TotalSeconds);
}
}
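        // Example for ExpirySeconds: an expiry of 2021-01-01T00:00:00Z is serialized as
        // 1609459200, i.e. the whole seconds elapsed since 1970-01-01T00:00:00Z.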
/// <summary>
/// Converts a Dictionary to a Cookie.
/// </summary>
/// <param name="rawCookie">The Dictionary object containing the cookie parameters.</param>
/// <returns>A <see cref="Cookie"/> object with the proper parameters set.</returns>
public static Cookie FromDictionary(Dictionary<string, object> rawCookie)
{
if (rawCookie == null)
{
throw new ArgumentNullException("rawCookie", "Dictionary cannot be null");
}
string name = rawCookie["name"].ToString();
string value = rawCookie["value"].ToString();
string path = "/";
if (rawCookie.ContainsKey("path") && rawCookie["path"] != null)
{
path = rawCookie["path"].ToString();
}
string domain = string.Empty;
if (rawCookie.ContainsKey("domain") && rawCookie["domain"] != null)
{
domain = rawCookie["domain"].ToString();
}
DateTime? expires = null;
if (rawCookie.ContainsKey("expiry") && rawCookie["expiry"] != null)
{
long seconds = 0;
if (long.TryParse(rawCookie["expiry"].ToString(), out seconds))
{
try
{
expires = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc).AddSeconds(seconds).ToLocalTime();
}
catch (ArgumentOutOfRangeException)
{
expires = DateTime.MaxValue.ToLocalTime();
}
}
}
bool secure = false;
if (rawCookie.ContainsKey("secure") && rawCookie["secure"] != null)
{
secure = bool.Parse(rawCookie["secure"].ToString());
}
return new ReturnedCookie(name, value, domain, path, expires, secure);
}
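        // Example input for FromDictionary (hypothetical values): a dictionary such as
        //   { "name": "session", "value": "abc123", "path": "/", "domain": "example.com",
        //     "expiry": 1609459200, "secure": true }
        // yields a ReturnedCookie whose Expiry is 2021-01-01T00:00:00Z converted to local time.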
/// <summary>
/// Creates and returns a string representation of the cookie.
/// </summary>
/// <returns>A string representation of the cookie.</returns>
public override string ToString()
{
return this.cookieName + "=" + this.cookieValue
+ (this.cookieExpiry == null ? string.Empty : "; expires=" + this.cookieExpiry.Value.ToUniversalTime().ToString("ddd MM dd yyyy hh:mm:ss UTC", CultureInfo.InvariantCulture))
+ (string.IsNullOrEmpty(this.cookiePath) ? string.Empty : "; path=" + this.cookiePath)
+ (string.IsNullOrEmpty(this.cookieDomain) ? string.Empty : "; domain=" + this.cookieDomain);
//// + (isSecure ? ";secure;" : "");
}
/// <summary>
/// Determines whether the specified <see cref="System.Object">Object</see> is equal
/// to the current <see cref="System.Object">Object</see>.
/// </summary>
/// <param name="obj">The <see cref="System.Object">Object</see> to compare with the
/// current <see cref="System.Object">Object</see>.</param>
/// <returns><see langword="true"/> if the specified <see cref="System.Object">Object</see>
/// is equal to the current <see cref="System.Object">Object</see>; otherwise,
/// <see langword="false"/>.</returns>
public override bool Equals(object obj)
{
// Two cookies are equal if the name and value match
Cookie cookie = obj as Cookie;
if (this == obj)
{
return true;
}
if (cookie == null)
{
return false;
}
if (!this.cookieName.Equals(cookie.cookieName))
{
return false;
}
return !(this.cookieValue != null ? !this.cookieValue.Equals(cookie.cookieValue) : cookie.Value != null);
}
/// <summary>
/// Serves as a hash function for a particular type.
/// </summary>
/// <returns>A hash code for the current <see cref="System.Object">Object</see>.</returns>
public override int GetHashCode()
{
return this.cookieName.GetHashCode();
}
private static string StripPort(string domain)
{
return string.IsNullOrEmpty(domain) ? null : domain.Split(':')[0];
}
}
}
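// A minimal serialization sketch (not part of the WebDriver source; names are illustrative).
// Because the class opts in with [JsonProperty], only name/value/domain/path/secure/expiry are
// emitted, and the expiry is written as Unix seconds via the internal ExpirySeconds property.
namespace OpenQA.Selenium.Examples
{
    using System;
    using Newtonsoft.Json;

    internal static class CookieJsonSketch
    {
        public static string Serialize()
        {
            // The domain keeps its leading dot; any ":port" suffix would be stripped by StripPort.
            var cookie = new Cookie("session", "abc123", ".example.com", "/", DateTime.UtcNow.AddDays(1));

            // Produces JSON along the lines of:
            // {"name":"session","value":"abc123","domain":".example.com","path":"/","secure":false,"expiry":...}
            return JsonConvert.SerializeObject(cookie);
        }
    }
}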
| |
#region License
/* Copyright (c) 2003-2015 Llewellyn Pritchard
* All rights reserved.
* This source code is subject to terms and conditions of the BSD License.
* See license.txt. */
#endregion
#region Includes
using System;
using System.ComponentModel;
using System.Collections;
using System.Windows.Forms;
using IronScheme.Editor.Controls;
#endregion
using Microsoft.Build.BuildEngine;
using BuildProject = Microsoft.Build.BuildEngine.Project;
using Microsoft.Build.Framework;
using IronScheme.Editor.Build;
using System.Threading;
namespace IronScheme.Editor.ComponentModel
{
/// <summary>
/// Provides service for debugging
/// </summary>
[Name("Build service")]
public interface IBuildService : IService
{
/// <summary>
/// Gets or sets the logger verbosity.
/// </summary>
/// <value>The logger verbosity.</value>
LoggerVerbosity LoggerVerbosity { get;set;}
string Configuration { get; set; }
string Current { get; }
void BuildAll();
void BuildCurrent();
void BuildOrder();
void CancelBuild();
void CleanAll();
void CleanCurrent();
void RebuildAll();
void RebuildCurrent();
}
[Menu("Build")]
sealed class BuildService : ServiceBase, IBuildService
{
internal BuildProject solution;
readonly Engine buildengine = Engine.GlobalEngine;
LoggerVerbosity verbosity = LoggerVerbosity.Minimal;
public LoggerVerbosity LoggerVerbosity
{
get { return verbosity; }
set { verbosity = value; }
}
static Thread buildthread;
IDictionary SolutionProperties
{
get
{
Hashtable props = new Hashtable();
if (solution != null)
{
foreach (BuildProperty bp in solution.EvaluatedProperties)
{
if (!bp.IsImported)
{
props.Add(bp.Name, bp.Value);
}
}
}
return props;
}
}
public string Current
{
get
{
Build.Project p = ServiceHost.Project.Current;
if (p == null)
{
return null;
}
return p.ProjectName;
}
}
class ConfigurationConvertor : TypeConverter
{
public override bool GetStandardValuesSupported(ITypeDescriptorContext context)
{
return true;
}
public override TypeConverter.StandardValuesCollection GetStandardValues(ITypeDescriptorContext context)
{
return new StandardValuesCollection(new string[] { "Debug", "Release" });
}
}
[MenuItem("Configuration", Index = 1, Converter = typeof(ConfigurationConvertor), State = ApplicationState.Project)]
public string Configuration
{
      get
      {
        return solution == null
          ? (ServiceHost.Project.Current == null ? "Debug" : ServiceHost.Project.Current.Configuration)
          : solution.GetEvaluatedProperty("Configuration");
      }
set
{
if (solution == null)
{
ServiceHost.Project.Current.Configuration = value;
}
else
{
solution.SetProperty("Configuration", value);
}
}
}
BuildLogger bl;
void BuildStarted(ThreadStart t)
{
if (buildthread == null)
{
buildthread = new Thread(t);
buildthread.IsBackground = true;
(ServiceHost.File as FileManager).SaveDirtyFiles();
ServiceHost.State |= ApplicationState.Build;
ConsoleLogger l = new ConsoleLogger();
l.Verbosity = verbosity;
bl = new BuildLogger();
buildengine.RegisterLogger(l);
buildengine.RegisterLogger(bl);
buildthread.SetApartmentState(ApartmentState.STA);
buildthread.Start();
}
}
readonly Hashtable outputs = new Hashtable();
internal void BuildInternal(BuildProject project, params string[] targets)
{
if (project != null)
{
BuildProperty platform = project.GlobalProperties["Platform"];
if (platform == null)
{
project.GlobalProperties["Platform"] = platform = new BuildProperty("Platform", "AnyCPU");
}
if (string.IsNullOrEmpty( platform.Value))
{
platform.Value = "AnyCPU";
}
        if (solution != null)
        {
          project.ParentEngine.GlobalProperties["SolutionDir"] = solution.GlobalProperties["SolutionDir"];
        }
BuildStarted(delegate()
{
outputs.Clear();
try
{
bool res = project.Build(targets, outputs);
}
catch
{
if (!bl.cancel)
{
throw;
}
}
InvokeBuildCompleted();
});
}
}
delegate void VOIDVOID();
void InvokeBuildCompleted()
{
Invoke(new VOIDVOID(BuildCompleted), null);
}
void BuildCompleted()
{
if (buildthread != null)
{
buildthread = null;
}
buildengine.UnregisterAllLoggers();
ServiceHost.State &= ~ApplicationState.Build;
}
[MenuItem("Build All", Index = 11, State = ApplicationState.Project, Image = "Project.Build.png")]
public void BuildAll()
{
BuildInternal(solution ?? ServiceHost.Project.Current.MSBuildProject);
}
[MenuItem("Rebuild All", Index = 12, State = ApplicationState.Project)]
public void RebuildAll()
{
BuildInternal(solution ?? ServiceHost.Project.Current.MSBuildProject, "Rebuild");
}
[MenuItem("Clean All", Index = 13, State = ApplicationState.Project)]
public void CleanAll()
{
BuildInternal(solution ?? ServiceHost.Project.Current.MSBuildProject, "Clean");
}
[MenuItem("Build {Current}", Index = 21, State = ApplicationState.Project, Image = "Project.Build.png")]
public void BuildCurrent()
{
BuildInternal(ServiceHost.Project.Current.MSBuildProject);
}
[MenuItem("Rebuild {Current}", Index = 22, State = ApplicationState.Project)]
public void RebuildCurrent()
{
BuildInternal(ServiceHost.Project.Current.MSBuildProject, "Rebuild");
}
[MenuItem("Clean {Current}", Index = 23, State = ApplicationState.Project)]
public void CleanCurrent()
{
BuildInternal(ServiceHost.Project.Current.MSBuildProject, "Clean");
}
[MenuItem("Cancel Build", Index = 900, State = ApplicationState.Project | ApplicationState.Build, Image = "Build.Cancel.png")]
public void CancelBuild()
{
if (buildthread != null)
{
bl.cancel = true;
Console.WriteLine("User cancelled build");
ServiceHost.Error.OutputErrors(ServiceHost.Project,
new ActionResult(ActionResultType.Warning, 0, 0, "User cancelled build", null, null));
BuildCompleted();
}
}
[MenuItem("Build Order", Index = 1000, State = ApplicationState.Project)]
public void BuildOrder()
{
ProjectManager pm = ServiceHost.Project as ProjectManager;
ProjectBuildOrderForm bof = new ProjectBuildOrderForm();
if (DialogResult.OK == bof.ShowDialog(ServiceHost.Window.MainForm))
{
pm.projects.Clear();
pm.projects.AddRange(bof.listBox1.Items);
}
}
}
}
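// A minimal usage sketch (not part of the editor source): given a resolved IBuildService
// instance from the host, a caller can adjust verbosity and configuration and start a build.
namespace IronScheme.Editor.ComponentModel.Examples
{
  using Microsoft.Build.Framework;

  static class BuildServiceUsageSketch
  {
    // 'build' is assumed to be obtained from the application's service locator.
    public static void RunReleaseBuild(IBuildService build)
    {
      build.LoggerVerbosity = LoggerVerbosity.Normal; // more detailed console logging
      build.Configuration = "Release";                // sets the MSBuild Configuration property
      build.BuildAll();                               // runs on a background STA thread (see BuildStarted)
    }
  }
}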
| |
namespace Nancy.ModelBinding
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Linq.Expressions;
using Nancy.Validation;
public static class ModuleExtensions
{
private static readonly string[] NoBlacklistedProperties = new string[0];
/// <summary>
        /// Parses an array of expressions like <code>t => t.Property</code> to a list of strings containing the property names.
/// </summary>
/// <typeparam name="T">Type of the model</typeparam>
/// <param name="expressions">Expressions that tell which property should be ignored</param>
/// <returns>Array of strings containing the names of the properties.</returns>
private static string[] ParseBlacklistedPropertiesExpressionTree<T>(this IEnumerable<Expression<Func<T, object>>> expressions)
{
return expressions.Select(p => p.GetTargetMemberInfo().Name).ToArray();
}
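        // Example: for a hypothetical model Person, the array
        //   new Expression<Func<Person, object>>[] { p => p.Name, p => p.Age }
        // is parsed into the string array { "Name", "Age" }.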
/// <summary>
/// Bind the incoming request to a model
/// </summary>
/// <param name="module">Current module</param>
/// <param name="blacklistedProperties">Property names to blacklist from binding</param>
/// <returns>Model adapter - cast to a model type to bind it</returns>
public static dynamic Bind(this INancyModule module, params string[] blacklistedProperties)
{
return module.Bind(BindingConfig.Default, blacklistedProperties);
}
/// <summary>
/// Bind the incoming request to a model
/// </summary>
/// <param name="module">Current module</param>
/// <param name="configuration">The <see cref="BindingConfig"/> that should be applied during binding.</param>
/// <param name="blacklistedProperties">Property names to blacklist from binding</param>
/// <returns>Model adapter - cast to a model type to bind it</returns>
public static dynamic Bind(this INancyModule module, BindingConfig configuration, params string[] blacklistedProperties)
{
return new DynamicModelBinderAdapter(module.ModelBinderLocator, module.Context, null, configuration, blacklistedProperties);
}
/// <summary>
/// Bind the incoming request to a model
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <returns>Bound model instance</returns>
public static TModel Bind<TModel>(this INancyModule module)
{
return module.Bind();
}
/// <summary>
/// Bind the incoming request to a model
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="blacklistedProperties">Property names to blacklist from binding</param>
/// <returns>Bound model instance</returns>
public static TModel Bind<TModel>(this INancyModule module, params string[] blacklistedProperties)
{
return module.Bind(blacklistedProperties);
}
/// <summary>
/// Bind the incoming request to a model
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="blacklistedProperties">Expressions that tell which property should be ignored</param>
        /// <example>this.Bind&lt;Person&gt;(p => p.Name, p => p.Age)</example>
/// <returns>Bound model instance</returns>
public static TModel Bind<TModel>(this INancyModule module, params Expression<Func<TModel, object>>[] blacklistedProperties)
{
return module.Bind<TModel>(blacklistedProperties.ParseBlacklistedPropertiesExpressionTree());
}
/// <summary>
/// Bind the incoming request to a model and validate
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="blacklistedProperties">Property names to blacklist from binding</param>
/// <returns>Bound model instance</returns>
/// <remarks><see cref="ModelValidationResult"/> is stored in NancyModule.ModelValidationResult and NancyContext.ModelValidationResult.</remarks>
public static TModel BindAndValidate<TModel>(this INancyModule module, params string[] blacklistedProperties)
{
var model = module.Bind<TModel>(blacklistedProperties);
module.Validate(model);
return model;
}
/// <summary>
/// Bind the incoming request to a model and validate
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="blacklistedProperties">Expressions that tell which property should be ignored</param>
        /// <example>this.Bind&lt;Person&gt;(p => p.Name, p => p.Age)</example>
/// <returns>Bound model instance</returns>
/// <remarks><see cref="ModelValidationResult"/> is stored in NancyModule.ModelValidationResult and NancyContext.ModelValidationResult.</remarks>
public static TModel BindAndValidate<TModel>(this INancyModule module, params Expression<Func<TModel, object>>[] blacklistedProperties)
{
var model = module.Bind<TModel>(blacklistedProperties.ParseBlacklistedPropertiesExpressionTree());
module.Validate(model);
return model;
}
/// <summary>
/// Bind the incoming request to a model and validate
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <returns>Bound model instance</returns>
/// <remarks><see cref="ModelValidationResult"/> is stored in NancyModule.ModelValidationResult and NancyContext.ModelValidationResult.</remarks>
public static TModel BindAndValidate<TModel>(this INancyModule module)
{
var model = module.Bind<TModel>(NoBlacklistedProperties);
module.Validate(model);
return model;
}
/// <summary>
/// Bind the incoming request to a model
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="configuration">The <see cref="BindingConfig"/> that should be applied during binding.</param>
/// <returns>Bound model instance</returns>
public static TModel Bind<TModel>(this INancyModule module, BindingConfig configuration)
{
return module.Bind(configuration);
}
/// <summary>
/// Bind the incoming request to a model
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="configuration">The <see cref="BindingConfig"/> that should be applied during binding.</param>
/// <param name="blacklistedProperties">Property names to blacklist from binding</param>
/// <returns>Bound model instance</returns>
public static TModel Bind<TModel>(this INancyModule module, BindingConfig configuration, params string[] blacklistedProperties)
{
return module.Bind(configuration, blacklistedProperties);
}
/// <summary>
/// Bind the incoming request to a model
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="configuration">The <see cref="BindingConfig"/> that should be applied during binding.</param>
/// <param name="blacklistedProperty">Expressions that tell which property should be ignored</param>
        /// <example>this.Bind&lt;Person&gt;(p => p.Name, p => p.Age)</example>
/// <returns>Bound model instance</returns>
public static TModel Bind<TModel>(this INancyModule module, BindingConfig configuration, Expression<Func<TModel, object>> blacklistedProperty)
{
return module.Bind(configuration, new [] { blacklistedProperty }.ParseBlacklistedPropertiesExpressionTree());
}
/// <summary>
/// Bind the incoming request to a model
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="configuration">The <see cref="BindingConfig"/> that should be applied during binding.</param>
/// <param name="blacklistedProperties">Expressions that tell which property should be ignored</param>
        /// <example>this.Bind&lt;Person&gt;(p => p.Name, p => p.Age)</example>
/// <returns>Bound model instance</returns>
public static TModel Bind<TModel>(this INancyModule module, BindingConfig configuration, params Expression<Func<TModel, object>>[] blacklistedProperties)
{
return module.Bind(configuration, blacklistedProperties.ParseBlacklistedPropertiesExpressionTree());
}
/// <summary>
/// Bind the incoming request to a model and validate
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="configuration">The <see cref="BindingConfig"/> that should be applied during binding.</param>
/// <param name="blacklistedProperties">Property names to blacklist from binding</param>
/// <returns>Bound model instance</returns>
/// <remarks><see cref="ModelValidationResult"/> is stored in NancyModule.ModelValidationResult and NancyContext.ModelValidationResult.</remarks>
public static TModel BindAndValidate<TModel>(this INancyModule module, BindingConfig configuration, params string[] blacklistedProperties)
{
var model = module.Bind<TModel>(configuration, blacklistedProperties);
module.Validate(model);
return model;
}
/// <summary>
/// Bind the incoming request to a model and validate
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="configuration">The <see cref="BindingConfig"/> that should be applied during binding.</param>
/// <param name="blacklistedProperties">Expressions that tell which property should be ignored</param>
        /// <example>this.Bind&lt;Person&gt;(p => p.Name, p => p.Age)</example>
/// <returns>Bound model instance</returns>
/// <remarks><see cref="ModelValidationResult"/> is stored in NancyModule.ModelValidationResult and NancyContext.ModelValidationResult.</remarks>
public static TModel BindAndValidate<TModel>(this INancyModule module, BindingConfig configuration, params Expression<Func<TModel, object>>[] blacklistedProperties)
{
var model = module.Bind<TModel>(configuration, blacklistedProperties.ParseBlacklistedPropertiesExpressionTree());
module.Validate(model);
return model;
}
/// <summary>
/// Bind the incoming request to a model and validate
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="configuration">The <see cref="BindingConfig"/> that should be applied during binding.</param>
/// <returns>Bound model instance</returns>
/// <remarks><see cref="ModelValidationResult"/> is stored in NancyModule.ModelValidationResult and NancyContext.ModelValidationResult.</remarks>
public static TModel BindAndValidate<TModel>(this INancyModule module, BindingConfig configuration)
{
var model = module.Bind<TModel>(configuration, NoBlacklistedProperties);
module.Validate(model);
return model;
}
/// <summary>
/// Bind the incoming request to an existing instance
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="instance">The class instance to bind properties to</param>
/// <param name="blacklistedProperties">Property names to blacklist from binding</param>
public static TModel BindTo<TModel>(this INancyModule module, TModel instance, params string[] blacklistedProperties)
{
return module.BindTo(instance, BindingConfig.Default, blacklistedProperties);
}
/// <summary>
/// Bind the incoming request to an existing instance
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="instance">The class instance to bind properties to</param>
/// <param name="blacklistedProperties">Expressions that tell which property should be ignored</param>
        /// <example>this.Bind&lt;Person&gt;(p => p.Name, p => p.Age)</example>
public static TModel BindTo<TModel>(this INancyModule module, TModel instance, params Expression<Func<TModel, object>>[] blacklistedProperties)
{
return module.BindTo(instance, BindingConfig.Default, blacklistedProperties.ParseBlacklistedPropertiesExpressionTree());
}
/// <summary>
/// Bind the incoming request to an existing instance
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="instance">The class instance to bind properties to</param>
public static TModel BindTo<TModel>(this INancyModule module, TModel instance)
{
return module.BindTo(instance, BindingConfig.Default, NoBlacklistedProperties);
}
/// <summary>
/// Bind the incoming request to an existing instance and validate
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="instance">The class instance to bind properties to</param>
/// <param name="blacklistedProperties">Property names to blacklist from binding</param>
/// <remarks><see cref="ModelValidationResult"/> is stored in NancyModule.ModelValidationResult and NancyContext.ModelValidationResult.</remarks>
public static TModel BindToAndValidate<TModel>(this INancyModule module, TModel instance, params string[] blacklistedProperties)
{
var model = module.BindTo(instance, blacklistedProperties);
module.Validate(model);
return model;
}
/// <summary>
/// Bind the incoming request to an existing instance and validate
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="instance">The class instance to bind properties to</param>
/// <param name="blacklistedProperties">Expressions that tell which property should be ignored</param>
        /// <example>this.Bind&lt;Person&gt;(p => p.Name, p => p.Age)</example>
/// <remarks><see cref="ModelValidationResult"/> is stored in NancyModule.ModelValidationResult and NancyContext.ModelValidationResult.</remarks>
public static TModel BindToAndValidate<TModel>(this INancyModule module, TModel instance, params Expression<Func<TModel, object>>[] blacklistedProperties)
{
var model = module.BindTo(instance, blacklistedProperties.ParseBlacklistedPropertiesExpressionTree());
module.Validate(model);
return model;
}
/// <summary>
/// Bind the incoming request to an existing instance and validate
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="instance">The class instance to bind properties to</param>
/// <remarks><see cref="ModelValidationResult"/> is stored in NancyModule.ModelValidationResult and NancyContext.ModelValidationResult.</remarks>
public static TModel BindToAndValidate<TModel>(this INancyModule module, TModel instance)
{
var model = module.BindTo(instance, NoBlacklistedProperties);
module.Validate(model);
return model;
}
/// <summary>
/// Bind the incoming request to an existing instance
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="instance">The class instance to bind properties to</param>
/// <param name="configuration">The <see cref="BindingConfig"/> that should be applied during binding.</param>
/// <param name="blacklistedProperties">Property names to blacklist from binding</param>
public static TModel BindTo<TModel>(this INancyModule module, TModel instance, BindingConfig configuration, params string[] blacklistedProperties)
{
dynamic adapter =
new DynamicModelBinderAdapter(module.ModelBinderLocator, module.Context, instance, configuration, blacklistedProperties);
return adapter;
}
/// <summary>
/// Bind the incoming request to an existing instance
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="instance">The class instance to bind properties to</param>
/// <param name="configuration">The <see cref="BindingConfig"/> that should be applied during binding.</param>
/// <param name="blacklistedProperties">Expressions that tell which property should be ignored</param>
        /// <example>this.Bind&lt;Person&gt;(p => p.Name, p => p.Age)</example>
public static TModel BindTo<TModel>(this INancyModule module, TModel instance, BindingConfig configuration, params Expression<Func<TModel, object>>[] blacklistedProperties)
{
return module.BindTo(instance, configuration, blacklistedProperties.ParseBlacklistedPropertiesExpressionTree());
}
/// <summary>
/// Bind the incoming request to an existing instance
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="instance">The class instance to bind properties to</param>
/// <param name="configuration">The <see cref="BindingConfig"/> that should be applied during binding.</param>
public static TModel BindTo<TModel>(this INancyModule module, TModel instance, BindingConfig configuration)
{
return module.BindTo(instance, configuration, NoBlacklistedProperties);
}
/// <summary>
/// Bind the incoming request to an existing instance and validate
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="instance">The class instance to bind properties to</param>
/// <param name="configuration">The <see cref="BindingConfig"/> that should be applied during binding.</param>
/// <param name="blacklistedProperties">Property names to blacklist from binding</param>
/// <remarks><see cref="ModelValidationResult"/> is stored in NancyModule.ModelValidationResult and NancyContext.ModelValidationResult.</remarks>
public static TModel BindToAndValidate<TModel>(this INancyModule module, TModel instance, BindingConfig configuration, params string[] blacklistedProperties)
{
var model = module.BindTo(instance, configuration, blacklistedProperties);
module.Validate(model);
return model;
}
/// <summary>
/// Bind the incoming request to an existing instance and validate
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="instance">The class instance to bind properties to</param>
/// <param name="configuration">The <see cref="BindingConfig"/> that should be applied during binding.</param>
/// <param name="blacklistedProperties">Expressions that tell which property should be ignored</param>
/// <remarks><see cref="ModelValidationResult"/> is stored in NancyModule.ModelValidationResult and NancyContext.ModelValidationResult.</remarks>
/// <example>this.BindToAndValidate(person, config, p => p.Name, p => p.Age)</example>
public static TModel BindToAndValidate<TModel>(this INancyModule module, TModel instance, BindingConfig configuration, params Expression<Func<TModel, object>>[] blacklistedProperties)
{
var model = module.BindTo(instance, configuration, blacklistedProperties.ParseBlacklistedPropertiesExpressionTree());
module.Validate(model);
return model;
}
/// <summary>
/// Bind the incoming request to an existing instance and validate
/// </summary>
/// <typeparam name="TModel">Model type</typeparam>
/// <param name="module">Current module</param>
/// <param name="instance">The class instance to bind properties to</param>
/// <param name="configuration">The <see cref="BindingConfig"/> that should be applied during binding.</param>
/// <remarks><see cref="ModelValidationResult"/> is stored in NancyModule.ModelValidationResult and NancyContext.ModelValidationResult.</remarks>
public static TModel BindToAndValidate<TModel>(this INancyModule module, TModel instance, BindingConfig configuration)
{
var model = module.BindTo(instance, configuration, NoBlacklistedProperties);
module.Validate(model);
return model;
}
}
}
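// A minimal usage sketch (module and model names are illustrative, not part of Nancy itself):
// binding a request body to a model while blacklisting a server-controlled property, then
// checking the validation result stored on the module.
namespace Nancy.ModelBinding.Examples
{
    using Nancy;

    public class Person
    {
        public int Id { get; set; }      // blacklisted below: never taken from the request
        public string Name { get; set; }
        public int Age { get; set; }
    }

    public class PersonModule : NancyModule
    {
        public PersonModule()
        {
            Post["/people"] = _ =>
            {
                // Expression overload; equivalent to this.BindAndValidate<Person>("Id")
                var person = this.BindAndValidate<Person>(p => p.Id);

                if (!this.ModelValidationResult.IsValid)
                {
                    return HttpStatusCode.BadRequest;
                }

                // person.Id is left at its default because "Id" was blacklisted
                return string.Format("created {0}, age {1}", person.Name, person.Age);
            };
        }
    }
}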
| |
// ***************
// requires reference for System.Management.dll
// ***************
using Microsoft.Win32;
using System;
using System.Linq;
using System.Collections.Concurrent;
using System.Diagnostics;
using System.Management;
using System.Threading;
using System.Collections.Generic;
namespace BigDataPipeline.Core
{
public class SystemInformation
{
public static int AverageProcessorLoad
{
get
{
if (_monitoringLevel != MonitoringLevels.Paused)
return _averageProcessorLoad;
return GetCurrentProcessorLoad ();
}
}
public static int CurrentProcessorLoad
{
get
{
if (_monitoringLevel != MonitoringLevels.Paused)
return _currentProcessorLoad;
return GetCurrentProcessorLoad ();
}
}
public static long CurrentFreeMemory
{
get
{
if (_monitoringLevel != MonitoringLevels.Paused && _currentFreeMemory > 0)
return _currentFreeMemory;
return GetCurrentFreeMemory ();
}
}
public static long TotalMemory
{
get { loadInfo (); return _totalMemory; }
}
public static long ApplicationTotalMemory
{
get
{
                if (_monitoringLevel != MonitoringLevels.Paused && _applicationTotalMemory > 0) // use the cached value once the monitor has populated it
return _applicationTotalMemory;
return GetApplicationTotalMemory ();
}
}
public static string OsName
{
get { loadInfo (); return _osName; }
}
public static int ProcessId
{
get { loadInfo (); return _processId; }
}
public static int LogicalProcessors
{
get { loadInfo (); return _logicalProcessors; }
}
public static int ProcessorCores
{
get { loadInfo (); return _processorCores; }
}
public static int Processors
{
get { loadInfo (); return _processors; }
}
public static string SystemName
{
get { loadInfo (); return _systemName; }
}
public static string ProcessorName
{
get { loadInfo (); return _processorName; }
}
public static string CurrentPublicIp
{
get { loadInfo (); return _currentPublicIp; }
}
public static string AwsInstanceType
{
get { loadInfo (); return _awsInstanceType; }
}
public static string AwsInstanceId
{
get { loadInfo (); return _awsInstanceId; }
}
public static string AwsAmiId
{
get { loadInfo (); return _awsAmiId; }
}
public static string AwsPublicIp
{
get { loadInfo (); return _awsPublicIp; }
}
public static bool IsRunningOnAWS
{
get { loadInfo (); return !String.IsNullOrEmpty (_awsInstanceId); }
}
public static string[] DiskInfo
{
get { loadInfo (); return _disks; }
}
private static volatile bool _loaded = false;
private static string _processorName = null;
private static string _systemName = null;
private static string _osName = null;
private static int _processors = 0;
private static int _processorCores = 0;
private static int _logicalProcessors = 0;
private static int _processId = 0;
private static long _totalMemory = 0;
private static int _currentProcessorLoad = 0;
private static int _averageProcessorLoad = 0;
private static long _currentFreeMemory = 0;
private static long _applicationTotalMemory = 0;
private static string _currentPublicIp;
private static string _awsInstanceType;
private static string _awsInstanceId;
private static string _awsAmiId;
private static string _awsPublicIp;
private static string[] _disks;
public static void loadInfo ()
{
if (!_loaded)
{
try
{
_processId = Process.GetCurrentProcess ().Id;
// System & CPUs
using (var query = new ManagementObjectSearcher ("SELECT name, SystemName, NumberOfCores, NumberOfLogicalProcessors FROM Win32_Processor"))
{
foreach (var info in query.Get ())
{
                        _processorName = info["name"].ToString ().Replace ("  ", " ").Replace ("  ", " "); // collapse repeated spaces in the reported CPU name
_systemName = info["SystemName"].ToString ();
_processorCores = Convert.ToInt32 (info["NumberOfCores"]);
_logicalProcessors = Convert.ToInt32 (info["NumberOfLogicalProcessors"]);
}
}
// CPUs
using (var query = new ManagementObjectSearcher ("SELECT NumberOfProcessors FROM Win32_ComputerSystem"))
{
foreach (var info in query.Get ())
{
_processors = Convert.ToInt32 (info["NumberOfProcessors"]);
}
}
// Memory
using (var query = new ManagementObjectSearcher ("SELECT Caption, TotalVisibleMemorySize FROM Win32_OperatingSystem"))
{
foreach (var info in query.Get ())
{
_totalMemory = Convert.ToInt64 (info["TotalVisibleMemorySize"]);
_osName = info["Caption"].ToString ();
}
}
// Disks
using (var query = new ManagementObjectSearcher ("SELECT VolumeName, Size, FreeSpace FROM Win32_LogicalDisk"))
{
var tmp = new List<string> ();
foreach (var info in query.Get ())
{
                            var sz = info["Size"];
                            var fs = info["FreeSpace"];
                            tmp.Add (info["VolumeName"] + " " + fs + " / " + sz); // free space / total size
}
_disks = tmp.ToArray ();
}
}
catch
{
_processorName = "NotFound";
_systemName = "NotFound";
}
_loaded = true;
                // network details are loaded once here, regardless of whether the WMI queries above succeeded
                ReloadNetworkInfo ();
}
}
public static void ReloadNetworkInfo ()
{
_currentPublicIp = getCurrentIpAddress ();
getAmazonWebServicesInfo ();
}
private static string getCurrentIpAddress ()
{
try
{
return System.Net.Dns.GetHostAddresses (System.Net.Dns.GetHostName ())
.Where (i => i.AddressFamily == System.Net.Sockets.AddressFamily.InterNetwork)
.Select (i => i.ToString ()).FirstOrDefault ();
}
catch
{
return "";
}
}
private static void getAmazonWebServicesInfo ()
{
try
{
using (var web = new System.Net.WebClient ())
{
_awsInstanceType = web.DownloadString ("http://169.254.169.254/latest/meta-data/instance-type");
_awsInstanceId = web.DownloadString ("http://169.254.169.254/latest/meta-data/instance-id");
_awsAmiId = web.DownloadString ("http://169.254.169.254/latest/meta-data/ami-id");
_awsPublicIp = web.DownloadString ("http://169.254.169.254/latest/meta-data/public-ipv4");
_currentPublicIp = _awsPublicIp;
}
}
catch
{
_awsInstanceType = null;
_awsInstanceId = null;
}
}
public static long GetApplicationTotalMemory ()
{
using (var p = Process.GetCurrentProcess ())
{
return p.PrivateMemorySize64;
}
}
public static int GetCurrentProcessorLoad ()
{
try
{
int load = 0;
int count = 0;
var mosProcessor = new ManagementObjectSearcher ("SELECT LoadPercentage FROM Win32_Processor");
foreach (var moProcessor in mosProcessor.Get ())
{
count++;
load += Convert.ToInt32 (moProcessor["LoadPercentage"]);
}
return load / count;
}
catch
{
return 0;
}
}
private static long GetCurrentFreeMemory ()
{
try
{
var mosProcessor = new ManagementObjectSearcher ("SELECT FreePhysicalMemory FROM Win32_OperatingSystem");
foreach (var moProcessor in mosProcessor.Get ())
{
return Convert.ToInt64 (moProcessor["FreePhysicalMemory"]);
}
}
catch
{
}
return 0;
}
private static System.Threading.Timer m_runningTask = null;
private static MonitoringLevels _monitoringLevel = MonitoringLevels.Paused;
private static int _historyLengthMinutes = 2;
public enum MonitoringLevels { Paused = 0, VeryHigh = 2, High = 5, MidHigh = 10, Normal = 20, MidLow = 40, Low = 60 }
public static void StartMonitoring (MonitoringLevels level, int historyLengthMinutes = 2)
{
// set timer
if (level == MonitoringLevels.Paused)
{
StopMonitoring ();
}
else if (m_runningTask != null)
{
if (_monitoringLevel != level)
{
m_runningTask.Change (0, (int)level * 1000);
}
}
else
{
loadInfo ();
m_runningTask = new System.Threading.Timer (UpdateEvent, null, 0, (int)level * 1000);
}
if (historyLengthMinutes < 1)
historyLengthMinutes = 1;
_historyLengthMinutes = historyLengthMinutes;
_monitoringLevel = level;
}
public static void StopMonitoring ()
{
if (m_runningTask != null)
m_runningTask.Dispose ();
m_runningTask = null;
}
static ConcurrentQueue<int> processorLoadHistory = new ConcurrentQueue<int> ();
private static void UpdateEvent (object state)
{
int load = GetCurrentProcessorLoad ();
processorLoadHistory.Enqueue (load);
// calculate average value
int avgLoad = 0;
foreach (int v in processorLoadHistory)
{
avgLoad += v;
}
avgLoad = avgLoad / processorLoadHistory.Count;
            // trim queue size (keep roughly _historyLengthMinutes worth of samples)
int sz = (_historyLengthMinutes * 60) / ((int)_monitoringLevel > 0 ? (int)_monitoringLevel : 1);
if (sz < 2)
sz = 2;
            while (processorLoadHistory.Count > sz)
            {
                int discarded;
                processorLoadHistory.TryDequeue (out discarded); // discard oldest samples without clobbering avgLoad
            }
// update values
Interlocked.Exchange (ref _currentProcessorLoad, load);
Interlocked.Exchange (ref _averageProcessorLoad, avgLoad);
Interlocked.Exchange (ref _currentFreeMemory, GetCurrentFreeMemory ());
Interlocked.Exchange (ref _applicationTotalMemory, GetApplicationTotalMemory ());
}
}
}
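// A minimal usage sketch: start the monitor, read a few of the cached values, then stop it.
// The polling level and sleep duration below are arbitrary choices for illustration.
namespace BigDataPipeline.Core.Examples
{
    using System;
    using System.Threading;

    static class SystemInformationUsageSketch
    {
        public static void Run()
        {
            // High polls every 5 seconds; keep roughly 2 minutes of samples for the average.
            SystemInformation.StartMonitoring (SystemInformation.MonitoringLevels.High, 2);

            Thread.Sleep (TimeSpan.FromSeconds (30));

            Console.WriteLine ("Host: {0} ({1})", SystemInformation.SystemName, SystemInformation.OsName);
            Console.WriteLine ("CPU now / avg: {0}% / {1}%", SystemInformation.CurrentProcessorLoad, SystemInformation.AverageProcessorLoad);
            Console.WriteLine ("Free physical memory (KB): {0}", SystemInformation.CurrentFreeMemory);

            SystemInformation.StopMonitoring ();
        }
    }
}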
| |
using System;
using System.Windows.Forms;
using System.Drawing;
using System.ComponentModel;
using System.Collections;
namespace fyiReporting.RdlDesign
{
internal partial class FindTab : System.Windows.Forms.Form
{
#region Windows Form Designer generated code
private System.Windows.Forms.TabPage tabFind;
private System.Windows.Forms.Label label1;
private System.Windows.Forms.TextBox txtFind;
private System.Windows.Forms.RadioButton radioUp;
private System.Windows.Forms.RadioButton radioDown;
private System.Windows.Forms.GroupBox groupBox1;
private System.Windows.Forms.CheckBox chkCase;
public System.Windows.Forms.TabPage tabGoTo;
private System.Windows.Forms.Label label4;
private System.Windows.Forms.TextBox txtLine;
private System.Windows.Forms.Button btnNext;
private RdlEditPreview rdlEdit;
private System.Windows.Forms.Button btnGoto;
private System.Windows.Forms.Button btnCancel;
public System.Windows.Forms.TabPage tabReplace;
private System.Windows.Forms.Button btnFindNext;
private System.Windows.Forms.CheckBox chkMatchCase;
private System.Windows.Forms.Button btnReplaceAll;
private System.Windows.Forms.Button btnReplace;
private System.Windows.Forms.TextBox txtFindR;
private System.Windows.Forms.Label label3;
private System.Windows.Forms.Label label2;
private System.Windows.Forms.TextBox txtReplace;
private System.Windows.Forms.Button bCloseReplace;
private System.Windows.Forms.Button bCloseGoto;
public System.Windows.Forms.TabControl tcFRG;
private System.ComponentModel.Container components = null;
private void InitializeComponent()
{
System.ComponentModel.ComponentResourceManager resources = new System.ComponentModel.ComponentResourceManager(typeof(FindTab));
this.tcFRG = new System.Windows.Forms.TabControl();
this.tabFind = new System.Windows.Forms.TabPage();
this.btnCancel = new System.Windows.Forms.Button();
this.btnNext = new System.Windows.Forms.Button();
this.chkCase = new System.Windows.Forms.CheckBox();
this.groupBox1 = new System.Windows.Forms.GroupBox();
this.radioUp = new System.Windows.Forms.RadioButton();
this.radioDown = new System.Windows.Forms.RadioButton();
this.label1 = new System.Windows.Forms.Label();
this.txtFind = new System.Windows.Forms.TextBox();
this.tabReplace = new System.Windows.Forms.TabPage();
this.bCloseReplace = new System.Windows.Forms.Button();
this.btnFindNext = new System.Windows.Forms.Button();
this.chkMatchCase = new System.Windows.Forms.CheckBox();
this.btnReplaceAll = new System.Windows.Forms.Button();
this.btnReplace = new System.Windows.Forms.Button();
this.txtFindR = new System.Windows.Forms.TextBox();
this.label3 = new System.Windows.Forms.Label();
this.label2 = new System.Windows.Forms.Label();
this.txtReplace = new System.Windows.Forms.TextBox();
this.tabGoTo = new System.Windows.Forms.TabPage();
this.bCloseGoto = new System.Windows.Forms.Button();
this.txtLine = new System.Windows.Forms.TextBox();
this.label4 = new System.Windows.Forms.Label();
this.btnGoto = new System.Windows.Forms.Button();
this.tcFRG.SuspendLayout();
this.tabFind.SuspendLayout();
this.groupBox1.SuspendLayout();
this.tabReplace.SuspendLayout();
this.tabGoTo.SuspendLayout();
this.SuspendLayout();
//
// tcFRG
//
resources.ApplyResources(this.tcFRG, "tcFRG");
this.tcFRG.Controls.Add(this.tabFind);
this.tcFRG.Controls.Add(this.tabReplace);
this.tcFRG.Controls.Add(this.tabGoTo);
this.tcFRG.Name = "tcFRG";
this.tcFRG.SelectedIndex = 0;
this.tcFRG.SelectedIndexChanged += new System.EventHandler(this.tcFRG_SelectedIndexChanged);
this.tcFRG.Enter += new System.EventHandler(this.tcFRG_Enter);
//
// tabFind
//
resources.ApplyResources(this.tabFind, "tabFind");
this.tabFind.Controls.Add(this.btnCancel);
this.tabFind.Controls.Add(this.btnNext);
this.tabFind.Controls.Add(this.chkCase);
this.tabFind.Controls.Add(this.groupBox1);
this.tabFind.Controls.Add(this.label1);
this.tabFind.Controls.Add(this.txtFind);
this.tabFind.Name = "tabFind";
this.tabFind.Tag = "find";
//
// btnCancel
//
resources.ApplyResources(this.btnCancel, "btnCancel");
this.btnCancel.DialogResult = System.Windows.Forms.DialogResult.Cancel;
this.btnCancel.Name = "btnCancel";
this.btnCancel.Click += new System.EventHandler(this.btnCancel_Click);
//
// btnNext
//
resources.ApplyResources(this.btnNext, "btnNext");
this.btnNext.Name = "btnNext";
this.btnNext.Click += new System.EventHandler(this.btnNext_Click);
//
// chkCase
//
resources.ApplyResources(this.chkCase, "chkCase");
this.chkCase.Name = "chkCase";
//
// groupBox1
//
resources.ApplyResources(this.groupBox1, "groupBox1");
this.groupBox1.Controls.Add(this.radioUp);
this.groupBox1.Controls.Add(this.radioDown);
this.groupBox1.Name = "groupBox1";
this.groupBox1.TabStop = false;
//
// radioUp
//
resources.ApplyResources(this.radioUp, "radioUp");
this.radioUp.Name = "radioUp";
this.radioUp.CheckedChanged += new System.EventHandler(this.radioUp_CheckedChanged);
//
// radioDown
//
resources.ApplyResources(this.radioDown, "radioDown");
this.radioDown.Name = "radioDown";
//
// label1
//
resources.ApplyResources(this.label1, "label1");
this.label1.Name = "label1";
//
// txtFind
//
resources.ApplyResources(this.txtFind, "txtFind");
this.txtFind.Name = "txtFind";
this.txtFind.TextChanged += new System.EventHandler(this.txtFind_TextChanged);
//
// tabReplace
//
resources.ApplyResources(this.tabReplace, "tabReplace");
this.tabReplace.Controls.Add(this.bCloseReplace);
this.tabReplace.Controls.Add(this.btnFindNext);
this.tabReplace.Controls.Add(this.chkMatchCase);
this.tabReplace.Controls.Add(this.btnReplaceAll);
this.tabReplace.Controls.Add(this.btnReplace);
this.tabReplace.Controls.Add(this.txtFindR);
this.tabReplace.Controls.Add(this.label3);
this.tabReplace.Controls.Add(this.label2);
this.tabReplace.Controls.Add(this.txtReplace);
this.tabReplace.Name = "tabReplace";
this.tabReplace.Tag = "replace";
//
// bCloseReplace
//
resources.ApplyResources(this.bCloseReplace, "bCloseReplace");
this.bCloseReplace.Name = "bCloseReplace";
this.bCloseReplace.Click += new System.EventHandler(this.btnCancel_Click);
//
// btnFindNext
//
resources.ApplyResources(this.btnFindNext, "btnFindNext");
this.btnFindNext.Name = "btnFindNext";
this.btnFindNext.Click += new System.EventHandler(this.btnFindNext_Click);
//
// chkMatchCase
//
resources.ApplyResources(this.chkMatchCase, "chkMatchCase");
this.chkMatchCase.Name = "chkMatchCase";
//
// btnReplaceAll
//
resources.ApplyResources(this.btnReplaceAll, "btnReplaceAll");
this.btnReplaceAll.Name = "btnReplaceAll";
this.btnReplaceAll.Click += new System.EventHandler(this.btnReplaceAll_Click);
//
// btnReplace
//
resources.ApplyResources(this.btnReplace, "btnReplace");
this.btnReplace.Name = "btnReplace";
this.btnReplace.Click += new System.EventHandler(this.btnReplace_Click);
//
// txtFindR
//
resources.ApplyResources(this.txtFindR, "txtFindR");
this.txtFindR.Name = "txtFindR";
this.txtFindR.TextChanged += new System.EventHandler(this.txtFindR_TextChanged);
//
// label3
//
resources.ApplyResources(this.label3, "label3");
this.label3.Name = "label3";
//
// label2
//
resources.ApplyResources(this.label2, "label2");
this.label2.Name = "label2";
//
// txtReplace
//
resources.ApplyResources(this.txtReplace, "txtReplace");
this.txtReplace.Name = "txtReplace";
//
// tabGoTo
//
resources.ApplyResources(this.tabGoTo, "tabGoTo");
this.tabGoTo.Controls.Add(this.bCloseGoto);
this.tabGoTo.Controls.Add(this.txtLine);
this.tabGoTo.Controls.Add(this.label4);
this.tabGoTo.Controls.Add(this.btnGoto);
this.tabGoTo.Name = "tabGoTo";
this.tabGoTo.Tag = "goto";
//
// bCloseGoto
//
resources.ApplyResources(this.bCloseGoto, "bCloseGoto");
this.bCloseGoto.Name = "bCloseGoto";
this.bCloseGoto.Click += new System.EventHandler(this.btnCancel_Click);
//
// txtLine
//
resources.ApplyResources(this.txtLine, "txtLine");
this.txtLine.Name = "txtLine";
//
// label4
//
resources.ApplyResources(this.label4, "label4");
this.label4.Name = "label4";
//
// btnGoto
//
resources.ApplyResources(this.btnGoto, "btnGoto");
this.btnGoto.Name = "btnGoto";
this.btnGoto.Click += new System.EventHandler(this.btnGoto_Click);
//
// FindTab
//
resources.ApplyResources(this, "$this");
this.CancelButton = this.btnCancel;
this.Controls.Add(this.tcFRG);
this.Name = "FindTab";
this.TopMost = true;
this.tcFRG.ResumeLayout(false);
this.tabFind.ResumeLayout(false);
this.tabFind.PerformLayout();
this.groupBox1.ResumeLayout(false);
this.tabReplace.ResumeLayout(false);
this.tabReplace.PerformLayout();
this.tabGoTo.ResumeLayout(false);
this.tabGoTo.PerformLayout();
this.ResumeLayout(false);
}
#endregion
protected override void Dispose( bool disposing )
{
if( disposing )
{
if(components != null)
{
components.Dispose();
}
}
base.Dispose( disposing );
}
}
}
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;
using System.Reflection;
namespace CookBookAPI.Areas.HelpPage
{
/// <summary>
/// This class will create an object of a given type and populate it with sample data.
/// </summary>
public class ObjectGenerator
{
internal const int DefaultCollectionSize = 2;
private readonly SimpleTypeObjectGenerator SimpleObjectGenerator = new SimpleTypeObjectGenerator();
/// <summary>
/// Generates an object for a given type. The type needs to be public, have a public default constructor and settable public properties/fields. Currently it supports the following types:
/// Simple types: <see cref="int"/>, <see cref="string"/>, <see cref="Enum"/>, <see cref="DateTime"/>, <see cref="Uri"/>, etc.
/// Complex types: POCO types.
/// Nullables: <see cref="Nullable{T}"/>.
/// Arrays: arrays of simple types or complex types.
/// Key value pairs: <see cref="KeyValuePair{TKey,TValue}"/>
/// Tuples: <see cref="Tuple{T1}"/>, <see cref="Tuple{T1,T2}"/>, etc
/// Dictionaries: <see cref="IDictionary{TKey,TValue}"/> or anything deriving from <see cref="IDictionary{TKey,TValue}"/>.
/// Collections: <see cref="IList{T}"/>, <see cref="IEnumerable{T}"/>, <see cref="ICollection{T}"/>, <see cref="IList"/>, <see cref="IEnumerable"/>, <see cref="ICollection"/> or anything deriving from <see cref="ICollection{T}"/> or <see cref="IList"/>.
/// Queryables: <see cref="IQueryable"/>, <see cref="IQueryable{T}"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>An object of the given type.</returns>
public object GenerateObject(Type type)
{
return GenerateObject(type, new Dictionary<Type, object>());
}
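        // Usage example (the type argument is arbitrary): the call below returns a dictionary
        // populated with up to DefaultCollectionSize sample entries whose values are sample lists.
        //   object sample = new ObjectGenerator().GenerateObject(typeof(Dictionary<string, List<int>>));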
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Here we just want to return null if anything goes wrong.")]
private object GenerateObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
try
{
if (SimpleTypeObjectGenerator.CanGenerateObject(type))
{
return SimpleObjectGenerator.GenerateObject(type);
}
if (type.IsArray)
{
return GenerateArray(type, DefaultCollectionSize, createdObjectReferences);
}
if (type.IsGenericType)
{
return GenerateGenericType(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IDictionary))
{
return GenerateDictionary(typeof(Hashtable), DefaultCollectionSize, createdObjectReferences);
}
if (typeof(IDictionary).IsAssignableFrom(type))
{
return GenerateDictionary(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IList) ||
type == typeof(IEnumerable) ||
type == typeof(ICollection))
{
return GenerateCollection(typeof(ArrayList), DefaultCollectionSize, createdObjectReferences);
}
if (typeof(IList).IsAssignableFrom(type))
{
return GenerateCollection(type, DefaultCollectionSize, createdObjectReferences);
}
if (type == typeof(IQueryable))
{
return GenerateQueryable(type, DefaultCollectionSize, createdObjectReferences);
}
if (type.IsEnum)
{
return GenerateEnum(type);
}
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
}
catch
{
// Returns null if anything fails
return null;
}
return null;
}
private static object GenerateGenericType(Type type, int collectionSize, Dictionary<Type, object> createdObjectReferences)
{
Type genericTypeDefinition = type.GetGenericTypeDefinition();
if (genericTypeDefinition == typeof(Nullable<>))
{
return GenerateNullable(type, createdObjectReferences);
}
if (genericTypeDefinition == typeof(KeyValuePair<,>))
{
return GenerateKeyValuePair(type, createdObjectReferences);
}
if (IsTuple(genericTypeDefinition))
{
return GenerateTuple(type, createdObjectReferences);
}
Type[] genericArguments = type.GetGenericArguments();
if (genericArguments.Length == 1)
{
if (genericTypeDefinition == typeof(IList<>) ||
genericTypeDefinition == typeof(IEnumerable<>) ||
genericTypeDefinition == typeof(ICollection<>))
{
Type collectionType = typeof(List<>).MakeGenericType(genericArguments);
return GenerateCollection(collectionType, collectionSize, createdObjectReferences);
}
if (genericTypeDefinition == typeof(IQueryable<>))
{
return GenerateQueryable(type, collectionSize, createdObjectReferences);
}
Type closedCollectionType = typeof(ICollection<>).MakeGenericType(genericArguments[0]);
if (closedCollectionType.IsAssignableFrom(type))
{
return GenerateCollection(type, collectionSize, createdObjectReferences);
}
}
if (genericArguments.Length == 2)
{
if (genericTypeDefinition == typeof(IDictionary<,>))
{
Type dictionaryType = typeof(Dictionary<,>).MakeGenericType(genericArguments);
return GenerateDictionary(dictionaryType, collectionSize, createdObjectReferences);
}
Type closedDictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments[0], genericArguments[1]);
if (closedDictionaryType.IsAssignableFrom(type))
{
return GenerateDictionary(type, collectionSize, createdObjectReferences);
}
}
if (type.IsPublic || type.IsNestedPublic)
{
return GenerateComplexObject(type, createdObjectReferences);
}
return null;
}
private static object GenerateTuple(Type type, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = type.GetGenericArguments();
object[] parameterValues = new object[genericArgs.Length];
bool failedToCreateTuple = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < genericArgs.Length; i++)
{
parameterValues[i] = objectGenerator.GenerateObject(genericArgs[i], createdObjectReferences);
failedToCreateTuple &= parameterValues[i] == null;
}
if (failedToCreateTuple)
{
return null;
}
object result = Activator.CreateInstance(type, parameterValues);
return result;
}
private static bool IsTuple(Type genericTypeDefinition)
{
return genericTypeDefinition == typeof(Tuple<>) ||
genericTypeDefinition == typeof(Tuple<,>) ||
genericTypeDefinition == typeof(Tuple<,,>) ||
genericTypeDefinition == typeof(Tuple<,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,>) ||
genericTypeDefinition == typeof(Tuple<,,,,,,,>);
}
private static object GenerateKeyValuePair(Type keyValuePairType, Dictionary<Type, object> createdObjectReferences)
{
Type[] genericArgs = keyValuePairType.GetGenericArguments();
Type typeK = genericArgs[0];
Type typeV = genericArgs[1];
ObjectGenerator objectGenerator = new ObjectGenerator();
object keyObject = objectGenerator.GenerateObject(typeK, createdObjectReferences);
object valueObject = objectGenerator.GenerateObject(typeV, createdObjectReferences);
if (keyObject == null && valueObject == null)
{
// Failed to create both the key and the value
return null;
}
object result = Activator.CreateInstance(keyValuePairType, keyObject, valueObject);
return result;
}
private static object GenerateArray(Type arrayType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = arrayType.GetElementType();
Array result = Array.CreateInstance(type, size);
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
result.SetValue(element, i);
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
private static object GenerateDictionary(Type dictionaryType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type typeK = typeof(object);
Type typeV = typeof(object);
if (dictionaryType.IsGenericType)
{
Type[] genericArgs = dictionaryType.GetGenericArguments();
typeK = genericArgs[0];
typeV = genericArgs[1];
}
object result = Activator.CreateInstance(dictionaryType);
MethodInfo addMethod = dictionaryType.GetMethod("Add") ?? dictionaryType.GetMethod("TryAdd");
MethodInfo containsMethod = dictionaryType.GetMethod("Contains") ?? dictionaryType.GetMethod("ContainsKey");
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object newKey = objectGenerator.GenerateObject(typeK, createdObjectReferences);
if (newKey == null)
{
// Cannot generate a valid key
return null;
}
bool containsKey = (bool)containsMethod.Invoke(result, new object[] { newKey });
if (!containsKey)
{
object newValue = objectGenerator.GenerateObject(typeV, createdObjectReferences);
addMethod.Invoke(result, new object[] { newKey, newValue });
}
}
return result;
}
private static object GenerateEnum(Type enumType)
{
Array possibleValues = Enum.GetValues(enumType);
if (possibleValues.Length > 0)
{
return possibleValues.GetValue(0);
}
return null;
}
private static object GenerateQueryable(Type queryableType, int size, Dictionary<Type, object> createdObjectReferences)
{
bool isGeneric = queryableType.IsGenericType;
object list;
if (isGeneric)
{
Type listType = typeof(List<>).MakeGenericType(queryableType.GetGenericArguments());
list = GenerateCollection(listType, size, createdObjectReferences);
}
else
{
list = GenerateArray(typeof(object[]), size, createdObjectReferences);
}
if (list == null)
{
return null;
}
if (isGeneric)
{
Type argumentType = typeof(IEnumerable<>).MakeGenericType(queryableType.GetGenericArguments());
MethodInfo asQueryableMethod = typeof(Queryable).GetMethod("AsQueryable", new[] { argumentType });
return asQueryableMethod.Invoke(null, new[] { list });
}
return Queryable.AsQueryable((IEnumerable)list);
}
private static object GenerateCollection(Type collectionType, int size, Dictionary<Type, object> createdObjectReferences)
{
Type type = collectionType.IsGenericType ?
collectionType.GetGenericArguments()[0] :
typeof(object);
object result = Activator.CreateInstance(collectionType);
MethodInfo addMethod = collectionType.GetMethod("Add");
bool areAllElementsNull = true;
ObjectGenerator objectGenerator = new ObjectGenerator();
for (int i = 0; i < size; i++)
{
object element = objectGenerator.GenerateObject(type, createdObjectReferences);
addMethod.Invoke(result, new object[] { element });
areAllElementsNull &= element == null;
}
if (areAllElementsNull)
{
return null;
}
return result;
}
private static object GenerateNullable(Type nullableType, Dictionary<Type, object> createdObjectReferences)
{
Type type = nullableType.GetGenericArguments()[0];
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type, createdObjectReferences);
}
private static object GenerateComplexObject(Type type, Dictionary<Type, object> createdObjectReferences)
{
object result = null;
if (createdObjectReferences.TryGetValue(type, out result))
{
// The object has been created already, just return it. This will handle the circular reference case.
return result;
}
if (type.IsValueType)
{
result = Activator.CreateInstance(type);
}
else
{
ConstructorInfo defaultCtor = type.GetConstructor(Type.EmptyTypes);
if (defaultCtor == null)
{
// Cannot instantiate the type because it doesn't have a default constructor
return null;
}
result = defaultCtor.Invoke(new object[0]);
}
createdObjectReferences.Add(type, result);
SetPublicProperties(type, result, createdObjectReferences);
SetPublicFields(type, result, createdObjectReferences);
return result;
}
private static void SetPublicProperties(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
PropertyInfo[] properties = type.GetProperties(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (PropertyInfo property in properties)
{
if (property.CanWrite)
{
object propertyValue = objectGenerator.GenerateObject(property.PropertyType, createdObjectReferences);
property.SetValue(obj, propertyValue, null);
}
}
}
private static void SetPublicFields(Type type, object obj, Dictionary<Type, object> createdObjectReferences)
{
FieldInfo[] fields = type.GetFields(BindingFlags.Public | BindingFlags.Instance);
ObjectGenerator objectGenerator = new ObjectGenerator();
foreach (FieldInfo field in fields)
{
object fieldValue = objectGenerator.GenerateObject(field.FieldType, createdObjectReferences);
field.SetValue(obj, fieldValue);
}
}
private class SimpleTypeObjectGenerator
{
private long _index = 0;
private static readonly Dictionary<Type, Func<long, object>> DefaultGenerators = InitializeGenerators();
[SuppressMessage("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity", Justification = "These are simple type factories and cannot be split up.")]
private static Dictionary<Type, Func<long, object>> InitializeGenerators()
{
return new Dictionary<Type, Func<long, object>>
{
{ typeof(Boolean), index => true },
{ typeof(Byte), index => (Byte)64 },
{ typeof(Char), index => (Char)65 },
{ typeof(DateTime), index => DateTime.Now },
{ typeof(DateTimeOffset), index => new DateTimeOffset(DateTime.Now) },
{ typeof(DBNull), index => DBNull.Value },
{ typeof(Decimal), index => (Decimal)index },
{ typeof(Double), index => (Double)(index + 0.1) },
{ typeof(Guid), index => Guid.NewGuid() },
{ typeof(Int16), index => (Int16)(index % Int16.MaxValue) },
{ typeof(Int32), index => (Int32)(index % Int32.MaxValue) },
{ typeof(Int64), index => (Int64)index },
{ typeof(Object), index => new object() },
{ typeof(SByte), index => (SByte)64 },
{ typeof(Single), index => (Single)(index + 0.1) },
{
typeof(String), index =>
{
return String.Format(CultureInfo.CurrentCulture, "sample string {0}", index);
}
},
{
typeof(TimeSpan), index =>
{
return TimeSpan.FromTicks(1234567);
}
},
{ typeof(UInt16), index => (UInt16)(index % UInt16.MaxValue) },
{ typeof(UInt32), index => (UInt32)(index % UInt32.MaxValue) },
{ typeof(UInt64), index => (UInt64)index },
{
typeof(Uri), index =>
{
return new Uri(String.Format(CultureInfo.CurrentCulture, "http://webapihelppage{0}.com", index));
}
},
};
}
public static bool CanGenerateObject(Type type)
{
return DefaultGenerators.ContainsKey(type);
}
public object GenerateObject(Type type)
{
return DefaultGenerators[type](++_index);
}
}
}
}
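// ---- Hypothetical usage sketch (separate illustrative file), not part of the original source. ----
// It assumes ObjectGenerator is accessible from the caller and exposes a public
// GenerateObject(Type) overload, as in the ASP.NET Web API help page sample this generator
// follows; SampleOrder and its members are invented for illustration only.
using System;
using System.Collections.Generic;
public class SampleOrder
{
public int Id { get; set; }
public string Customer { get; set; }
public List<string> Lines { get; set; }
// Circular reference: GenerateComplexObject registers the instance in
// createdObjectReferences before filling properties, so Parent resolves to the same object.
public SampleOrder Parent { get; set; }
}
public static class ObjectGeneratorDemo
{
public static void Main()
{
var generator = new ObjectGenerator();
var sample = (SampleOrder)generator.GenerateObject(typeof(SampleOrder));
// Id and Customer receive simple sample values, Lines receives DefaultCollectionSize entries.
Console.WriteLine(sample.Customer);
}
}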
| |
/*
* Copyright (c) Contributors, http://aurora-sim.org/, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the Aurora-Sim Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Reflection;
using Nini.Config;
using OpenMetaverse;
using Aurora.Framework;
using OpenSim.Region.Framework.Interfaces;
namespace Aurora.Modules.Entities.ObjectDelete
{
public class DeleteToInventoryHolder
{
public DeRezAction action;
public UUID agentId;
public UUID folderID;
public List<ISceneEntity> objectGroups;
public bool permissionToDelete;
public bool permissionToTake;
}
/// <summary>
/// Asynchronously derez objects. This is used to derez a large number of objects to inventory without holding
/// up the main client thread.
/// </summary>
public class AsyncSceneObjectGroupDeleter : INonSharedRegionModule, IAsyncSceneObjectGroupDeleter
{
private readonly Queue<DeleteToInventoryHolder> m_removeFromSimQueue = new Queue<DeleteToInventoryHolder>();
private bool DeleteLoopInUse;
/// <value>
/// Is the deleter currently enabled?
/// </value>
public bool Enabled;
private IScene m_scene;
#region INonSharedRegionModule Members
public string Name
{
get { return "AsyncSceneObjectGroupDeleter"; }
}
public Type ReplaceableInterface
{
get { return null; }
}
public void Initialise(IConfigSource source)
{
}
public void Close()
{
}
public void AddRegion(IScene scene)
{
scene.RegisterModuleInterface<IAsyncSceneObjectGroupDeleter>(this);
m_scene = scene;
}
public void RemoveRegion(IScene scene)
{
scene.UnregisterModuleInterface<IAsyncSceneObjectGroupDeleter>(this);
}
public void RegionLoaded(IScene scene)
{
}
#endregion
#region Delete To Inventory
/// <summary>
/// Queue the given objects for removal from the scene and delivery to the agent's inventory
/// </summary>
public void DeleteToInventory(DeRezAction action, UUID folderID,
List<ISceneEntity> objectGroups, UUID AgentId,
bool permissionToDelete, bool permissionToTake)
{
DeleteToInventoryHolder dtis = new DeleteToInventoryHolder
{
action = action,
folderID = folderID,
objectGroups = objectGroups,
agentId = AgentId,
permissionToDelete = permissionToDelete,
permissionToTake = permissionToTake
};
//Do this before the locking so that the objects 'appear' gone and the client doesn't think things have gone wrong
if (permissionToDelete)
{
DeleteGroups(objectGroups);
}
lock (m_removeFromSimQueue)
{
m_removeFromSimQueue.Enqueue(dtis);
}
if (!DeleteLoopInUse)
{
DeleteLoopInUse = true;
//MainConsole.Instance.Debug("[SCENE]: Starting delete loop");
Util.FireAndForget(DoDeleteObject);
}
}
private void DeleteGroups(List<ISceneEntity> objectGroups)
{
m_scene.ForEachScenePresence(delegate(IScenePresence avatar)
{
lock (objectGroups)
{
foreach (ISceneEntity grp in objectGroups)
{
avatar.ControllingClient.SendKillObject(
m_scene.RegionInfo.RegionHandle,
grp.ChildrenEntities().ToArray());
}
}
});
}
public void DoDeleteObject(object o)
{
if (DeleteObject())
{
//Requeue if there is anything left in the queue
Util.FireAndForget(DoDeleteObject);
}
else
{
DeleteLoopInUse = false;
//MainConsole.Instance.Debug("[SCENE]: Ending delete loop");
}
}
public bool DeleteObject()
{
DeleteToInventoryHolder x = null;
try
{
int left = 0;
lock (m_removeFromSimQueue)
{
left = m_removeFromSimQueue.Count;
}
if (left > 0)
{
lock (m_removeFromSimQueue)
{
x = m_removeFromSimQueue.Dequeue();
}
if (x.permissionToDelete)
{
IBackupModule backup = m_scene.RequestModuleInterface<IBackupModule>();
if (backup != null)
backup.DeleteSceneObjects(x.objectGroups.ToArray(), true, true);
}
MainConsole.Instance.DebugFormat(
"[SCENE]: Sending object to user's inventory, {0} item(s) remaining.", left);
if (x.permissionToTake)
{
try
{
IInventoryAccessModule invAccess = m_scene.RequestModuleInterface<IInventoryAccessModule>();
UUID itemID;
if (invAccess != null)
invAccess.DeleteToInventory(x.action, x.folderID, x.objectGroups, x.agentId, out itemID);
}
catch (Exception e)
{
MainConsole.Instance.ErrorFormat(
"[ASYNC DELETER]: Exception background sending object: {0}{1}", e.Message, e.StackTrace);
}
}
return true;
}
}
catch (Exception e)
{
// We can't put the object group details in here since the root part may have disappeared (which is where these sit).
// FIXME: This needs to be fixed.
MainConsole.Instance.ErrorFormat(
"[SCENE]: Queued sending of scene object to agent {0} {1} failed: {2}",
(x != null ? x.agentId.ToString() : "unavailable"),
(x != null ? x.agentId.ToString() : "unavailable"), e);
}
//MainConsole.Instance.Debug("[SCENE]: No objects left in delete queue.");
return false;
}
#endregion
}
}
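// ---- Hypothetical caller sketch (separate illustrative file), not part of the original source. ----
// It only uses members shown above (RequestModuleInterface and DeleteToInventory); the
// namespaces of IScene, ISceneEntity, DeRezAction and UUID are assumed to match the usings
// of the module file, and the caller is expected to have validated permissions already.
using System.Collections.Generic;
using OpenMetaverse;
using Aurora.Framework;
using OpenSim.Region.Framework.Interfaces;
namespace Aurora.Modules.Entities.ObjectDelete.Examples
{
public static class DerezExample
{
public static void SendToInventory(IScene scene, DeRezAction action, List<ISceneEntity> groups,
UUID agentId, UUID folderId, bool deleteFromScene)
{
IAsyncSceneObjectGroupDeleter deleter = scene.RequestModuleInterface<IAsyncSceneObjectGroupDeleter>();
if (deleter == null)
return;
// permissionToDelete removes the groups from the scene; permissionToTake copies them
// into the given inventory folder on the async delete loop.
deleter.DeleteToInventory(action, folderId, groups, agentId, deleteFromScene, true);
}
}
}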
| |
using UnityEngine;
using System.Collections;
using System.Collections.Generic;
using UnityEditor;
using Xft;
public class XEditorTool
{
public static float LabelWidth = 130f;
public static float MinHeight = 26f;
public static float MinHeightBig = 29f;
public static int ColorTexWidth = 512;
public static int ColorTexHeight = 32;
public enum EArea
{
CheckBox,
Texture,
AlwaysToggle,
None,
}
public Dictionary<string, XArea> XAreas = new Dictionary<string, XArea>();
public EffectLayerCustom MyEditor;
public class XArea
{
public bool Open = false;
public Rect LastRect;
public bool Enable = false;
}
static public string GetXffectPath()
{
Shader temp = Shader.Find ("Xffect/PP/radial_blur_mask");
string assetPath = AssetDatabase.GetAssetPath(temp);
int index = assetPath.LastIndexOf("Xffect");
string basePath = assetPath.Substring (0,index+7);
return basePath;
}
public XArea BeginXArea(string id,GUIStyle style, float minHeight,EArea type,SerializedProperty bobj)
{
return BeginXArea(id,style,minHeight,type,bobj,0f);
}
public XArea BeginXArea(string id,GUIStyle style, float minHeight,EArea type,SerializedProperty bobj, float offset)
{
GUIStyle buttonStyle = EffectLayerCustom.Xbutton;
if (!XAreas.ContainsKey(id))
{
XAreas.Add(id,new XArea());
}
//find my area.
XArea m = XAreas[id];
style.stretchWidth = true;
Rect gotLastRect = GUILayoutUtility.GetRect (new GUIContent (),style,GUILayout.Height (m.LastRect.height));
GUILayout.BeginArea (m.LastRect,style);
Rect newRect = EditorGUILayout.BeginVertical();
//head bar
EditorGUILayout.BeginHorizontal();
if (type == EArea.CheckBox || type == EArea.AlwaysToggle)
{
if (type == EArea.CheckBox)
{
//mirror the serialized bool into the toggle state, then write the edited value back.
m.Enable = bobj.boolValue;
m.Enable = GUILayout.Toggle(m.Enable,GUIContent.none,EffectLayerCustom.Xtoggle, GUILayout.Width(18f),GUILayout.Height(18f));
bobj.boolValue = m.Enable;
}
else
{
m.Open = GUILayout.Toggle(m.Open,GUIContent.none,EffectLayerCustom.XToggle2, GUILayout.Width(18f),GUILayout.Height(18f));
m.Enable = true;
}
if (GUILayout.Button (id,buttonStyle,GUILayout.Height(20f)))
m.Open = !m.Open;
}
else if(type == EArea.Texture)
{
Texture tex = null;
string mname = "no material";
if (MyEditor.Script.Material != null)
{
tex = MyEditor.Script.Material.mainTexture;
mname = MyEditor.Script.Material.name;
}
GUILayout.Label(new GUIContent(tex),EffectLayerCustom.XTexture,GUILayout.Width(25f),GUILayout.Height(25f));
if (GUILayout.Button (mname,buttonStyle,GUILayout.Height(24f)))
m.Open = !m.Open;
m.Enable = true;
}
//pad the height slightly so the header row is not clipped.
newRect.height += 3f;
newRect.height += offset;
EditorGUILayout.EndHorizontal();
GUI.enabled = m.Enable;
if (!m.Open)
{
newRect.height = minHeight;
}
//calculate area size.
if (Event.current.type == EventType.Repaint || Event.current.type == EventType.ScrollWheel) {
newRect.x = gotLastRect.x;
newRect.y = gotLastRect.y;
newRect.width = gotLastRect.width;
newRect.height += style.padding.top+ style.padding.bottom;
if (m.LastRect != newRect) {
m.LastRect = newRect;
MyEditor.Repaint ();
}
}
return m;
}
public XArea BeginCommonArea(string id,string name,Editor editor,bool forceOpen)
{
GUIStyle buttonStyle = EffectLayerCustom.Xbutton;
GUIStyle style = EffectLayerCustom.XArea;
if (!XAreas.ContainsKey(id))
{
XAreas.Add(id,new XArea());
}
//find my area.
XArea m = XAreas[id];
style.stretchWidth = true;
Rect gotLastRect = GUILayoutUtility.GetRect (new GUIContent (),style,GUILayout.Height (m.LastRect.height));
GUILayout.BeginArea (m.LastRect,style);
Rect newRect = EditorGUILayout.BeginVertical();
//head bar
EditorGUILayout.BeginHorizontal();
m.Open = GUILayout.Toggle(m.Open,GUIContent.none,EffectLayerCustom.XToggle2, GUILayout.Width(18f),GUILayout.Height(18f));
m.Enable = true;
if (GUILayout.Button (name,buttonStyle,GUILayout.Height(20f)))
m.Open = !m.Open;
if (forceOpen)
m.Open = true;
//pad the height slightly so the header row is not clipped.
newRect.height += 3f;
EditorGUILayout.EndHorizontal();
GUI.enabled = m.Enable;
if (!m.Open)
{
newRect.height = MinHeight;
}
//calculate area size.
if (Event.current.type == EventType.Repaint || Event.current.type == EventType.ScrollWheel) {
newRect.x = gotLastRect.x;
newRect.y = gotLastRect.y;
newRect.width = gotLastRect.width;
newRect.height += style.padding.top+ style.padding.bottom;
if (m.LastRect != newRect) {
m.LastRect = newRect;
editor.Repaint ();
}
}
return m;
}
public void EndXArea()
{
GUI.enabled = true;
EditorGUILayout.EndVertical();
GUILayout.EndArea();
}
public void DrawEnumMagType(string label, string tooltip, SerializedProperty obj)
{
GUIContent content = null;
if (string.IsNullOrEmpty(tooltip))
content = new GUIContent(label);
else
content = new GUIContent(label,tooltip);
EditorGUILayout.BeginHorizontal();
EditorGUILayout.LabelField(content,EffectLayerCustom.XLabelField,GUILayout.Width(XEditorTool.LabelWidth));
obj.enumValueIndex = (int)(MAGTYPE)EditorGUILayout.EnumPopup((MAGTYPE)obj.enumValueIndex);
EditorGUILayout.EndHorizontal();
}
public void DrawInt(string label, string tooltip, SerializedProperty obj)
{
GUIContent content = null;
if (string.IsNullOrEmpty(tooltip))
content = new GUIContent(label);
else
content = new GUIContent(label,tooltip);
EditorGUILayout.BeginHorizontal();
EditorGUILayout.LabelField(content,EffectLayerCustom.XLabelField,GUILayout.Width(LabelWidth));
obj.intValue = EditorGUILayout.IntField(obj.intValue);
EditorGUILayout.EndHorizontal();
}
public void DrawVector3Field(string label, string tooltip, SerializedProperty obj)
{
obj.vector3Value = EditorGUILayout.Vector3Field(label,obj.vector3Value);
}
public void DrawVector2Field(string label, string tooltip, SerializedProperty obj)
{
obj.vector2Value = EditorGUILayout.Vector2Field(label,obj.vector2Value);
}
public void DrawCurve(string label, string tooltip, SerializedProperty obj)
{
DrawCurve(label,tooltip,obj,false);
}
public void DrawCurve(string label, string tooltip, SerializedProperty obj, bool limit)
{
GUIContent content = null;
if (string.IsNullOrEmpty(tooltip))
content = new GUIContent(label);
else
content = new GUIContent(label,tooltip);
EditorGUILayout.BeginHorizontal();
EditorGUILayout.LabelField(content,EffectLayerCustom.XLabelField,GUILayout.Width(LabelWidth));
if (!limit)
obj.animationCurveValue = EditorGUILayout.CurveField(obj.animationCurveValue);
else
obj.animationCurveValue = EditorGUILayout.CurveField(obj.animationCurveValue,Color.green, new Rect(0f,0f,99f,1f));
EditorGUILayout.EndHorizontal();
}
public void DrawCurve01(string label, string tooltip, SerializedProperty obj)
{
GUIContent content = null;
if (string.IsNullOrEmpty(tooltip))
content = new GUIContent(label);
else
content = new GUIContent(label,tooltip);
EditorGUILayout.BeginHorizontal();
EditorGUILayout.LabelField(content,EffectLayerCustom.XLabelField,GUILayout.Width(LabelWidth));
obj.animationCurveValue = EditorGUILayout.CurveField(obj.animationCurveValue,Color.green, new Rect(0f,0f,1f,1f));
EditorGUILayout.EndHorizontal();
}
public void DrawSlider(string label, string tooltip, SerializedProperty obj,float min, float max)
{
GUIContent content = null;
if (string.IsNullOrEmpty(tooltip))
content = new GUIContent(label);
else
content = new GUIContent(label,tooltip);
EditorGUILayout.BeginHorizontal();
EditorGUILayout.LabelField(content,EffectLayerCustom.XLabelField,GUILayout.Width(LabelWidth));
obj.floatValue = EditorGUILayout.Slider(obj.floatValue,min,max);
EditorGUILayout.EndHorizontal();
}
public void DrawText(string label, string tooltip, SerializedProperty obj)
{
GUIContent content = null;
if (string.IsNullOrEmpty(tooltip))
content = new GUIContent(label);
else
content = new GUIContent(label,tooltip);
EditorGUILayout.BeginHorizontal();
EditorGUILayout.LabelField(content,EffectLayerCustom.XLabelField,GUILayout.Width(LabelWidth));
obj.stringValue = EditorGUILayout.TextField(obj.stringValue);
EditorGUILayout.EndHorizontal();
}
public void DrawFloat(string label, string tooltip, SerializedProperty obj)
{
GUIContent content = null;
if (string.IsNullOrEmpty(tooltip))
content = new GUIContent(label);
else
content = new GUIContent(label,tooltip);
EditorGUILayout.BeginHorizontal();
EditorGUILayout.LabelField(content,EffectLayerCustom.XLabelField,GUILayout.Width(LabelWidth));
obj.floatValue = EditorGUILayout.FloatField(obj.floatValue);
EditorGUILayout.EndHorizontal();
}
public void DrawColor(string label, string tooltip, SerializedProperty obj)
{
GUIContent content = null;
if (string.IsNullOrEmpty(tooltip))
content = new GUIContent(label);
else
content = new GUIContent(label,tooltip);
EditorGUILayout.BeginHorizontal();
EditorGUILayout.LabelField(content,EffectLayerCustom.XLabelField,GUILayout.Width(LabelWidth));
obj.colorValue = EditorGUILayout.ColorField(obj.colorValue);
EditorGUILayout.EndHorizontal();
}
public bool DrawToggle(string label, string tooltip, SerializedProperty obj)
{
GUIContent content = null;
if (string.IsNullOrEmpty(tooltip))
content = new GUIContent(label);
else
content = new GUIContent(label,tooltip);
EditorGUILayout.BeginHorizontal();
EditorGUILayout.LabelField(content,EffectLayerCustom.XLabelField,GUILayout.Width(LabelWidth));
obj.boolValue = EditorGUILayout.Toggle(obj.boolValue);
EditorGUILayout.EndHorizontal();
return obj.boolValue;
}
public void DrawTransform(string label, string tooltip, SerializedProperty obj)
{
GUIContent content = null;
if (string.IsNullOrEmpty(tooltip))
content = new GUIContent(label);
else
content = new GUIContent(label,tooltip);
EditorGUILayout.BeginHorizontal();
EditorGUILayout.LabelField(content,EffectLayerCustom.XLabelField,GUILayout.Width(LabelWidth));
obj.objectReferenceValue = EditorGUILayout.ObjectField(obj.objectReferenceValue,typeof(Transform),true);
EditorGUILayout.EndHorizontal();
}
public void DrawTexture(string label, string tooltip, SerializedProperty obj)
{
obj.objectReferenceValue = EditorGUILayout.ObjectField(new GUIContent(label,tooltip), obj.objectReferenceValue,typeof(Texture2D),true);
}
public void DrawMaterial(string label, string tooltip, SerializedProperty obj)
{
GUIContent content = null;
if (string.IsNullOrEmpty(tooltip))
content = new GUIContent(label);
else
content = new GUIContent(label,tooltip);
EditorGUILayout.BeginHorizontal();
EditorGUILayout.LabelField(content,EffectLayerCustom.XLabelField,GUILayout.Width(LabelWidth));
obj.objectReferenceValue = EditorGUILayout.ObjectField(obj.objectReferenceValue,typeof(Material),false);
EditorGUILayout.EndHorizontal();
}
public void DrawMesh(string label, string tooltip, SerializedProperty obj)
{
GUIContent content = null;
if (string.IsNullOrEmpty(tooltip))
content = new GUIContent(label);
else
content = new GUIContent(label,tooltip);
EditorGUILayout.BeginHorizontal();
EditorGUILayout.LabelField(content,EffectLayerCustom.XLabelField,GUILayout.Width(LabelWidth));
obj.objectReferenceValue = EditorGUILayout.ObjectField(obj.objectReferenceValue,typeof(Mesh),false);
EditorGUILayout.EndHorizontal();
}
public void DrawSeparator()
{
//EditorGUILayout.LabelField(GUIContent.none,EffectLayerCustom.XSeparator);
EditorGUILayout.LabelField(GUIContent.none,EffectLayerCustom.XLabelField,GUILayout.Width(LabelWidth));
}
public void DrawInfo(string info)
{
//BeginXArea(info,EffectLayerCustom.XInfoArea,50f,EArea.None,null);
GUILayout.Label(info,EffectLayerCustom.XInfoArea);
//EndXArea();
}
static public void RefreshGradientTex(ref Texture2D tex, ColorParameter cp, Editor editor)
{
if (editor == null)
return;
if (tex == null)
{
tex =new Texture2D(ColorTexWidth,ColorTexHeight, TextureFormat.RGBA32, false);
tex.wrapMode = TextureWrapMode.Clamp;
}
RefreshEditorGradient(tex,cp);
//editor.Repaint();
}
static protected void RefreshEditorGradient (Texture2D tex, ColorParameter p)
{
Color col;
for (int x = 0; x < tex.width; x++) {
col = p.GetGradientColor(x / (float)tex.width);
for (int y=0;y<tex.height;y++)
tex.SetPixel(x,y,col);
}
tex.Apply();
}
}
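// ---- Hypothetical usage sketch (separate illustrative file), not part of the original source. ----
// It shows the BeginCommonArea/EndXArea pairing and two of the Draw* helpers from a custom
// inspector. It assumes the EffectLayerCustom GUI styles used by XEditorTool have been
// initialized; the [CustomEditor] attribute that would bind this inspector to a concrete
// component is omitted, and the "Speed"/"StartColor" property names are placeholders.
using UnityEditor;
using UnityEngine;
public class XEditorToolUsageSketch : Editor
{
private readonly XEditorTool _tool = new XEditorTool();
public override void OnInspectorGUI()
{
serializedObject.Update();
XEditorTool.XArea area = _tool.BeginCommonArea("basic", "Basic Settings", this, false);
if (area.Open)
{
_tool.DrawFloat("Speed", "units per second", serializedObject.FindProperty("Speed"));
_tool.DrawColor("Start Color", null, serializedObject.FindProperty("StartColor"));
}
_tool.EndXArea();
serializedObject.ApplyModifiedProperties();
}
}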
| |
/*
Copyright (c) 2005 Poderosa Project, All Rights Reserved.
This file is a part of the Granados SSH Client Library that is subject to
the license included in the distributed package.
You may not use this file except in compliance with the license.
$Id: ConnectionParameter.cs,v 1.5 2011/10/27 23:21:56 kzmi Exp $
*/
using System;
using Granados.PKI;
namespace Granados {
/// <summary>
/// Fill the properties of ConnectionParameter object before you start the connection.
/// </summary>
/// <exclude/>
public class SSHConnectionParameter : ICloneable {
//protocol
private SSHProtocol _protocol;
public SSHProtocol Protocol {
get {
return _protocol;
}
set {
_versionEOL = value == SSHProtocol.SSH1 ? "\n" : "\r\n";
_protocol = value;
}
}
//algorithm
private CipherAlgorithm[] _cipherAlgorithms;
public CipherAlgorithm[] PreferableCipherAlgorithms {
get {
return _cipherAlgorithms;
}
set {
_cipherAlgorithms = value;
}
}
private PublicKeyAlgorithm[] _hostkeyAlgorithms;
public PublicKeyAlgorithm[] PreferableHostKeyAlgorithms {
get {
return _hostkeyAlgorithms;
}
set {
_hostkeyAlgorithms = value;
}
}
//account
private AuthenticationType _authtype;
public AuthenticationType AuthenticationType {
get {
return _authtype;
}
set {
_authtype = value;
}
}
private string _username;
public string UserName {
get {
return _username;
}
set {
_username = value;
}
}
private string _password;
public string Password {
get {
return _password;
}
set {
_password = value;
}
}
private string _identityFile;
public string IdentityFile {
get {
return _identityFile;
}
set {
_identityFile = value;
}
}
//host
private HostKeyCheckCallback _keycheck;
public HostKeyCheckCallback KeyCheck {
get {
return _keycheck;
}
set {
_keycheck = value;
}
}
//terminal
private string _terminalname;
public string TerminalName {
get {
return _terminalname;
}
set {
_terminalname = value;
}
}
private int _width;
public int TerminalWidth {
get {
return _width;
}
set {
_width = value;
}
}
private int _height;
public int TerminalHeight {
get {
return _height;
}
set {
_height = value;
}
}
private int _pixelWidth;
public int TerminalPixelWidth {
get {
return _pixelWidth;
}
set {
_pixelWidth = value;
}
}
private int _pixelHeight;
public int TerminalPixelHeight {
get {
return _pixelHeight;
}
set {
_pixelHeight = value;
}
}
private Random _random;
public Random Random {
get {
return _random;
}
set {
_random = value;
}
}
private bool _checkMACError;
public bool CheckMACError {
get {
return _checkMACError;
}
set {
_checkMACError = value;
}
}
//SSH2 only property
private int _windowsize;
public int WindowSize {
get {
return _windowsize;
}
set {
_windowsize = value;
}
}
//SSH2 only property
private int _maxpacketsize;
public int MaxPacketSize {
get {
return _maxpacketsize;
}
set {
_maxpacketsize = value;
}
}
//some servers may expect irregular end-of-line character(s).
//initial value is "\n" for SSH1 and "\r\n" for SSH2
private string _versionEOL;
public string VersionEOL {
get {
return _versionEOL;
}
set {
_versionEOL = value;
}
}
//protocol negotiation tracer (optional)
private ISSHEventTracer _tracer;
public ISSHEventTracer EventTracer {
get {
return _tracer;
}
set {
_tracer = value;
}
}
//Agent forward (optional)
private IAgentForward _agentForward;
public IAgentForward AgentForward {
get {
return _agentForward;
}
set {
_agentForward = value;
}
}
public SSHConnectionParameter() {
_random = new Random();
_authtype = AuthenticationType.Password;
_terminalname = "vt100";
_width = 80;
_height = 25;
_protocol = SSHProtocol.SSH2;
_cipherAlgorithms = new CipherAlgorithm[] { CipherAlgorithm.AES256CTR, CipherAlgorithm.AES256, CipherAlgorithm.AES192CTR, CipherAlgorithm.AES192, CipherAlgorithm.AES128CTR, CipherAlgorithm.AES128, CipherAlgorithm.Blowfish, CipherAlgorithm.TripleDES };
_hostkeyAlgorithms = new PublicKeyAlgorithm[] { PublicKeyAlgorithm.DSA, PublicKeyAlgorithm.RSA };
_windowsize = 0x1000;
_maxpacketsize = 0x10000;
_checkMACError = true;
_tracer = null;
}
public object Clone() {
SSHConnectionParameter n = new SSHConnectionParameter();
n._authtype = _authtype;
n._cipherAlgorithms = _cipherAlgorithms;
n._height = _height;
n._hostkeyAlgorithms = _hostkeyAlgorithms;
n._identityFile = _identityFile;
n._keycheck = _keycheck;
n._maxpacketsize = _maxpacketsize;
n._password = _password;
n._protocol = _protocol;
n._random = _random;
n._terminalname = _terminalname;
n._username = _username;
n._width = _width;
n._windowsize = _windowsize;
n._checkMACError = _checkMACError;
n._tracer = _tracer;
n._agentForward = _agentForward;
n._pixelWidth = _pixelWidth;
n._pixelHeight = _pixelHeight;
n._versionEOL = _versionEOL;
return n;
}
}
//To receive the events of the SSH protocol negotiation, set an implementation of this interface to ConnectionParameter
//note that:
// * these methods are called by different threads asynchronously
// * DO NOT throw any exceptions in the implementation
/// <summary>
/// Receives trace events raised during SSH protocol negotiation.
/// </summary>
/// <exclude/>
public interface ISSHEventTracer {
void OnTranmission(string type, string detail);
void OnReception(string type, string detail);
}
}
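// ---- Hypothetical usage sketch (separate illustrative file), not part of the original source. ----
// It only fills properties defined on SSHConnectionParameter above; host key checking,
// identity files and the actual connect call (e.g. SSHConnection.Connect elsewhere in
// Granados) are left out and would be required for a real session.
using Granados;
public static class ConnectionParameterExample
{
public static SSHConnectionParameter CreatePasswordParameter(string user, string password)
{
SSHConnectionParameter p = new SSHConnectionParameter();
p.Protocol = SSHProtocol.SSH2; // the setter also selects "\r\n" as VersionEOL
p.UserName = user;
p.Password = password;
p.AuthenticationType = AuthenticationType.Password;
p.TerminalName = "xterm";
p.TerminalWidth = 120;
p.TerminalHeight = 40;
return p;
}
}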
| |
// Copyright 2021 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Generated code. DO NOT EDIT!
using gagvr = Google.Ads.GoogleAds.V9.Resources;
using gax = Google.Api.Gax;
using gaxgrpc = Google.Api.Gax.Grpc;
using gaxgrpccore = Google.Api.Gax.Grpc.GrpcCore;
using proto = Google.Protobuf;
using grpccore = Grpc.Core;
using grpcinter = Grpc.Core.Interceptors;
using sys = System;
using scg = System.Collections.Generic;
using sco = System.Collections.ObjectModel;
using st = System.Threading;
using stt = System.Threading.Tasks;
namespace Google.Ads.GoogleAds.V9.Services
{
/// <summary>Settings for <see cref="LifeEventServiceClient"/> instances.</summary>
public sealed partial class LifeEventServiceSettings : gaxgrpc::ServiceSettingsBase
{
/// <summary>Get a new instance of the default <see cref="LifeEventServiceSettings"/>.</summary>
/// <returns>A new instance of the default <see cref="LifeEventServiceSettings"/>.</returns>
public static LifeEventServiceSettings GetDefault() => new LifeEventServiceSettings();
/// <summary>Constructs a new <see cref="LifeEventServiceSettings"/> object with default settings.</summary>
public LifeEventServiceSettings()
{
}
private LifeEventServiceSettings(LifeEventServiceSettings existing) : base(existing)
{
gax::GaxPreconditions.CheckNotNull(existing, nameof(existing));
GetLifeEventSettings = existing.GetLifeEventSettings;
OnCopy(existing);
}
partial void OnCopy(LifeEventServiceSettings existing);
/// <summary>
/// <see cref="gaxgrpc::CallSettings"/> for synchronous and asynchronous calls to
/// <c>LifeEventServiceClient.GetLifeEvent</c> and <c>LifeEventServiceClient.GetLifeEventAsync</c>.
/// </summary>
/// <remarks>
/// <list type="bullet">
/// <item><description>Initial retry delay: 5000 milliseconds.</description></item>
/// <item><description>Retry delay multiplier: 1.3</description></item>
/// <item><description>Retry maximum delay: 60000 milliseconds.</description></item>
/// <item><description>Maximum attempts: Unlimited</description></item>
/// <item>
/// <description>
/// Retriable status codes: <see cref="grpccore::StatusCode.Unavailable"/>,
/// <see cref="grpccore::StatusCode.DeadlineExceeded"/>.
/// </description>
/// </item>
/// <item><description>Timeout: 3600 seconds.</description></item>
/// </list>
/// </remarks>
public gaxgrpc::CallSettings GetLifeEventSettings { get; set; } = gaxgrpc::CallSettingsExtensions.WithRetry(gaxgrpc::CallSettings.FromExpiration(gax::Expiration.FromTimeout(sys::TimeSpan.FromMilliseconds(3600000))), gaxgrpc::RetrySettings.FromExponentialBackoff(maxAttempts: 2147483647, initialBackoff: sys::TimeSpan.FromMilliseconds(5000), maxBackoff: sys::TimeSpan.FromMilliseconds(60000), backoffMultiplier: 1.3, retryFilter: gaxgrpc::RetrySettings.FilterForStatusCodes(grpccore::StatusCode.Unavailable, grpccore::StatusCode.DeadlineExceeded)));
/// <summary>Creates a deep clone of this object, with all the same property values.</summary>
/// <returns>A deep clone of this <see cref="LifeEventServiceSettings"/> object.</returns>
public LifeEventServiceSettings Clone() => new LifeEventServiceSettings(this);
}
/// <summary>
/// Builder class for <see cref="LifeEventServiceClient"/> to provide simple configuration of credentials, endpoint
/// etc.
/// </summary>
internal sealed partial class LifeEventServiceClientBuilder : gaxgrpc::ClientBuilderBase<LifeEventServiceClient>
{
/// <summary>The settings to use for RPCs, or <c>null</c> for the default settings.</summary>
public LifeEventServiceSettings Settings { get; set; }
/// <summary>Creates a new builder with default settings.</summary>
public LifeEventServiceClientBuilder()
{
UseJwtAccessWithScopes = LifeEventServiceClient.UseJwtAccessWithScopes;
}
partial void InterceptBuild(ref LifeEventServiceClient client);
partial void InterceptBuildAsync(st::CancellationToken cancellationToken, ref stt::Task<LifeEventServiceClient> task);
/// <summary>Builds the resulting client.</summary>
public override LifeEventServiceClient Build()
{
LifeEventServiceClient client = null;
InterceptBuild(ref client);
return client ?? BuildImpl();
}
/// <summary>Builds the resulting client asynchronously.</summary>
public override stt::Task<LifeEventServiceClient> BuildAsync(st::CancellationToken cancellationToken = default)
{
stt::Task<LifeEventServiceClient> task = null;
InterceptBuildAsync(cancellationToken, ref task);
return task ?? BuildAsyncImpl(cancellationToken);
}
private LifeEventServiceClient BuildImpl()
{
Validate();
grpccore::CallInvoker callInvoker = CreateCallInvoker();
return LifeEventServiceClient.Create(callInvoker, Settings);
}
private async stt::Task<LifeEventServiceClient> BuildAsyncImpl(st::CancellationToken cancellationToken)
{
Validate();
grpccore::CallInvoker callInvoker = await CreateCallInvokerAsync(cancellationToken).ConfigureAwait(false);
return LifeEventServiceClient.Create(callInvoker, Settings);
}
/// <summary>Returns the endpoint for this builder type, used if no endpoint is otherwise specified.</summary>
protected override string GetDefaultEndpoint() => LifeEventServiceClient.DefaultEndpoint;
/// <summary>
/// Returns the default scopes for this builder type, used if no scopes are otherwise specified.
/// </summary>
protected override scg::IReadOnlyList<string> GetDefaultScopes() => LifeEventServiceClient.DefaultScopes;
/// <summary>Returns the channel pool to use when no other options are specified.</summary>
protected override gaxgrpc::ChannelPool GetChannelPool() => LifeEventServiceClient.ChannelPool;
/// <summary>Returns the default <see cref="gaxgrpc::GrpcAdapter"/>to use if not otherwise specified.</summary>
protected override gaxgrpc::GrpcAdapter DefaultGrpcAdapter => gaxgrpccore::GrpcCoreAdapter.Instance;
}
/// <summary>LifeEventService client wrapper, for convenient use.</summary>
/// <remarks>
/// Service to fetch Google Ads Life Events.
/// </remarks>
public abstract partial class LifeEventServiceClient
{
/// <summary>
/// The default endpoint for the LifeEventService service, which is a host of "googleads.googleapis.com" and a
/// port of 443.
/// </summary>
public static string DefaultEndpoint { get; } = "googleads.googleapis.com:443";
/// <summary>The default LifeEventService scopes.</summary>
/// <remarks>
/// The default LifeEventService scopes are:
/// <list type="bullet"><item><description>https://www.googleapis.com/auth/adwords</description></item></list>
/// </remarks>
public static scg::IReadOnlyList<string> DefaultScopes { get; } = new sco::ReadOnlyCollection<string>(new string[]
{
"https://www.googleapis.com/auth/adwords",
});
internal static gaxgrpc::ChannelPool ChannelPool { get; } = new gaxgrpc::ChannelPool(DefaultScopes, UseJwtAccessWithScopes);
internal static bool UseJwtAccessWithScopes
{
get
{
bool useJwtAccessWithScopes = true;
MaybeUseJwtAccessWithScopes(ref useJwtAccessWithScopes);
return useJwtAccessWithScopes;
}
}
static partial void MaybeUseJwtAccessWithScopes(ref bool useJwtAccessWithScopes);
/// <summary>
/// Asynchronously creates a <see cref="LifeEventServiceClient"/> using the default credentials, endpoint and
/// settings. To specify custom credentials or other settings, use <see cref="LifeEventServiceClientBuilder"/>.
/// </summary>
/// <param name="cancellationToken">
/// The <see cref="st::CancellationToken"/> to use while creating the client.
/// </param>
/// <returns>The task representing the created <see cref="LifeEventServiceClient"/>.</returns>
public static stt::Task<LifeEventServiceClient> CreateAsync(st::CancellationToken cancellationToken = default) =>
new LifeEventServiceClientBuilder().BuildAsync(cancellationToken);
/// <summary>
/// Synchronously creates a <see cref="LifeEventServiceClient"/> using the default credentials, endpoint and
/// settings. To specify custom credentials or other settings, use <see cref="LifeEventServiceClientBuilder"/>.
/// </summary>
/// <returns>The created <see cref="LifeEventServiceClient"/>.</returns>
public static LifeEventServiceClient Create() => new LifeEventServiceClientBuilder().Build();
/// <summary>
/// Creates a <see cref="LifeEventServiceClient"/> which uses the specified call invoker for remote operations.
/// </summary>
/// <param name="callInvoker">
/// The <see cref="grpccore::CallInvoker"/> for remote operations. Must not be null.
/// </param>
/// <param name="settings">Optional <see cref="LifeEventServiceSettings"/>.</param>
/// <returns>The created <see cref="LifeEventServiceClient"/>.</returns>
internal static LifeEventServiceClient Create(grpccore::CallInvoker callInvoker, LifeEventServiceSettings settings = null)
{
gax::GaxPreconditions.CheckNotNull(callInvoker, nameof(callInvoker));
grpcinter::Interceptor interceptor = settings?.Interceptor;
if (interceptor != null)
{
callInvoker = grpcinter::CallInvokerExtensions.Intercept(callInvoker, interceptor);
}
LifeEventService.LifeEventServiceClient grpcClient = new LifeEventService.LifeEventServiceClient(callInvoker);
return new LifeEventServiceClientImpl(grpcClient, settings);
}
/// <summary>
/// Shuts down any channels automatically created by <see cref="Create()"/> and
/// <see cref="CreateAsync(st::CancellationToken)"/>. Channels which weren't automatically created are not
/// affected.
/// </summary>
/// <remarks>
/// After calling this method, further calls to <see cref="Create()"/> and
/// <see cref="CreateAsync(st::CancellationToken)"/> will create new channels, which could in turn be shut down
/// by another call to this method.
/// </remarks>
/// <returns>A task representing the asynchronous shutdown operation.</returns>
public static stt::Task ShutdownDefaultChannelsAsync() => ChannelPool.ShutdownChannelsAsync();
/// <summary>The underlying gRPC LifeEventService client</summary>
public virtual LifeEventService.LifeEventServiceClient GrpcClient => throw new sys::NotImplementedException();
/// <summary>
/// Returns the requested life event in full detail.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual gagvr::LifeEvent GetLifeEvent(GetLifeEventRequest request, gaxgrpc::CallSettings callSettings = null) =>
throw new sys::NotImplementedException();
/// <summary>
/// Returns the requested life event in full detail.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<gagvr::LifeEvent> GetLifeEventAsync(GetLifeEventRequest request, gaxgrpc::CallSettings callSettings = null) =>
throw new sys::NotImplementedException();
/// <summary>
/// Returns the requested life event in full detail.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<gagvr::LifeEvent> GetLifeEventAsync(GetLifeEventRequest request, st::CancellationToken cancellationToken) =>
GetLifeEventAsync(request, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
/// <summary>
/// Returns the requested life event in full detail.
/// </summary>
/// <param name="resourceName">
/// Required. Resource name of the LifeEvent to fetch.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual gagvr::LifeEvent GetLifeEvent(string resourceName, gaxgrpc::CallSettings callSettings = null) =>
GetLifeEvent(new GetLifeEventRequest
{
ResourceName = gax::GaxPreconditions.CheckNotNullOrEmpty(resourceName, nameof(resourceName)),
}, callSettings);
/// <summary>
/// Returns the requested life event in full detail.
/// </summary>
/// <param name="resourceName">
/// Required. Resource name of the LifeEvent to fetch.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<gagvr::LifeEvent> GetLifeEventAsync(string resourceName, gaxgrpc::CallSettings callSettings = null) =>
GetLifeEventAsync(new GetLifeEventRequest
{
ResourceName = gax::GaxPreconditions.CheckNotNullOrEmpty(resourceName, nameof(resourceName)),
}, callSettings);
/// <summary>
/// Returns the requested life event in full detail.
/// </summary>
/// <param name="resourceName">
/// Required. Resource name of the LifeEvent to fetch.
/// </param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<gagvr::LifeEvent> GetLifeEventAsync(string resourceName, st::CancellationToken cancellationToken) =>
GetLifeEventAsync(resourceName, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
/// <summary>
/// Returns the requested life event in full detail.
/// </summary>
/// <param name="resourceName">
/// Required. Resource name of the LifeEvent to fetch.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public virtual gagvr::LifeEvent GetLifeEvent(gagvr::LifeEventName resourceName, gaxgrpc::CallSettings callSettings = null) =>
GetLifeEvent(new GetLifeEventRequest
{
ResourceNameAsLifeEventName = gax::GaxPreconditions.CheckNotNull(resourceName, nameof(resourceName)),
}, callSettings);
/// <summary>
/// Returns the requested life event in full detail.
/// </summary>
/// <param name="resourceName">
/// Required. Resource name of the LifeEvent to fetch.
/// </param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<gagvr::LifeEvent> GetLifeEventAsync(gagvr::LifeEventName resourceName, gaxgrpc::CallSettings callSettings = null) =>
GetLifeEventAsync(new GetLifeEventRequest
{
ResourceNameAsLifeEventName = gax::GaxPreconditions.CheckNotNull(resourceName, nameof(resourceName)),
}, callSettings);
/// <summary>
/// Returns the requested life event in full detail.
/// </summary>
/// <param name="resourceName">
/// Required. Resource name of the LifeEvent to fetch.
/// </param>
/// <param name="cancellationToken">A <see cref="st::CancellationToken"/> to use for this RPC.</param>
/// <returns>A Task containing the RPC response.</returns>
public virtual stt::Task<gagvr::LifeEvent> GetLifeEventAsync(gagvr::LifeEventName resourceName, st::CancellationToken cancellationToken) =>
GetLifeEventAsync(resourceName, gaxgrpc::CallSettings.FromCancellationToken(cancellationToken));
}
/// <summary>LifeEventService client wrapper implementation, for convenient use.</summary>
/// <remarks>
/// Service to fetch Google Ads Life Events.
/// </remarks>
public sealed partial class LifeEventServiceClientImpl : LifeEventServiceClient
{
private readonly gaxgrpc::ApiCall<GetLifeEventRequest, gagvr::LifeEvent> _callGetLifeEvent;
/// <summary>
/// Constructs a client wrapper for the LifeEventService service, with the specified gRPC client and settings.
/// </summary>
/// <param name="grpcClient">The underlying gRPC client.</param>
/// <param name="settings">The base <see cref="LifeEventServiceSettings"/> used within this client.</param>
public LifeEventServiceClientImpl(LifeEventService.LifeEventServiceClient grpcClient, LifeEventServiceSettings settings)
{
GrpcClient = grpcClient;
LifeEventServiceSettings effectiveSettings = settings ?? LifeEventServiceSettings.GetDefault();
gaxgrpc::ClientHelper clientHelper = new gaxgrpc::ClientHelper(effectiveSettings);
_callGetLifeEvent = clientHelper.BuildApiCall<GetLifeEventRequest, gagvr::LifeEvent>(grpcClient.GetLifeEventAsync, grpcClient.GetLifeEvent, effectiveSettings.GetLifeEventSettings).WithGoogleRequestParam("resource_name", request => request.ResourceName);
Modify_ApiCall(ref _callGetLifeEvent);
Modify_GetLifeEventApiCall(ref _callGetLifeEvent);
OnConstruction(grpcClient, effectiveSettings, clientHelper);
}
partial void Modify_ApiCall<TRequest, TResponse>(ref gaxgrpc::ApiCall<TRequest, TResponse> call) where TRequest : class, proto::IMessage<TRequest> where TResponse : class, proto::IMessage<TResponse>;
partial void Modify_GetLifeEventApiCall(ref gaxgrpc::ApiCall<GetLifeEventRequest, gagvr::LifeEvent> call);
partial void OnConstruction(LifeEventService.LifeEventServiceClient grpcClient, LifeEventServiceSettings effectiveSettings, gaxgrpc::ClientHelper clientHelper);
/// <summary>The underlying gRPC LifeEventService client</summary>
public override LifeEventService.LifeEventServiceClient GrpcClient { get; }
partial void Modify_GetLifeEventRequest(ref GetLifeEventRequest request, ref gaxgrpc::CallSettings settings);
/// <summary>
/// Returns the requested life event in full detail.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>The RPC response.</returns>
public override gagvr::LifeEvent GetLifeEvent(GetLifeEventRequest request, gaxgrpc::CallSettings callSettings = null)
{
Modify_GetLifeEventRequest(ref request, ref callSettings);
return _callGetLifeEvent.Sync(request, callSettings);
}
/// <summary>
/// Returns the requested life event in full detail.
/// </summary>
/// <param name="request">The request object containing all of the parameters for the API call.</param>
/// <param name="callSettings">If not null, applies overrides to this RPC call.</param>
/// <returns>A Task containing the RPC response.</returns>
public override stt::Task<gagvr::LifeEvent> GetLifeEventAsync(GetLifeEventRequest request, gaxgrpc::CallSettings callSettings = null)
{
Modify_GetLifeEventRequest(ref request, ref callSettings);
return _callGetLifeEvent.Async(request, callSettings);
}
}
}
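// ---- Hypothetical usage sketch (separate illustrative file), not part of the generated client above. ----
// It uses only members shown in that file (Create() and the string-based GetLifeEvent overload).
// The customer and life event IDs are placeholders, and a real call also needs Google Ads
// credentials configured for the default channel.
using Google.Ads.GoogleAds.V9.Services;
using gagvr = Google.Ads.GoogleAds.V9.Resources;
public static class LifeEventLookup
{
public static void PrintLifeEvent()
{
LifeEventServiceClient client = LifeEventServiceClient.Create();
// Resource names follow the pattern customers/{customer_id}/lifeEvents/{life_event_id}.
gagvr::LifeEvent lifeEvent = client.GetLifeEvent("customers/1234567890/lifeEvents/678");
System.Console.WriteLine(lifeEvent.ResourceName);
}
}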
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
using System.IO;
using Microsoft.Xml;
using System.Net;
using System.Text;
//using Microsoft.Xml.Utils;
using System.Reflection;
using System.Diagnostics;
using System.Collections.Generic;
using System.Runtime.Versioning;
namespace Microsoft.Xml.Resolvers
{
//
// XmlPreloadedResolver is an XmlResolver that can be pre-loaded with data.
// By default it contains well-known DTDs for XHTML 1.0 and RSS 0.91.
// Custom mappings of URIs to data can be added with the Add method.
//
public partial class XmlPreloadedResolver : XmlResolver
{
//
// PreloadedData class
//
private abstract class PreloadedData
{
// Returns preloaded data as Stream; Stream must always be supported
internal abstract Stream AsStream();
// Returns preloaded data as TextReader, or throws when not supported
internal virtual TextReader AsTextReader()
{
throw new XmlException(ResXml.Xml_UnsupportedClass);
}
// Returns true for types that are supported for this preloaded data; Stream must always be supported
internal virtual bool SupportsType(Type type)
{
if (type == null || type == typeof(Stream))
{
return true;
}
return false;
}
};
//
// XmlKnownDtdData class
//
private class XmlKnownDtdData : PreloadedData
{
internal string publicId;
internal string systemId;
private string _resourceName;
internal XmlKnownDtdData(string publicId, string systemId, string resourceName)
{
this.publicId = publicId;
this.systemId = systemId;
_resourceName = resourceName;
}
internal override Stream AsStream()
{
// Assumption: the well-known DTD content is embedded in this assembly as a
// manifest resource named _resourceName.
return GetType().GetTypeInfo().Assembly.GetManifestResourceStream(_resourceName);
}
}
private class ByteArrayChunk : PreloadedData
{
private byte[] _array;
private int _offset;
private int _length;
internal ByteArrayChunk(byte[] array)
: this(array, 0, array.Length)
{
}
internal ByteArrayChunk(byte[] array, int offset, int length)
{
_array = array;
_offset = offset;
_length = length;
}
internal override Stream AsStream()
{
return new MemoryStream(_array, _offset, _length);
}
}
private class StringData : PreloadedData
{
private string _str;
internal StringData(string str)
{
_str = str;
}
internal override Stream AsStream()
{
return new MemoryStream(Encoding.Unicode.GetBytes(_str));
}
internal override TextReader AsTextReader()
{
return new StringReader(_str);
}
internal override bool SupportsType(Type type)
{
if (type == typeof(TextReader))
{
return true;
}
return base.SupportsType(type);
}
}
//
// Fields
//
private XmlResolver _fallbackResolver;
private Dictionary<Uri, PreloadedData> _mappings;
private XmlKnownDtds _preloadedDtds;
//
// Static/constant fields
//
private static XmlKnownDtdData[] s_xhtml10_Dtd = new XmlKnownDtdData[] {
new XmlKnownDtdData( "-//W3C//DTD XHTML 1.0 Strict//EN", "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd", "xhtml1-strict.dtd" ),
new XmlKnownDtdData( "-//W3C//DTD XHTML 1.0 Transitional//EN", "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd", "xhtml1-transitional.dtd" ),
new XmlKnownDtdData( "-//W3C//DTD XHTML 1.0 Frameset//EN", "http://www.w3.org/TR/xhtml1/DTD/xhtml1-frameset.dtd", "xhtml1-frameset.dtd" ),
new XmlKnownDtdData( "-//W3C//ENTITIES Latin 1 for XHTML//EN", "http://www.w3.org/TR/xhtml1/DTD/xhtml-lat1.ent", "xhtml-lat1.ent" ),
new XmlKnownDtdData( "-//W3C//ENTITIES Symbols for XHTML//EN", "http://www.w3.org/TR/xhtml1/DTD/xhtml-symbol.ent", "xhtml-symbol.ent" ),
new XmlKnownDtdData( "-//W3C//ENTITIES Special for XHTML//EN", "http://www.w3.org/TR/xhtml1/DTD/xhtml-special.ent", "xhtml-special.ent" ),
};
private static XmlKnownDtdData[] s_rss091_Dtd = new XmlKnownDtdData[] {
new XmlKnownDtdData( "-//Netscape Communications//DTD RSS 0.91//EN", "http://my.netscape.com/publish/formats/rss-0.91.dtd", "rss-0.91.dtd" ),
};
//
// Constructors
//
public XmlPreloadedResolver()
: this(null)
{
}
public XmlPreloadedResolver(XmlKnownDtds preloadedDtds)
: this(null, preloadedDtds, null)
{
}
public XmlPreloadedResolver(XmlResolver fallbackResolver)
: this(fallbackResolver, XmlKnownDtds.All, null)
{
}
public XmlPreloadedResolver(XmlResolver fallbackResolver, XmlKnownDtds preloadedDtds)
: this(fallbackResolver, preloadedDtds, null)
{
}
public XmlPreloadedResolver(XmlResolver fallbackResolver, XmlKnownDtds preloadedDtds, IEqualityComparer<Uri> uriComparer)
{
_fallbackResolver = fallbackResolver;
_mappings = new Dictionary<Uri, PreloadedData>(16, uriComparer);
_preloadedDtds = preloadedDtds;
// load known DTDs
if (preloadedDtds != 0)
{
if ((preloadedDtds & XmlKnownDtds.Xhtml10) != 0)
{
AddKnownDtd(s_xhtml10_Dtd);
}
if ((preloadedDtds & XmlKnownDtds.Rss091) != 0)
{
AddKnownDtd(s_rss091_Dtd);
}
}
}
public override Uri ResolveUri(Uri baseUri, string relativeUri)
{
// 1) special-case well-known public IDs
// 2) To make FxCop happy we need to use StartsWith() overload that takes StringComparison ->
// .StartsWith(string) is equal to .StartsWith(string, StringComparison.CurrentCulture);
if (relativeUri != null && relativeUri.StartsWith("-//", StringComparison.CurrentCulture))
{
// 1) XHTML 1.0 public IDs
// 2) To make FxCop happy we need to use StartsWith() overload that takes StringComparison ->
// .StartsWith(string) is equal to .StartsWith(string, StringComparison.CurrentCulture);
if ((_preloadedDtds & XmlKnownDtds.Xhtml10) != 0 && relativeUri.StartsWith("-//W3C//", StringComparison.CurrentCulture))
{
for (int i = 0; i < s_xhtml10_Dtd.Length; i++)
{
if (relativeUri == s_xhtml10_Dtd[i].publicId)
{
return new Uri(relativeUri, UriKind.Relative);
}
}
}
// RSS 0.91 public IDs
if ((_preloadedDtds & XmlKnownDtds.Rss091) != 0)
{
Debug.Assert(s_rss091_Dtd.Length == 1);
if (relativeUri == s_rss091_Dtd[0].publicId)
{
return new Uri(relativeUri, UriKind.Relative);
}
}
}
return base.ResolveUri(baseUri, relativeUri);
}
public override Object GetEntity(Uri absoluteUri, string role, Type ofObjectToReturn)
{
if (absoluteUri == null)
{
throw new ArgumentNullException("absoluteUri");
}
PreloadedData data;
if (!_mappings.TryGetValue(absoluteUri, out data))
{
if (_fallbackResolver != null)
{
return _fallbackResolver.GetEntity(absoluteUri, role, ofObjectToReturn);
}
throw new XmlException(string.Format(ResXml.Xml_CannotResolveUrl, absoluteUri.ToString()));
}
if (ofObjectToReturn == null || ofObjectToReturn == typeof(Stream) || ofObjectToReturn == typeof(Object))
{
return data.AsStream();
}
else if (ofObjectToReturn == typeof(TextReader))
{
return data.AsTextReader();
}
else
{
throw new XmlException(ResXml.Xml_UnsupportedClass);
}
}
public override ICredentials Credentials
{
set
{
if (_fallbackResolver != null)
{
_fallbackResolver.Credentials = value;
}
}
}
public override bool SupportsType(Uri absoluteUri, Type type)
{
if (absoluteUri == null)
{
throw new ArgumentNullException("absoluteUri");
}
PreloadedData data;
if (!_mappings.TryGetValue(absoluteUri, out data))
{
if (_fallbackResolver != null)
{
return _fallbackResolver.SupportsType(absoluteUri, type);
}
return base.SupportsType(absoluteUri, type);
}
return data.SupportsType(type);
}
public void Add(Uri uri, byte[] value)
{
if (uri == null)
{
throw new ArgumentNullException("uri");
}
if (value == null)
{
throw new ArgumentNullException("value");
}
Add(uri, new ByteArrayChunk(value, 0, value.Length));
}
public void Add(Uri uri, byte[] value, int offset, int count)
{
if (uri == null)
{
throw new ArgumentNullException("uri");
}
if (value == null)
{
throw new ArgumentNullException("value");
}
if (count < 0)
{
throw new ArgumentOutOfRangeException("count");
}
if (offset < 0)
{
throw new ArgumentOutOfRangeException("offset");
}
if (value.Length - offset < count)
{
throw new ArgumentOutOfRangeException("count");
}
Add(uri, new ByteArrayChunk(value, offset, count));
}
public void Add(Uri uri, string value)
{
if (uri == null)
{
throw new ArgumentNullException("uri");
}
if (value == null)
{
throw new ArgumentNullException("value");
}
Add(uri, new StringData(value));
}
public IEnumerable<Uri> PreloadedUris
{
get
{
// read-only collection of keys
return _mappings.Keys;
}
}
public void Remove(Uri uri)
{
if (uri == null)
{
throw new ArgumentNullException("uri");
}
_mappings.Remove(uri);
}
//
// Private implementation methods
//
private void Add(Uri uri, PreloadedData data)
{
Debug.Assert(uri != null);
// override if exists
if (_mappings.ContainsKey(uri))
{
_mappings[uri] = data;
}
else
{
_mappings.Add(uri, data);
}
}
private void AddKnownDtd(XmlKnownDtdData[] dtdSet)
{
for (int i = 0; i < dtdSet.Length; i++)
{
XmlKnownDtdData dtdInfo = dtdSet[i];
_mappings.Add(new Uri(dtdInfo.publicId, UriKind.RelativeOrAbsolute), dtdInfo);
_mappings.Add(new Uri(dtdInfo.systemId, UriKind.RelativeOrAbsolute), dtdInfo);
}
}
}
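    //
    // Example usage (editor's sketch; "document.xhtml" and the urn:example entity are placeholders):
    //
    //   var resolver = new XmlPreloadedResolver(new XmlUrlResolver(), XmlKnownDtds.Xhtml10);
    //   resolver.Add(new Uri("urn:example:copy", UriKind.Absolute), "<!ENTITY copy \"(c)\">");
    //   var settings = new XmlReaderSettings { DtdProcessing = DtdProcessing.Parse, XmlResolver = resolver };
    //   using (var reader = XmlReader.Create("document.xhtml", settings))
    //   {
    //       while (reader.Read()) { }
    //   }
    //
    //   Requests for the preloaded XHTML 1.0 DTDs and for explicitly Add()-ed URIs are served
    //   from memory; anything else falls through to the XmlUrlResolver fallback.
    //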
}
| |
#region Copyright notice and license
// Copyright 2015 gRPC authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#endregion
using System;
using System.Runtime.InteropServices;
using System.Threading;
using Grpc.Core.Utils;
namespace Grpc.Core.Internal
{
/// <summary>
/// gpr_timespec from grpc/support/time.h
/// </summary>
[StructLayout(LayoutKind.Sequential)]
internal struct Timespec : IEquatable<Timespec>
{
/// <summary>
/// Indicates whether this instance and a specified object are equal.
/// </summary>
public override bool Equals(object obj)
{
return obj is Timespec && Equals((Timespec)obj);
}
/// <summary>
/// Returns the hash code for this instance.
/// </summary>
public override int GetHashCode()
{
unchecked
{
const int Prime = 373587911;
int i = (int)clock_type;
i = (i * Prime) ^ tv_nsec;
                i = (i * Prime) ^ tv_sec.GetHashCode();
return i;
}
}
/// <summary>
/// Returns the full type name of this instance.
/// </summary>
public override string ToString()
{
return typeof(Timespec).FullName;
}
/// <summary>
/// Indicates whether this instance and a specified object are equal.
/// </summary>
public bool Equals(Timespec other)
{
return this.clock_type == other.clock_type
&& this.tv_nsec == other.tv_nsec
&& this.tv_sec == other.tv_sec;
}
const long NanosPerSecond = 1000 * 1000 * 1000;
const long NanosPerTick = 100;
const long TicksPerSecond = NanosPerSecond / NanosPerTick;
static readonly NativeMethods Native = NativeMethods.Get();
static readonly DateTime UnixEpoch = new DateTime(1970, 1, 1, 0, 0, 0, 0, DateTimeKind.Utc);
public Timespec(long tv_sec, int tv_nsec) : this(tv_sec, tv_nsec, ClockType.Realtime)
{
}
public Timespec(long tv_sec, int tv_nsec, ClockType clock_type)
{
this.tv_sec = tv_sec;
this.tv_nsec = tv_nsec;
this.clock_type = clock_type;
}
private long tv_sec;
private int tv_nsec;
private ClockType clock_type;
/// <summary>
/// Timespec a long time in the future.
/// </summary>
public static Timespec InfFuture
{
get
{
return new Timespec(long.MaxValue, 0, ClockType.Realtime);
}
}
/// <summary>
/// Timespec a long time in the past.
/// </summary>
public static Timespec InfPast
{
get
{
return new Timespec(long.MinValue, 0, ClockType.Realtime);
}
}
/// <summary>
/// Return Timespec representing the current time.
/// </summary>
public static Timespec Now
{
get
{
return Native.gprsharp_now(ClockType.Realtime);
}
}
/// <summary>
/// Seconds since unix epoch.
/// </summary>
public long TimevalSeconds
{
get
{
return tv_sec;
}
}
/// <summary>
/// The nanoseconds part of timeval.
/// </summary>
public int TimevalNanos
{
get
{
return tv_nsec;
}
}
/// <summary>
/// Converts the timespec to desired clock type.
/// </summary>
public Timespec ToClockType(ClockType targetClock)
{
return Native.gprsharp_convert_clock_type(this, targetClock);
}
/// <summary>
/// Converts Timespec to DateTime.
        /// Timespec needs to be of type ClockType.Realtime and needs to represent a legal value.
        /// DateTime has lower resolution (100ns), so rounding can occur.
        /// Values are always rounded up to the nearest DateTime value in the future.
///
/// For Timespec.InfFuture or if timespec is after the largest representable DateTime, DateTime.MaxValue is returned.
/// For Timespec.InfPast or if timespec is before the lowest representable DateTime, DateTime.MinValue is returned.
///
/// Unless DateTime.MaxValue or DateTime.MinValue is returned, the resulting DateTime is always in UTC
/// (DateTimeKind.Utc)
/// </summary>
public DateTime ToDateTime()
{
GrpcPreconditions.CheckState(tv_nsec >= 0 && tv_nsec < NanosPerSecond);
GrpcPreconditions.CheckState(clock_type == ClockType.Realtime);
// fast path for InfFuture
if (this.Equals(InfFuture))
{
return DateTime.MaxValue;
}
// fast path for InfPast
if (this.Equals(InfPast))
{
return DateTime.MinValue;
}
try
{
// convert nanos to ticks, round up to the nearest tick
long ticksFromNanos = tv_nsec / NanosPerTick + ((tv_nsec % NanosPerTick != 0) ? 1 : 0);
long ticksTotal = checked(tv_sec * TicksPerSecond + ticksFromNanos);
return UnixEpoch.AddTicks(ticksTotal);
}
catch (OverflowException)
{
// ticks out of long range
return tv_sec > 0 ? DateTime.MaxValue : DateTime.MinValue;
}
catch (ArgumentOutOfRangeException)
{
// resulting date time would be larger than MaxValue
return tv_sec > 0 ? DateTime.MaxValue : DateTime.MinValue;
}
}
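        // Illustrative example (values chosen for this comment, not taken from the original source):
        // a realtime timespec of 100 seconds and 5 nanoseconds past the unix epoch rounds the 5 ns
        // *up* to one 100 ns tick, so the resulting DateTime is 1970-01-01 00:01:40.0000001 UTC.
        //
        //   var ts = new Timespec(100, 5);
        //   DateTime dt = ts.ToDateTime();   // 1970-01-01T00:01:40.0000001Z, DateTimeKind.Utc
        //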
/// <summary>
        /// Converts DateTime to Timespec.
        /// DateTime has to be in UTC (DateTimeKind.Utc) unless it's DateTime.MaxValue or DateTime.MinValue.
        /// For DateTime.MaxValue or a date time after the largest representable Timespec, Timespec.InfFuture is returned.
        /// For DateTime.MinValue or a date time before the lowest representable Timespec, Timespec.InfPast is returned.
        /// </summary>
        /// <returns>The timespec.</returns>
/// <param name="dateTime">Date time.</param>
public static Timespec FromDateTime(DateTime dateTime)
{
if (dateTime == DateTime.MaxValue)
{
return Timespec.InfFuture;
}
if (dateTime == DateTime.MinValue)
{
return Timespec.InfPast;
}
            GrpcPreconditions.CheckArgument(dateTime.Kind == DateTimeKind.Utc, "dateTime needs to be of kind DateTimeKind.Utc or be equal to DateTime.MaxValue or DateTime.MinValue.");
try
{
TimeSpan timeSpan = dateTime - UnixEpoch;
long ticks = timeSpan.Ticks;
long seconds = ticks / TicksPerSecond;
int nanos = (int)((ticks % TicksPerSecond) * NanosPerTick);
if (nanos < 0)
{
// correct the result based on C# modulo semantics for negative dividend
seconds--;
nanos += (int)NanosPerSecond;
}
return new Timespec(seconds, nanos);
}
catch (ArgumentOutOfRangeException)
{
return dateTime > UnixEpoch ? Timespec.InfFuture : Timespec.InfPast;
}
}
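        // Illustrative example (values chosen for this comment): converting a UTC DateTime just
        // before the unix epoch exercises the negative-modulo correction above. One tick (100 ns)
        // before the epoch yields seconds = -1 and nanos = 999999900 rather than (0, -100).
        //
        //   DateTime dt = new DateTime(1969, 12, 31, 23, 59, 59, DateTimeKind.Utc).AddTicks(9999999);
        //   Timespec ts = Timespec.FromDateTime(dt);   // ts.TimevalSeconds == -1, ts.TimevalNanos == 999999900
        //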
/// <summary>
/// Gets current timestamp using <c>GPRClockType.Precise</c>.
/// Only available internally because core needs to be compiled with
/// GRPC_TIMERS_RDTSC support for this to use RDTSC.
/// </summary>
internal static Timespec PreciseNow
{
get
{
return Native.gprsharp_now(ClockType.Precise);
}
}
// for tests only
internal static int NativeSize
{
get
{
return Native.gprsharp_sizeof_timespec();
}
}
// for tests only
internal static Timespec NativeInfFuture
{
get
{
return Native.gprsharp_inf_future(ClockType.Realtime);
}
}
// for tests only
public static Timespec NativeInfPast
{
get
{
return Native.gprsharp_inf_past(ClockType.Realtime);
}
}
}
}
| |
//
// Copyright (c) 2008-2011, Kenneth Bell
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the "Software"),
// to deal in the Software without restriction, including without limitation
// the rights to use, copy, modify, merge, publish, distribute, sublicense,
// and/or sell copies of the Software, and to permit persons to whom the
// Software is furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
// DEALINGS IN THE SOFTWARE.
//
using System;
using System.Globalization;
using System.IO;
using DiscUtils.Streams;
namespace DiscUtils.Ntfs
{
internal class BiosParameterBlock
{
public byte BiosDriveNumber; // Value: 0x80 (first hard disk)
public ushort BytesPerSector;
public byte ChkDskFlags; // Value: 0x00
public ushort FatRootEntriesCount; // Must be 0
public ushort FatSize16; // Must be 0
public uint HiddenSectors; // Value: 0x3F 0x00 0x00 0x00
public byte Media; // Must be 0xF8
public long MftCluster;
public long MftMirrorCluster;
public byte NumFats; // Must be 0
public ushort NumHeads; // Value: 0xFF 0x00
public string OemId;
public byte PaddingByte; // Value: 0x00
public byte RawIndexBufferSize;
public byte RawMftRecordSize;
public ushort ReservedSectors; // Must be 0
public byte SectorsPerCluster;
public ushort SectorsPerTrack; // Value: 0x3F 0x00
public byte SignatureByte; // Value: 0x80
public ushort TotalSectors16; // Must be 0
public uint TotalSectors32; // Must be 0
public long TotalSectors64;
public ulong VolumeSerialNumber;
public int BytesPerCluster
{
get { return BytesPerSector * SectorsPerCluster; }
}
public int IndexBufferSize
{
get { return CalcRecordSize(RawIndexBufferSize); }
}
public int MftRecordSize
{
get { return CalcRecordSize(RawMftRecordSize); }
}
public void Dump(TextWriter writer, string linePrefix)
{
writer.WriteLine(linePrefix + "BIOS PARAMETER BLOCK (BPB)");
writer.WriteLine(linePrefix + " OEM ID: " + OemId);
writer.WriteLine(linePrefix + " Bytes per Sector: " + BytesPerSector);
writer.WriteLine(linePrefix + " Sectors per Cluster: " + SectorsPerCluster);
writer.WriteLine(linePrefix + " Reserved Sectors: " + ReservedSectors);
writer.WriteLine(linePrefix + " # FATs: " + NumFats);
writer.WriteLine(linePrefix + " # FAT Root Entries: " + FatRootEntriesCount);
writer.WriteLine(linePrefix + " Total Sectors (16b): " + TotalSectors16);
writer.WriteLine(linePrefix + " Media: " + Media.ToString("X", CultureInfo.InvariantCulture) +
"h");
writer.WriteLine(linePrefix + " FAT size (16b): " + FatSize16);
writer.WriteLine(linePrefix + " Sectors per Track: " + SectorsPerTrack);
writer.WriteLine(linePrefix + " # Heads: " + NumHeads);
writer.WriteLine(linePrefix + " Hidden Sectors: " + HiddenSectors);
writer.WriteLine(linePrefix + " Total Sectors (32b): " + TotalSectors32);
writer.WriteLine(linePrefix + " BIOS Drive Number: " + BiosDriveNumber);
writer.WriteLine(linePrefix + " Chkdsk Flags: " + ChkDskFlags);
writer.WriteLine(linePrefix + " Signature Byte: " + SignatureByte);
writer.WriteLine(linePrefix + " Total Sectors (64b): " + TotalSectors64);
writer.WriteLine(linePrefix + " MFT Record Size: " + RawMftRecordSize);
writer.WriteLine(linePrefix + " Index Buffer Size: " + RawIndexBufferSize);
writer.WriteLine(linePrefix + " Volume Serial Number: " + VolumeSerialNumber);
}
internal static BiosParameterBlock Initialized(Geometry diskGeometry, int clusterSize, uint partitionStartLba,
long partitionSizeLba, int mftRecordSize, int indexBufferSize)
{
BiosParameterBlock bpb = new BiosParameterBlock();
bpb.OemId = "NTFS ";
bpb.BytesPerSector = Sizes.Sector;
bpb.SectorsPerCluster = (byte)(clusterSize / bpb.BytesPerSector);
bpb.ReservedSectors = 0;
bpb.NumFats = 0;
bpb.FatRootEntriesCount = 0;
bpb.TotalSectors16 = 0;
bpb.Media = 0xF8;
bpb.FatSize16 = 0;
bpb.SectorsPerTrack = (ushort)diskGeometry.SectorsPerTrack;
bpb.NumHeads = (ushort)diskGeometry.HeadsPerCylinder;
bpb.HiddenSectors = partitionStartLba;
bpb.TotalSectors32 = 0;
bpb.BiosDriveNumber = 0x80;
bpb.ChkDskFlags = 0;
bpb.SignatureByte = 0x80;
bpb.PaddingByte = 0;
bpb.TotalSectors64 = partitionSizeLba - 1;
bpb.RawMftRecordSize = bpb.CodeRecordSize(mftRecordSize);
bpb.RawIndexBufferSize = bpb.CodeRecordSize(indexBufferSize);
bpb.VolumeSerialNumber = GenSerialNumber();
return bpb;
}
internal static BiosParameterBlock FromBytes(byte[] bytes, int offset)
{
BiosParameterBlock bpb = new BiosParameterBlock();
bpb.OemId = EndianUtilities.BytesToString(bytes, offset + 0x03, 8);
bpb.BytesPerSector = EndianUtilities.ToUInt16LittleEndian(bytes, offset + 0x0B);
bpb.SectorsPerCluster = bytes[offset + 0x0D];
bpb.ReservedSectors = EndianUtilities.ToUInt16LittleEndian(bytes, offset + 0x0E);
bpb.NumFats = bytes[offset + 0x10];
bpb.FatRootEntriesCount = EndianUtilities.ToUInt16LittleEndian(bytes, offset + 0x11);
bpb.TotalSectors16 = EndianUtilities.ToUInt16LittleEndian(bytes, offset + 0x13);
bpb.Media = bytes[offset + 0x15];
bpb.FatSize16 = EndianUtilities.ToUInt16LittleEndian(bytes, offset + 0x16);
bpb.SectorsPerTrack = EndianUtilities.ToUInt16LittleEndian(bytes, offset + 0x18);
bpb.NumHeads = EndianUtilities.ToUInt16LittleEndian(bytes, offset + 0x1A);
bpb.HiddenSectors = EndianUtilities.ToUInt32LittleEndian(bytes, offset + 0x1C);
bpb.TotalSectors32 = EndianUtilities.ToUInt32LittleEndian(bytes, offset + 0x20);
bpb.BiosDriveNumber = bytes[offset + 0x24];
bpb.ChkDskFlags = bytes[offset + 0x25];
bpb.SignatureByte = bytes[offset + 0x26];
bpb.PaddingByte = bytes[offset + 0x27];
bpb.TotalSectors64 = EndianUtilities.ToInt64LittleEndian(bytes, offset + 0x28);
bpb.MftCluster = EndianUtilities.ToInt64LittleEndian(bytes, offset + 0x30);
bpb.MftMirrorCluster = EndianUtilities.ToInt64LittleEndian(bytes, offset + 0x38);
bpb.RawMftRecordSize = bytes[offset + 0x40];
bpb.RawIndexBufferSize = bytes[offset + 0x44];
bpb.VolumeSerialNumber = EndianUtilities.ToUInt64LittleEndian(bytes, offset + 0x48);
return bpb;
}
internal void ToBytes(byte[] buffer, int offset)
{
EndianUtilities.StringToBytes(OemId, buffer, offset + 0x03, 8);
EndianUtilities.WriteBytesLittleEndian(BytesPerSector, buffer, offset + 0x0B);
buffer[offset + 0x0D] = SectorsPerCluster;
EndianUtilities.WriteBytesLittleEndian(ReservedSectors, buffer, offset + 0x0E);
buffer[offset + 0x10] = NumFats;
EndianUtilities.WriteBytesLittleEndian(FatRootEntriesCount, buffer, offset + 0x11);
EndianUtilities.WriteBytesLittleEndian(TotalSectors16, buffer, offset + 0x13);
buffer[offset + 0x15] = Media;
EndianUtilities.WriteBytesLittleEndian(FatSize16, buffer, offset + 0x16);
EndianUtilities.WriteBytesLittleEndian(SectorsPerTrack, buffer, offset + 0x18);
EndianUtilities.WriteBytesLittleEndian(NumHeads, buffer, offset + 0x1A);
EndianUtilities.WriteBytesLittleEndian(HiddenSectors, buffer, offset + 0x1C);
EndianUtilities.WriteBytesLittleEndian(TotalSectors32, buffer, offset + 0x20);
buffer[offset + 0x24] = BiosDriveNumber;
buffer[offset + 0x25] = ChkDskFlags;
buffer[offset + 0x26] = SignatureByte;
buffer[offset + 0x27] = PaddingByte;
EndianUtilities.WriteBytesLittleEndian(TotalSectors64, buffer, offset + 0x28);
EndianUtilities.WriteBytesLittleEndian(MftCluster, buffer, offset + 0x30);
EndianUtilities.WriteBytesLittleEndian(MftMirrorCluster, buffer, offset + 0x38);
buffer[offset + 0x40] = RawMftRecordSize;
buffer[offset + 0x44] = RawIndexBufferSize;
EndianUtilities.WriteBytesLittleEndian(VolumeSerialNumber, buffer, offset + 0x48);
}
internal int CalcRecordSize(byte rawSize)
{
if ((rawSize & 0x80) != 0)
{
return 1 << -(sbyte)rawSize;
}
return rawSize * SectorsPerCluster * BytesPerSector;
}
private static ulong GenSerialNumber()
{
byte[] buffer = new byte[8];
Random rng = new Random();
rng.NextBytes(buffer);
return EndianUtilities.ToUInt64LittleEndian(buffer, 0);
}
private byte CodeRecordSize(int size)
{
if (size >= BytesPerCluster)
{
return (byte)(size / BytesPerCluster);
}
sbyte val = 0;
while (size != 1)
{
size = (size >> 1) & 0x7FFFFFFF;
val++;
}
return (byte)-val;
}
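        // Worked example (editor's illustration, not from the original source): with 512-byte
        // sectors and 8 sectors per cluster (4 KB clusters), a 1 KB MFT record is smaller than a
        // cluster, so CodeRecordSize(1024) encodes it as the negated power of two -10 (byte 0xF6),
        // and CalcRecordSize(0xF6) decodes it back as 1 << 10 == 1024 bytes. A raw value without
        // the high bit set is simply a cluster count, e.g. CalcRecordSize(1) == 4096.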
}
}
| |
//-----------------------------------------------------------------------------
// Copyright (c) Microsoft Corporation. All rights reserved.
//-----------------------------------------------------------------------------
namespace Microsoft.InfoCards.Diagnostics
{
using System;
using System.Xml;
using System.Collections.Generic;
using System.Diagnostics;
using System.Globalization;
using System.ComponentModel; //win32exception
using System.Runtime.InteropServices;
using System.Runtime.CompilerServices;
using System.Runtime.ConstrainedExecution;
using Microsoft.Win32.SafeHandles;
using System.Security;
using System.Security.Principal;
using System.Runtime;
using System.ServiceModel.Diagnostics;
using System.Threading;
//
// For InfoCardBaseException
//
using System.IdentityModel.Selectors;
// Summary
// InfoCardTrace is the main driver class for the managed tracing infrastructure.
// Essentially it is a wrapper over the Indigo DiagnosticsAndTracing classes.
// Externally a facade of simple TraceXXXX calls is provided which
// internally thunk across to the indigo classes to perform the work.
//
// The trace class also provides support for flowing of correlation ids allowing
    // tracing of requests across process and managed / unmanaged boundaries.
    // See the Infocard Tracing documentation at http://team/sites/infocard for
    // details on configuration and usage.
//
// Remarks
// All functions are thread safe
//
// Example usage looks like:
// using IDT=Microsoft.InfoCards.Diagnostics.InfoCardTrace
// IDT.TraceVerbose( InfoCardTraceCode.StoreInvalidKey, myKey );
// IDT.TraceDebug( "Got an infocard {0} with name {1}", card, card.Name );
//
//
static class InfoCardTrace
{
static class TraceCode
{
public const int IdentityModelSelectors = 0xD0000;
public const int GeneralInformation = TraceCode.IdentityModelSelectors | 0X0001;
public const int StoreLoading = TraceCode.IdentityModelSelectors | 0X0002;
public const int StoreBeginTransaction = TraceCode.IdentityModelSelectors | 0X0003;
public const int StoreCommitTransaction = TraceCode.IdentityModelSelectors | 0X0004;
public const int StoreRollbackTransaction = TraceCode.IdentityModelSelectors | 0X0005;
public const int StoreClosing = TraceCode.IdentityModelSelectors | 0X0006;
public const int StoreFailedToOpenStore = TraceCode.IdentityModelSelectors | 0X0007;
public const int StoreSignatureNotValid = TraceCode.IdentityModelSelectors | 0X0008;
public const int StoreDeleting = TraceCode.IdentityModelSelectors | 0X0009;
}
static Dictionary<int, string> traceCodes = new Dictionary<int, string>(9)
{
{ TraceCode.GeneralInformation, "GeneralInformation" },
{ TraceCode.StoreLoading, "StoreLoading" },
{ TraceCode.StoreBeginTransaction, "StoreBeginTransaction" },
{ TraceCode.StoreCommitTransaction, "StoreCommitTransaction" },
{ TraceCode.StoreRollbackTransaction, "StoreRollbackTransaction" },
{ TraceCode.StoreClosing, "StoreClosing" },
{ TraceCode.StoreFailedToOpenStore, "StoreFailedToOpenStore" },
{ TraceCode.StoreSignatureNotValid, "StoreSignatureNotValid" },
{ TraceCode.StoreDeleting, "StoreDeleting" },
};
static string GetTraceString(int traceCode)
{
return traceCodes[traceCode];
}
static string GetMsdnTraceCode(int traceCode)
{
return LegacyDiagnosticTrace.GenerateMsdnTraceCode("System.IdentityModel.Selectors", GetTraceString(traceCode));
}
[DllImport("advapi32",
CharSet = CharSet.Unicode,
EntryPoint = "ReportEventW",
ExactSpelling = true,
SetLastError = true)]
private static extern bool ReportEvent([In] SafeHandle hEventLog,
[In] short type,
[In] ushort category,
[In] uint eventID,
[In] byte[] userSID,
[In] short numStrings,
[In] int dataLen,
[In] HandleRef strings,
[In] byte[] rawData);
//
// Summary:
// Provides a wrapper over a handle retrieved by RegisterEventSource
//
internal class SafeEventLogHandle : SafeHandle
{
[DllImport("advapi32",
CharSet = CharSet.Unicode,
EntryPoint = "RegisterEventSourceW",
ExactSpelling = true,
SetLastError = true)]
private static extern SafeEventLogHandle RegisterEventSource(string uncServerName, string sourceName);
[DllImport("advapi32",
CharSet = CharSet.Unicode,
EntryPoint = "DeregisterEventSource",
ExactSpelling = true,
SetLastError = true)]
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
private static extern bool DeregisterEventSource(IntPtr eventLog);
public static SafeEventLogHandle Construct()
{
SafeEventLogHandle h = RegisterEventSource(null, InfoCardTrace.InfoCardEventSource);
if (null == h || h.IsInvalid)
{
int error = Marshal.GetLastWin32Error();
TraceDebug("failed to registereventsource with error {0}", error);
}
return h;
}
            //
            // Summary:
            //  Manages the lifetime of a native handle retrieved by RegisterEventSource.
            //  The constructor is private; instances are created via Construct().
            //
private SafeEventLogHandle()
: base(IntPtr.Zero, true)
{
}
public override bool IsInvalid
{
get
{
return (IntPtr.Zero == base.handle);
}
}
//
// Summary:
// Releases the eventlog handle.
//
protected override bool ReleaseHandle()
{
#pragma warning suppress 56523
return DeregisterEventSource(base.handle);
}
}
//
// Summary:
// Returns whether the current exception is fatal.
// Notes:
// Currently this delegates to the code in ExceptionUtility.cs
//
[ReliabilityContract(Consistency.WillNotCorruptState, Cer.Success)]
public static bool IsFatal(Exception e)
{
return Fx.IsFatal(e);
}
public static TimerCallback ThunkCallback(TimerCallback callback)
{
return Fx.ThunkCallback(callback);
}
public static WaitCallback ThunkCallback(WaitCallback callback)
{
return Fx.ThunkCallback(callback);
}
public static void CloseInvalidOutSafeHandle(SafeHandle handle)
{
Utility.CloseInvalidOutSafeHandle(handle);
}
//
// The event source we log against. May need to be updated should our name change before rtm
//
const string InfoCardEventSource = "CardSpace 4.0.0.0";
//
// Summary:
// Writes an audit message to the application's event log
//
public static void Audit(EventCode code)
{
LogEvent(code, null, EventLogEntryType.Information);
}
public static void Audit(EventCode code, string message)
{
LogEvent(code, message, EventLogEntryType.Information);
}
public static void Assert(bool condition, string format, params object[] parameters)
{
if (condition)
{
return;
}
string message = format;
if (null != parameters && 0 != parameters.Length)
{
message = String.Format(CultureInfo.InvariantCulture, format, parameters);
}
TraceDebug("An assertion fired: {0}", message);
#if DEBUG
//
// Let DebugAssert handle this for us....
// If not in debugger, Assertion Failed: Abort=Quit, Retry=Debug, Ignore=Continue
// If in debugger, will hit a DebugBreak()
//
DiagnosticUtility.DebugAssert( false, message );
#else
//
// Retail assert failfasts service
//
FailFast(message);
#endif
}
[Conditional("DEBUG")]
public static void DebugAssert(bool condition, string format, params object[] parameters)
{
#if DEBUG
if (condition)
{
return;
}
string message = format;
if (null != parameters && 0 != parameters.Length)
{
message = String.Format( CultureInfo.InvariantCulture, format, parameters );
}
TraceDebug( "An assertion fired: {0}", message );
if (Debugger.IsAttached)
{
Debugger.Launch();
Debugger.Break();
}
DiagnosticUtility.DebugAssert( false, message );
FailFast( message );
#endif
}
//
// Facade functions to allow simple call semantics.
//
public static void FailFast(string message)
{
DiagnosticUtility.FailFast(message);
}
[Conditional("DEBUG")]
public static void TraceVerbose(int traceCode)
{
TraceInternal(TraceEventType.Verbose, traceCode, null);
}
[Conditional("DEBUG")]
public static void TraceVerbose(int traceCode, params object[] parameters)
{
TraceInternal(TraceEventType.Verbose, traceCode, parameters);
}
[Conditional("DEBUG")]
public static void TraceInfo(int traceCode)
{
TraceInternal(TraceEventType.Information, traceCode, null);
}
[Conditional("DEBUG")]
public static void TraceInfo(int traceCode, params object[] parameters)
{
TraceInternal(TraceEventType.Information, traceCode, parameters);
}
[Conditional("DEBUG")]
public static void TraceWarning(int traceCode)
{
TraceInternal(TraceEventType.Warning, traceCode, null);
}
[Conditional("DEBUG")]
public static void TraceWarning(int traceCode, params object[] parameters)
{
TraceInternal(TraceEventType.Warning, traceCode, parameters);
}
[Conditional("DEBUG")]
public static void TraceError(int traceCode)
{
TraceInternal(TraceEventType.Error, traceCode, null);
}
[Conditional("DEBUG")]
public static void TraceError(int traceCode, params object[] parameters)
{
TraceInternal(TraceEventType.Error, traceCode, parameters);
}
[Conditional("DEBUG")]
public static void TraceCritical(int traceCode)
{
TraceInternal(TraceEventType.Critical, traceCode, null);
}
[Conditional("DEBUG")]
public static void TraceCritical(int traceCode, params object[] parameters)
{
TraceInternal(TraceEventType.Critical, traceCode, parameters);
}
//
// Enable the setting of level explicitly.
//
[Conditional("DEBUG")]
public static void Trace(TraceEventType level, int traceCode)
{
TraceInternal(level, traceCode, null);
}
[Conditional("DEBUG")]
public static void Trace(TraceEventType level, int traceCode, params object[] parameters)
{
TraceInternal(level, traceCode, parameters);
}
//
// Summary
// DebugTrace is an additional level of tracing, intended for
        // use by the development team during the product development cycle.
        // The trace functions need no localization and can be fed arbitrary strings as
// the format specifier.
//
// Remarks
// Will be turned off in RETAIL builds.
// All tracing is done at the VERBOSE level.
//
// Parameters
// format - a format string using the standard .net string format specifier syntax
        // parameters - optional parameters to be embedded in the format string.
//
[Conditional("DEBUG")]
public static void TraceDebug(string format, params object[] parameters)
{
#if DEBUG
if (DiagnosticUtility.ShouldTraceVerbose)
{
// Retrieve the string from resources and build the message.
//
string message = format;
if (null != parameters && 0 != parameters.Length)
{
message = String.Format( CultureInfo.InvariantCulture, format, parameters );
}
//
// If we were passed a null message, at least flag it
//
if (String.IsNullOrEmpty(message))
{
message = "NULL DEBUG TRACE MESSAGE!";
}
//
// Build a trace message conforming to the ETL trace schema and
// call down through the diagnostic support classes to trace the call.
//
InfoCardTraceRecord tr = new InfoCardTraceRecord(
GetTraceString(TraceCode.GeneralInformation),
message );
DiagnosticUtility.DiagnosticTrace.TraceEvent(
TraceEventType.Verbose,
TraceCode.GeneralInformation,
SR.GetString(GetTraceString(TraceCode.GeneralInformation)),
GetMsdnTraceCode(TraceCode.GeneralInformation),
tr, null, message);
}
#endif
}
[Conditional("DEBUG")]
public static void TraceDebug(string message)
{
#if DEBUG
if (DiagnosticUtility.ShouldTraceVerbose)
{
//
// If we were passed a null message, at least flag it
//
if (String.IsNullOrEmpty(message))
{
message = "NULL DEBUG TRACE MESSAGE!";
}
//
// Build a trace message conforming to the ETL trace schema and
// call down through the diagnostic support classes to trace the call.
//
InfoCardTraceRecord tr = new InfoCardTraceRecord(
GetTraceString(TraceCode.GeneralInformation),
message );
DiagnosticUtility.DiagnosticTrace.TraceEvent(
TraceEventType.Verbose,
TraceCode.GeneralInformation,
SR.GetString(GetTraceString(TraceCode.GeneralInformation)),
GetMsdnTraceCode(TraceCode.GeneralInformation),
tr, null, message);
}
#endif
}
//
// Summary:
// Logs the event for the appropriate infocard error code. This code should
        // match the entries in messages.mc
// Parameters:
// code - the event code to log
// Notes:
// This code may need to be extended to support an array of string parameters. We will do this if our event
// log messages require it.
//
private static void LogEvent(EventCode code, string message, EventLogEntryType type)
{
using (SafeEventLogHandle handle = SafeEventLogHandle.Construct())
{
string parameter = message;
if (null != handle)
{
if (String.IsNullOrEmpty(parameter))
{
parameter = SR.GetString(SR.GeneralExceptionMessage);
}
//
// Report event expects a LPCTSTR* lpStrings. Use GCHandle, instead
// of writing code with unsafe because InfoCard client uses this
// and our client cannot contain any unsafe code.
//
//
// This is the array of LPCTSTRs
//
IntPtr[] stringRoots = new IntPtr[1];
//
// This is to pin the parameter string itself. Use an array here if you want more than 1 string
//
GCHandle stringParamHandle = new GCHandle();
//
// This is to pin the pointer to the array of LPCTSTRs
//
GCHandle stringsRootHandle = new GCHandle();
try
{
//
// Pin the IntPtrs (ie array of LPCTSTRs)
//
stringsRootHandle = GCHandle.Alloc(stringRoots, GCHandleType.Pinned);
//
// Pin the parameter string itself
//
stringParamHandle = GCHandle.Alloc(parameter, GCHandleType.Pinned);
//
// Give the intptr address of the pinned string
//
stringRoots[0] = stringParamHandle.AddrOfPinnedObject();
//
// From msdn: The interop marshaler passes only the handle [2nd arg to constructor in our case]
// to unmanaged code, and guarantees that the wrapper (passed as the first parameter
// to the constructor of the HandleRef) remains alive for the duration of the [PInvoke] call.
//
HandleRef data = new HandleRef(handle, stringsRootHandle.AddrOfPinnedObject());
SecurityIdentifier sid = WindowsIdentity.GetCurrent().User;
byte[] sidBA = new byte[sid.BinaryLength];
sid.GetBinaryForm(sidBA, 0);
if (!ReportEvent(
handle,
(short)type,
(ushort)InfoCardEventCategory.General,
(uint)code,
sidBA,
1,
0,
data,
null))
{
//
// Errors in the eventlog API should be ignored by applications
//
int error = Marshal.GetLastWin32Error();
TraceDebug("Failed to report the event with error {0}", error);
}
}
finally
{
if (stringsRootHandle.IsAllocated)
{
stringsRootHandle.Free();
}
if (stringParamHandle.IsAllocated)
{
stringParamHandle.Free();
}
}
}
}
}
public static void TraceAndLogException(Exception e)
{
bool shouldLog = false;
bool isInformational = false;
InfoCardBaseException ie = e as InfoCardBaseException;
//
            // We only log if this is an infocard exception that hasn't been previously logged,
            // and isn't the user cancelled exception.
//
if (null != ie && !(ie is UserCancelledException) && !ie.Logged)
{
shouldLog = true;
}
if (shouldLog)
{
//
// If this is the parent of a previously logged exception then log as
// informational.
// If one of the children is UserCancelled, don't log at all
//
Exception current = ie.InnerException;
while (null != current)
{
if (current is UserCancelledException)
{
shouldLog = false;
break;
}
else if (current is InfoCardBaseException)
{
if ((current as InfoCardBaseException).Logged)
{
isInformational = true;
}
}
current = current.InnerException;
}
}
if (shouldLog)
{
EventLogEntryType logType = isInformational ? EventLogEntryType.Information : EventLogEntryType.Error;
string message = ie.Message;
if (!isInformational)
{
message = BuildMessage(ie);
}
LogEvent((EventCode)ie.NativeHResult, message, logType);
}
TraceException(e);
}
private static string BuildMessage(InfoCardBaseException ie)
{
Exception ex = ie;
String errString = ex.Message + "\n";
if (null != ex.InnerException)
{
while (null != ex.InnerException)
{
errString += String.Format(System.Globalization.CultureInfo.CurrentUICulture,
SR.GetString(SR.InnerExceptionTraceFormat),
ex.InnerException.Message);
ex = ex.InnerException;
}
errString += String.Format(System.Globalization.CultureInfo.CurrentUICulture,
SR.GetString(SR.CallStackTraceFormat),
ie.ToString());
}
else
{
if (!String.IsNullOrEmpty(Environment.StackTrace))
{
errString += String.Format(System.Globalization.CultureInfo.CurrentUICulture,
SR.GetString(SR.CallStackTraceFormat),
Environment.StackTrace);
}
}
return errString;
}
//
// Summary:
// Logs a general exception in the event log
// Parameters:
// e - the exception to log.
//
[Conditional("DEBUG")]
public static void TraceException(Exception e)
{
Exception current = e;
int indent = 0;
while (null != current)
{
TraceDebug("{0}Exception: message={1}\n stack trace={2}",
new string(' ', indent * 2),
e.Message,
e.StackTrace);
current = current.InnerException;
indent++;
}
}
//
// Summary
// Throw an exception and log an error in the event log
//
public static Exception ThrowHelperError(Exception e)
{
TraceAndLogException(e);
return DiagnosticUtility.ExceptionUtility.ThrowHelperError(e);
}
//
// Summary
// Throw an exception but don't log in the event log
//
public static Exception ThrowHelperErrorWithNoLogging(Exception e)
{
return DiagnosticUtility.ExceptionUtility.ThrowHelperError(e);
}
//
// Summary
// Throw an exception and log a warning in the event log
//
public static Exception ThrowHelperWarning(Exception e)
{
TraceAndLogException(e);
return DiagnosticUtility.ExceptionUtility.ThrowHelperWarning(e);
}
//
// Summary
// Throw an exception and log a critical event in the event log
//
public static Exception ThrowHelperCritical(Exception e)
{
TraceAndLogException(e);
return DiagnosticUtility.ExceptionUtility.ThrowHelperCritical(e);
}
//
// Summary:
// Throws an infocard argument exception. Currently mapped to a communication exception,
//
public static void ThrowInvalidArgumentConditional(bool condition, string argument)
{
if (condition)
{
string message = string.Format(
System.Globalization.CultureInfo.CurrentUICulture,
SR.GetString(SR.ServiceInvalidArgument),
argument);
throw ThrowHelperError(new InfoCardArgumentException(message));
}
}
//
// Summary
// Throw an ArgumentNullException and log an error in the event log
//
public static Exception ThrowHelperArgumentNull(string err)
{
return DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull(err);
}
//
// Summary
// Throw an ArgumentException and log an error in the event log
//
public static Exception ThrowHelperArgument(string message)
{
return DiagnosticUtility.ExceptionUtility.ThrowHelperArgument(message);
}
//
// Summary
// Throw an ArgumentNullException and log an error in the event log
//
public static Exception ThrowHelperArgumentNull(string err, string message)
{
return DiagnosticUtility.ExceptionUtility.ThrowHelperArgumentNull(err, message);
}
//
// Summary
// The following series of calls enable finer grained control over tracing in the client
// All calls simply delegate down to the indigo DiagnosticTrace implementation which
// triggers it's behaviour based on the currently configured listeners.
//
// Remarks
// Typical usage is
// if( IDT.ShouldTraceVerbose() )
// {
// string toTrace = this.SafeDumpState();
// IDT.TraceVerbose( InfocardTraceCode.InfoCardCreated, toTrace );
// }
//
public static bool ShouldTrace(TraceEventType type)
{
return DiagnosticUtility.ShouldTrace(type);
}
public static bool ShouldTraceCritical
{
get { return DiagnosticUtility.ShouldTraceCritical; }
}
public static bool ShouldTraceError
{
get { return DiagnosticUtility.ShouldTraceError; }
}
public static bool ShouldTraceWarning
{
get { return DiagnosticUtility.ShouldTraceWarning; }
}
public static bool ShouldTraceInformation
{
get { return DiagnosticUtility.ShouldTraceInformation; }
}
public static bool ShouldTraceVerbose
{
get { return DiagnosticUtility.ShouldTraceVerbose; }
}
//
// Summary
// Expose the activity ids associated with the current flow of activity.
        // ActivityIDs allow the correlation of events across process and managed / unmanaged boundaries.
        // Normally they are managed implicitly. The .net runtime will ensure they flow across thread and
        // intra-process ( appdomain ) boundaries, and the indigo runtime will ensure they
        // flow across indigo interactions ( cross process and cross machine ).
        // We have a couple of responsibilities:
        // When transitioning from managed to unmanaged code:
// grab the activity id
// pass it across to native code through the activityID rpc parameter.
// When transitioning from unmanaged code
// call SetActivityId passing in the received id.
//
// Remarks
// Trace calls automatically attach the activityID on all calls.
//
public static Guid GetActivityId()
{
return System.Runtime.Diagnostics.DiagnosticTraceBase.ActivityId;
}
public static void SetActivityId(Guid activityId)
{
//
// This will trace by default at level verbose.
//
System.Runtime.Diagnostics.DiagnosticTraceBase.ActivityId = activityId;
}
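        //
        // Example (editor's sketch; NativeCall and receivedActivityId are placeholders for the
        // actual RPC into native code and the id handed back by the unmanaged caller):
        //
        //   Guid activityId = IDT.GetActivityId();   // capture before leaving managed code
        //   NativeCall(request, activityId);         // pass through the rpc activityID parameter
        //
        //   // ...and on the way back in, on the thread servicing the unmanaged callback:
        //   IDT.SetActivityId(receivedActivityId);
        //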
//
// Summary
// The main trace function. Responsible for extracting the appropriate string
        // from the application's resource file, formatting the string with the set of parameters
// if appropriate,
// and passing the request down to the IndigoDiagnostics classes.
//
// Parameters
// level - the level to trace at. verbose <= level <= critical
// code - the infocard trace code - a unique numeric / string identifier.
// parameters - an optional set of parameters used to supply additional diagnostic information
//
// Remarks
// Trace calls automatically attach the activityID on all calls.
//
[Conditional("DEBUG")]
private static void TraceInternal(
TraceEventType level,
int traceCode,
params object[] parameters)
{
#if DEBUG
if (DiagnosticUtility.ShouldTrace(level))
{
//
// Retrieve the string from resources and build the message.
//
#if INFOCARD_CLIENT
string message = SR.GetString(GetTraceString(traceCode));
#else
string message = SR.GetString(traceCode);
#endif
Assert( !String.IsNullOrEmpty( message ), "resource string lookup failed!!!" );
if (!String.IsNullOrEmpty( message ) && null != parameters)
{
try
{
message = String.Format(
System.Globalization.CultureInfo.CurrentUICulture,
message,
parameters );
}
catch (FormatException f)
{
Assert( false, "Invalid format: " + traceCode );
TraceException( f );
message = SR.GetString( SR.GeneralTraceMessage, traceCode );
}
}
//
// Build a trace message conforming to the ETL trace schema and
// call down through the diagnostic support classes to trace the call.
//
DiagnosticUtility.DiagnosticTrace.TraceEvent( level,
traceCode,
SR.GetString(GetTraceString(traceCode)),
                    GetMsdnTraceCode(traceCode),
new InfoCardTraceRecord( GetTraceString(traceCode), message ), null, message);
}
#endif
}
}
}
| |
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using Agdur.Abstractions;
using Agdur.Tests.Utilities;
using Xunit;
namespace Agdur.Tests
{
public class BenchmarkTests
{
[Fact]
public void Should_be_able_to_benchmark_as_baseline_using_writer()
{
var writer = new StringWriter();
Benchmark.This(() => new object()).AsBaseline<BenchmarkProfile>(writer);
string result = writer.ToString();
Console.WriteLine(result);
}
[Fact]
public void Should_be_able_to_set_custom_benchmark_strategy_provider()
{
bool wasCalled = false;
Benchmark.SetBenchmarkStrategyProvider(action =>
{
wasCalled = true;
return new DefaultBenchmarkStrategy(action);
});
Benchmark.This(() => new object());
wasCalled.ShouldBeTrue();
}
public class BenchmarkProfile : IBenchmarkProfile
{
public IBenchmarkBuilderContinutation Define(IBenchmarkBuilder builder)
{
return builder.Times(10).Average().InMilliseconds();
}
}
}
public class Should_be_able_to_benchmark_with
{
private readonly IBenchmarkBuilderWithSyntax<IBenchmarkBuilderContinutation> builder =
Benchmark.This(() => new object()).Times(1);
[Fact]
public void Custom_metric()
{
builder.WithCustom(new MultipleValueMetric("custom", data => data));
}
[Fact]
public void Custom_simplified_single_metric()
{
builder.WithCustom("custom", data => data.Sum());
}
[Fact]
public void Custom_simplified_multiple_metric()
{
builder.WithCustom("custom", data => data);
}
[Fact]
public void Average()
{
builder.Average();
}
[Fact]
public void First()
{
builder.First(1);
}
[Fact]
public void Max()
{
builder.Max();
}
[Fact]
public void Min()
{
builder.Min();
}
[Fact]
public void Total()
{
builder.Total();
}
}
public class Should_be_able_to_benchmark_in
{
private readonly IBenchmarkBuilderInSyntax<IBenchmarkBuilderContinutation> builder =
Benchmark.This(() => new object()).Times(1).WithCustom(new MultipleValueMetric("custom", data => data));
[Fact]
public void Custom_time()
{
builder.InCustom(sample => sample.Seconds, "s");
}
[Fact]
public void Milliseconds()
{
builder.InMilliseconds();
}
[Fact]
public void Ticks()
{
builder.InTicks();
}
}
public class Should_be_able_to_benchmark_once_in
{
private readonly IBenchmarkBuilderInSyntax<ISingleBenchmarkBuilderContinuation> builder =
Benchmark.This(() => new object()).Once().Value();
[Fact]
public void Custom_time()
{
builder.InCustom(sample => sample.Seconds, "s");
}
[Fact]
public void Milliseconds()
{
builder.InMilliseconds();
}
[Fact]
public void Ticks()
{
builder.InTicks();
}
}
public class Should_be_able_to_benchmark_to
{
private readonly IBenchmarkBuilderContinutation builder =
Benchmark.This(() => new object()).Times(10).WithCustom(new SingleValueMetric("custom", data => data.Sum())).InCustom(sample => sample.Seconds, "s");
[Fact]
public void Custom()
{
builder.ToCustom(new StringWriter());
}
[Fact]
public void Console()
{
builder.ToConsole();
}
[Fact]
public void Path()
{
builder.ToPath("filename");
}
}
public class Should_be_able_to_benchmark_as
{
private readonly IBenchmarkBuilderAsSyntax builder =
Benchmark.This(() => new object()).Times(10).WithCustom(new SingleValueMetric("custom", data => data.Sum())).InCustom(sample => sample.Seconds, "s").ToConsole();
[Fact]
public void Custom()
{
builder.AsCustom(new CustomOutputStrategy());
}
[Fact]
public void FormattedString()
{
builder.AsFormattedString();
}
[Fact]
public void Xml()
{
builder.AsXml();
}
public class CustomOutputStrategy : IOutputStrategy
{
public void Execute(TextWriter writer, IList<IMetric> metrics) { }
}
}
}
| |
// ********************************************************************************************************
// Product Name: DotSpatial.dll Alpha
// Description: A library module for the DotSpatial geospatial framework for .Net.
// ********************************************************************************************************
// The contents of this file are subject to the MIT License (MIT)
// you may not use this file except in compliance with the License. You may obtain a copy of the License at
// http://dotspatial.codeplex.com/license
//
// Software distributed under the License is distributed on an "AS IS" basis, WITHOUT WARRANTY OF
// ANY KIND, either expressed or implied. See the License for the specific language governing rights and
// limitations under the License.
//
// The Original Code is from MapWindow.dll version 6.0
//
// The Initial Developer of this Original Code is Ted Dunsford. Created 12/2/2008 9:28:05 AM
//
// Contributor(s): (Open source contributors should list themselves and their modifications here).
//
// ********************************************************************************************************
using System;
using System.ComponentModel;
using System.Drawing;
using System.Drawing.Drawing2D;
using System.Windows.Forms;
namespace DotSpatial.Data.Forms
{
/// <summary>
/// ScrollingControl that provides autoscroll and custom draw that won't crash mono
/// </summary>
[ToolboxItem(false)]
public class ScrollingControl : Control
{
#region Events
/// <summary>
/// Occurs after the base drawing content has been rendered to the page.
/// </summary>
public event EventHandler<PaintEventArgs> Initialized;
#endregion
#region Private Variables
private readonly Brush _controlBrush;
private Brush _backImageBrush;
private Brush _backcolorBrush;
private Rectangle _controlRectangle;
private Rectangle _documentRectangle;
private bool _firstDrawing;
private bool _isInitialized;
private Bitmap _page; // the page is always the size of the control
private Size _pageSize;
private bool _resetOnResize;
private Label lblCorner;
private HScrollBar scrHorizontal;
private VScrollBar scrVertical;
#endregion
#region Constructors
/// <summary>
/// Creates a new instance of ScrollingControl
/// </summary>
public ScrollingControl()
{
InitializeComponent();
_backcolorBrush = new SolidBrush(base.BackColor);
_controlBrush = new SolidBrush(SystemColors.Control);
if (base.BackgroundImage != null)
{
_backImageBrush = new TextureBrush(base.BackgroundImage);
}
base.MinimumSize = new Size(5, 5);
}
#endregion
private void InitializeComponent()
{
scrVertical = new VScrollBar();
scrHorizontal = new HScrollBar();
lblCorner = new Label();
SuspendLayout();
//
// scrVertical
//
scrVertical.Anchor = (AnchorStyles.Top | AnchorStyles.Bottom)
| AnchorStyles.Right;
scrVertical.Location = new Point(170, 0);
scrVertical.Name = "scrVertical";
scrVertical.Size = new Size(17, 411);
scrVertical.TabIndex = 0;
scrVertical.Scroll += scrVertical_Scroll;
//
// scrHorizontal
//
scrHorizontal.Anchor = (AnchorStyles.Bottom | AnchorStyles.Left)
| AnchorStyles.Right;
scrHorizontal.Location = new Point(0, 411);
scrHorizontal.Name = "scrHorizontal";
scrHorizontal.Size = new Size(169, 17);
scrHorizontal.TabIndex = 1;
scrHorizontal.Scroll += scrHorizontal_Scroll;
//
// lblCorner
//
lblCorner.Anchor = AnchorStyles.Bottom | AnchorStyles.Right;
lblCorner.Location = new Point(169, 411);
lblCorner.AutoSize = false;
lblCorner.Text = null;
lblCorner.Size = new Size(18, 17);
lblCorner.BackColor = SystemColors.Control;
//
// ScrollingControl
//
Controls.Add(scrHorizontal);
Controls.Add(scrVertical);
Controls.Add(lblCorner);
Name = "ScrollingControl";
Size = new Size(187, 428);
ResumeLayout(false);
}
        private void scrHorizontal_Scroll(object sender, ScrollEventArgs e)
        {
            OnHorizontalScroll(sender, e);
        }
        private void scrVertical_Scroll(object sender, ScrollEventArgs e)
        {
            OnVerticalScroll(sender, e);
        }
/// <summary>
/// Occurs when scrolling vertically
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
protected virtual void OnVerticalScroll(object sender, ScrollEventArgs e)
{
            _controlRectangle.Y = scrVertical.Value;
Initialize();
Invalidate();
}
/// <summary>
/// Occurs when scrolling horizontally
/// </summary>
/// <param name="sender"></param>
/// <param name="e"></param>
protected virtual void OnHorizontalScroll(object sender, ScrollEventArgs e)
{
            _controlRectangle.X = scrHorizontal.Value;
Initialize();
Invalidate();
}
#region Methods
/// <summary>
        /// Gets a rectangle in document coordinates for the specified rectangle in client coordinates
/// </summary>
/// <param name="rect"></param>
/// <returns></returns>
public Rectangle ClientToDocument(Rectangle rect)
{
return new Rectangle(rect.X + _controlRectangle.X, rect.Y + _controlRectangle.Y, rect.Width, rect.Height);
}
/// <summary>
/// Translates a rectangle from document coordinates to coordinates relative to the client control
/// </summary>
/// <param name="rect"></param>
/// <returns></returns>
public Rectangle DocumentToClient(Rectangle rect)
{
return new Rectangle(rect.X - _controlRectangle.X, rect.Y - _controlRectangle.Y, rect.Width, rect.Height);
}
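        // Example (illustrative numbers only): if the control is scrolled so that
        // _controlRectangle.X == 100 and _controlRectangle.Y == 50, then a 10x10 client
        // rectangle at (5, 5) maps to document coordinates (105, 55) via ClientToDocument,
        // and DocumentToClient(new Rectangle(105, 55, 10, 10)) maps it back to (5, 5).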
/// <summary>
/// Recalculates the size and visibility of the scroll bars based on the current document.
/// </summary>
public void ResetScroll()
{
_controlRectangle.Width = ClientRectangle.Width;
_controlRectangle.Height = ClientRectangle.Height;
int dw = _documentRectangle.Width;
int dh = _documentRectangle.Height;
int cw = Width;
int ch = Height;
if (dw == 0 || dh == 0) return; // prevent divide by 0
if (cw == 0 || ch == 0) return;
scrHorizontal.LargeChange = (cw * cw) / dw;
scrVertical.LargeChange = (ch * ch) / dh;
scrHorizontal.Maximum = dw;
scrVertical.Maximum = dh;
if (dw <= cw)
{
scrHorizontal.Visible = false;
}
else
{
if (scrHorizontal.Enabled) scrHorizontal.Visible = true;
}
if (dh <= ch)
{
scrVertical.Visible = false;
}
else
{
if (scrVertical.Enabled) scrVertical.Visible = true;
}
lblCorner.Visible = scrVertical.Visible || scrHorizontal.Visible;
}
#endregion
#region Properties
/// <summary>
/// Gets or sets the background color to use for this control
/// </summary>
public override Color BackColor
{
get
{
return base.BackColor;
}
set
{
if (_backcolorBrush != null) _backcolorBrush.Dispose();
_backcolorBrush = new SolidBrush(value);
base.BackColor = value;
}
}
/// <summary>
///
/// </summary>
public override Image BackgroundImage
{
get
{
return base.BackgroundImage;
}
set
{
base.BackgroundImage = value;
if (_backImageBrush != null) _backImageBrush.Dispose();
if (value == null) return;
_backImageBrush = new TextureBrush(BackgroundImage);
Size s = _pageSize;
if (s.Width < BackgroundImage.Width) s.Width = BackgroundImage.Width;
if (s.Height < BackgroundImage.Height) s.Height = BackgroundImage.Height;
_pageSize = s;
}
}
/// <summary>
/// Gets the rectangular region of the control in page coordinates.
/// </summary>
public Rectangle ControlRectangle
{
get { return _controlRectangle; }
set { _controlRectangle = value; }
}
/// <summary>
/// Gets or sets the rectangle for the entire content, whether on the page buffer or not. X and Y for this
/// are always 0.
/// </summary>
public virtual Rectangle DocumentRectangle
{
get { return _documentRectangle; }
set { _documentRectangle = value; }
}
/// <summary>
/// Gets or sets whether or not the page for this control has been drawn.
/// </summary>
public bool IsInitialized
{
get { return _isInitialized; }
set { _isInitialized = value; }
}
/// <summary>
/// Gets or sets a boolean indicating whether or not horizontal scrolling is enabled
/// </summary>
public bool HorizontalScrollEnabled
{
get { return scrHorizontal.Enabled; }
set { scrHorizontal.Enabled = value; }
}
/// <summary>
/// Gets or sets the page image being used as a buffer. This is useful
/// for content changes that need to be made rapidly. First refresh
/// a small region of this page, and then invalidate the client rectangle.
/// </summary>
[DesignerSerializationVisibility(DesignerSerializationVisibility.Hidden)]
public Bitmap Page
{
get { return _page; }
set { _page = value; }
}
/// <summary>
/// Gets or sets a boolean that indicates whether or not the scrolling
/// should be reset on every resize or not.
/// </summary>
public bool ResetOnResize
{
get { return _resetOnResize; }
set { _resetOnResize = value; }
}
/// <summary>
/// Gets or sets a boolean indicating whether the vertical scroll should be permitted
/// </summary>
public bool VerticalScrollEnabled
{
get { return scrVertical.Enabled; }
set { scrVertical.Enabled = value; }
}
#endregion
#region Protected Methods
/// <summary>
/// Prevent flicker by preventing this
/// </summary>
/// <param name="pevent"></param>
protected override void OnPaintBackground(PaintEventArgs pevent)
{
// Do Nothing
}
/// <summary>
/// On Paint only paints the specified clip rectangle, but paints
/// it from the page buffer.
/// </summary>
/// <param name="e"></param>
protected override void OnPaint(PaintEventArgs e)
{
Rectangle clip = e.ClipRectangle;
if (clip.IsEmpty) clip = ClientRectangle;
if (IsInitialized == false || _page == null)
{
Initialize(); // redraw the entire page buffer if necessary
}
using (var buffer = new Bitmap(clip.Width, clip.Height))
using (var g = Graphics.FromImage(buffer))
using(var mat = new Matrix())
{
mat.Translate(-clip.X, -clip.Y); // draw in "client" coordinates
g.Transform = mat;
OnDraw(new PaintEventArgs(g, clip)); // draw content to the small temporary buffer.
e.Graphics.DrawImage(buffer, clip); // draw from our small, temporary buffer to the screen
}
}
/// <summary>
/// Occurs during custom drawing when erasing things
/// </summary>
/// <param name="e"></param>
protected virtual void OnDrawBackground(PaintEventArgs e)
{
// e.Graphics.FillRectangle(_backcolorBrush, e.ClipRectangle);
// e.Graphics.DrawImage(_page, e.ClipRectangle, ClientToPage(e.ClipRectangle), GraphicsUnit.Pixel);
}
/// <summary>
/// Occurs during custom drawing
/// </summary>
/// <param name="e"></param>
protected virtual void OnDraw(PaintEventArgs e)
{
if (_firstDrawing == false)
{
ResetScroll();
_firstDrawing = true;
}
e.Graphics.FillRectangle(_backcolorBrush, e.ClipRectangle); // in client coordinates, the clip-rectangle is the area to clear
e.Graphics.DrawImage(_page, e.ClipRectangle, e.ClipRectangle, GraphicsUnit.Pixel);
}
/// <summary>
/// Disposes the unmanaged memory objects and optionally disposes
/// the managed memory objects
/// </summary>
/// <param name="disposing"></param>
protected override void Dispose(bool disposing)
{
if (disposing)
{
if (_backcolorBrush != null) _backcolorBrush.Dispose();
if (_controlBrush != null) _controlBrush.Dispose();
if (_backImageBrush != null) _backImageBrush.Dispose();
if (_page != null) _page.Dispose();
}
base.Dispose(disposing);
}
/// <summary>
/// Fires the Initialized event
/// </summary>
/// <param name="e"></param>
protected virtual void OnInitialize(PaintEventArgs e)
{
if (Initialized != null) Initialized(this, e);
}
/// <summary>
///
/// </summary>
/// <param name="e"></param>
protected override void OnResize(EventArgs e)
{
ResetScroll();
base.OnResize(e);
}
#endregion
#region Private Methods
// Redraws the entire contents of the control, even if the clip rectangle is smaller.
private void Initialize()
{
if (_documentRectangle.IsEmpty)
{
_documentRectangle = ClientRectangle;
}
if (_controlRectangle.IsEmpty)
{
_controlRectangle = ClientRectangle;
}
else
{
_controlRectangle.Width = ClientRectangle.Width;
_controlRectangle.Height = ClientRectangle.Height;
}
            if (_page != null) _page.Dispose(); // release the previous page buffer before re-allocating
            _page = new Bitmap(Width, Height);
Graphics g = Graphics.FromImage(_page);
g.Clear(BackColor);
if (BackgroundImage != null)
{
if (BackgroundImageLayout == ImageLayout.None)
{
g.DrawImage(BackgroundImage, ClientRectangle, _controlRectangle, GraphicsUnit.Pixel);
}
if (BackgroundImageLayout == ImageLayout.Center)
{
int x = (Width - BackgroundImage.Width) / 2;
int y = (Height - BackgroundImage.Height) / 2;
g.DrawImage(BackgroundImage, new Point(x, y));
}
if (BackgroundImageLayout == ImageLayout.Stretch || BackgroundImageLayout == ImageLayout.Zoom)
{
g.DrawImage(BackgroundImage, ClientRectangle);
}
if (BackgroundImageLayout == ImageLayout.Tile)
{
//g.DrawImage(BackgroundImage, new Point(0, 0));
g.FillRectangle(_backImageBrush, ClientRectangle);
}
}
Matrix mat = g.Transform;
Matrix oldMat = g.Transform;
mat.Translate(-_controlRectangle.X, -_controlRectangle.Y);
g.Transform = mat;
OnInitialize(new PaintEventArgs(g, ClientRectangle));
g.Transform = oldMat;
g.Dispose();
}
#endregion
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Linq;
using System.Collections;
using System.IO;
using System.Runtime.InteropServices;
using Xunit;
namespace System.Security.Cryptography.X509Certificates.Tests
{
public static class CollectionTests
{
[Fact]
public static void X509CertificateCollectionsProperties()
{
IList ilist = new X509CertificateCollection();
Assert.False(ilist.IsSynchronized);
Assert.False(ilist.IsFixedSize);
Assert.False(ilist.IsReadOnly);
ilist = new X509Certificate2Collection();
Assert.False(ilist.IsSynchronized);
Assert.False(ilist.IsFixedSize);
Assert.False(ilist.IsReadOnly);
}
[Fact]
public static void X509CertificateCollectionConstructors()
{
using (X509Certificate c1 = new X509Certificate())
using (X509Certificate c2 = new X509Certificate())
using (X509Certificate c3 = new X509Certificate())
{
X509CertificateCollection cc = new X509CertificateCollection(new X509Certificate[] { c1, c2, c3 });
Assert.Equal(3, cc.Count);
Assert.Same(c1, cc[0]);
Assert.Same(c2, cc[1]);
Assert.Same(c3, cc[2]);
X509CertificateCollection cc2 = new X509CertificateCollection(cc);
Assert.Equal(3, cc2.Count);
Assert.Same(c1, cc2[0]);
Assert.Same(c2, cc2[1]);
Assert.Same(c3, cc2[2]);
Assert.Throws<ArgumentNullException>(() => new X509CertificateCollection(new X509Certificate[] { c1, c2, null, c3 }));
}
}
[Fact]
public static void X509Certificate2CollectionConstructors()
{
using (X509Certificate2 c1 = new X509Certificate2())
using (X509Certificate2 c2 = new X509Certificate2())
using (X509Certificate2 c3 = new X509Certificate2())
{
X509Certificate2Collection cc = new X509Certificate2Collection(new X509Certificate2[] { c1, c2, c3 });
Assert.Equal(3, cc.Count);
Assert.Same(c1, cc[0]);
Assert.Same(c2, cc[1]);
Assert.Same(c3, cc[2]);
X509Certificate2Collection cc2 = new X509Certificate2Collection(cc);
Assert.Equal(3, cc2.Count);
Assert.Same(c1, cc2[0]);
Assert.Same(c2, cc2[1]);
Assert.Same(c3, cc2[2]);
Assert.Throws<ArgumentNullException>(() => new X509Certificate2Collection(new X509Certificate2[] { c1, c2, null, c3 }));
using (X509Certificate c4 = new X509Certificate())
{
X509Certificate2Collection collection = new X509Certificate2Collection { c1, c2, c3 };
((IList)collection).Add(c4); // Add non-X509Certificate2 object
Assert.Throws<InvalidCastException>(() => new X509Certificate2Collection(collection));
}
}
}
[Fact]
public static void X509Certificate2CollectionEnumerator()
{
using (X509Certificate2 c1 = new X509Certificate2())
using (X509Certificate2 c2 = new X509Certificate2())
using (X509Certificate2 c3 = new X509Certificate2())
{
X509Certificate2Collection cc = new X509Certificate2Collection(new X509Certificate2[] { c1, c2, c3 });
object ignored;
X509Certificate2Enumerator e = cc.GetEnumerator();
for (int i = 0; i < 2; i++)
{
// Not started
Assert.Throws<InvalidOperationException>(() => ignored = e.Current);
Assert.True(e.MoveNext());
Assert.Same(c1, e.Current);
Assert.True(e.MoveNext());
Assert.Same(c2, e.Current);
Assert.True(e.MoveNext());
Assert.Same(c3, e.Current);
Assert.False(e.MoveNext());
Assert.False(e.MoveNext());
Assert.False(e.MoveNext());
Assert.False(e.MoveNext());
Assert.False(e.MoveNext());
// Ended
Assert.Throws<InvalidOperationException>(() => ignored = e.Current);
e.Reset();
}
IEnumerator e2 = cc.GetEnumerator();
TestNonGenericEnumerator(e2, c1, c2, c3);
IEnumerator e3 = ((IEnumerable)cc).GetEnumerator();
TestNonGenericEnumerator(e3, c1, c2, c3);
}
}
[Fact]
public static void X509CertificateCollectionEnumerator()
{
using (X509Certificate2 c1 = new X509Certificate2())
using (X509Certificate2 c2 = new X509Certificate2())
using (X509Certificate2 c3 = new X509Certificate2())
{
X509CertificateCollection cc = new X509CertificateCollection(new X509Certificate[] { c1, c2, c3 });
object ignored;
X509CertificateCollection.X509CertificateEnumerator e = cc.GetEnumerator();
for (int i = 0; i < 2; i++)
{
// Not started
Assert.Throws<InvalidOperationException>(() => ignored = e.Current);
Assert.True(e.MoveNext());
Assert.Same(c1, e.Current);
Assert.True(e.MoveNext());
Assert.Same(c2, e.Current);
Assert.True(e.MoveNext());
Assert.Same(c3, e.Current);
Assert.False(e.MoveNext());
Assert.False(e.MoveNext());
Assert.False(e.MoveNext());
Assert.False(e.MoveNext());
Assert.False(e.MoveNext());
// Ended
Assert.Throws<InvalidOperationException>(() => ignored = e.Current);
e.Reset();
}
IEnumerator e2 = cc.GetEnumerator();
TestNonGenericEnumerator(e2, c1, c2, c3);
IEnumerator e3 = ((IEnumerable)cc).GetEnumerator();
TestNonGenericEnumerator(e3, c1, c2, c3);
}
}
private static void TestNonGenericEnumerator(IEnumerator e, object c1, object c2, object c3)
{
object ignored;
for (int i = 0; i < 2; i++)
{
// Not started
Assert.Throws<InvalidOperationException>(() => ignored = e.Current);
Assert.True(e.MoveNext());
Assert.Same(c1, e.Current);
Assert.True(e.MoveNext());
Assert.Same(c2, e.Current);
Assert.True(e.MoveNext());
Assert.Same(c3, e.Current);
Assert.False(e.MoveNext());
Assert.False(e.MoveNext());
Assert.False(e.MoveNext());
Assert.False(e.MoveNext());
Assert.False(e.MoveNext());
// Ended
Assert.Throws<InvalidOperationException>(() => ignored = e.Current);
e.Reset();
}
}
[Fact]
public static void X509CertificateCollectionThrowsArgumentNullException()
{
using (X509Certificate certificate = new X509Certificate())
{
Assert.Throws<ArgumentNullException>(() => new X509CertificateCollection((X509Certificate[])null));
Assert.Throws<ArgumentNullException>(() => new X509CertificateCollection((X509CertificateCollection)null));
X509CertificateCollection collection = new X509CertificateCollection { certificate };
Assert.Throws<ArgumentNullException>(() => collection[0] = null);
Assert.Throws<ArgumentNullException>(() => collection.Add(null));
Assert.Throws<ArgumentNullException>(() => collection.AddRange((X509Certificate[])null));
Assert.Throws<ArgumentNullException>(() => collection.AddRange((X509CertificateCollection)null));
Assert.Throws<ArgumentNullException>(() => collection.CopyTo(null, 0));
Assert.Throws<ArgumentNullException>(() => collection.Insert(0, null));
Assert.Throws<ArgumentNullException>(() => collection.Remove(null));
IList ilist = (IList)collection;
Assert.Throws<ArgumentNullException>(() => ilist[0] = null);
Assert.Throws<ArgumentNullException>(() => ilist.Add(null));
Assert.Throws<ArgumentNullException>(() => ilist.CopyTo(null, 0));
Assert.Throws<ArgumentNullException>(() => ilist.Insert(0, null));
Assert.Throws<ArgumentNullException>(() => ilist.Remove(null));
}
Assert.Throws<ArgumentNullException>(() => new X509CertificateCollection.X509CertificateEnumerator(null));
}
[Fact]
public static void X509Certificate2CollectionThrowsArgumentNullException()
{
using (X509Certificate2 certificate = new X509Certificate2())
{
Assert.Throws<ArgumentNullException>(() => new X509Certificate2Collection((X509Certificate2[])null));
Assert.Throws<ArgumentNullException>(() => new X509Certificate2Collection((X509Certificate2Collection)null));
X509Certificate2Collection collection = new X509Certificate2Collection { certificate };
Assert.Throws<ArgumentNullException>(() => collection[0] = null);
Assert.Throws<ArgumentNullException>(() => collection.Add((X509Certificate)null));
Assert.Throws<ArgumentNullException>(() => collection.Add((X509Certificate2)null));
Assert.Throws<ArgumentNullException>(() => collection.AddRange((X509Certificate[])null));
Assert.Throws<ArgumentNullException>(() => collection.AddRange((X509CertificateCollection)null));
Assert.Throws<ArgumentNullException>(() => collection.AddRange((X509Certificate2[])null));
Assert.Throws<ArgumentNullException>(() => collection.AddRange((X509Certificate2Collection)null));
Assert.Throws<ArgumentNullException>(() => collection.CopyTo(null, 0));
Assert.Throws<ArgumentNullException>(() => collection.Insert(0, (X509Certificate)null));
Assert.Throws<ArgumentNullException>(() => collection.Insert(0, (X509Certificate2)null));
Assert.Throws<ArgumentNullException>(() => collection.Remove((X509Certificate)null));
Assert.Throws<ArgumentNullException>(() => collection.Remove((X509Certificate2)null));
Assert.Throws<ArgumentNullException>(() => collection.RemoveRange((X509Certificate2[])null));
Assert.Throws<ArgumentNullException>(() => collection.RemoveRange((X509Certificate2Collection)null));
Assert.Throws<ArgumentNullException>(() => collection.Import((byte[])null));
Assert.Throws<ArgumentNullException>(() => collection.Import((string)null));
IList ilist = (IList)collection;
Assert.Throws<ArgumentNullException>(() => ilist[0] = null);
Assert.Throws<ArgumentNullException>(() => ilist.Add(null));
Assert.Throws<ArgumentNullException>(() => ilist.CopyTo(null, 0));
Assert.Throws<ArgumentNullException>(() => ilist.Insert(0, null));
Assert.Throws<ArgumentNullException>(() => ilist.Remove(null));
}
}
[Fact]
public static void X509CertificateCollectionThrowsArgumentOutOfRangeException()
{
using (X509Certificate certificate = new X509Certificate())
{
X509CertificateCollection collection = new X509CertificateCollection { certificate };
Assert.Throws<ArgumentOutOfRangeException>(() => collection[-1]);
Assert.Throws<ArgumentOutOfRangeException>(() => collection[collection.Count]);
Assert.Throws<ArgumentOutOfRangeException>(() => collection[-1] = certificate);
Assert.Throws<ArgumentOutOfRangeException>(() => collection[collection.Count] = certificate);
Assert.Throws<ArgumentOutOfRangeException>(() => collection.Insert(-1, certificate));
Assert.Throws<ArgumentOutOfRangeException>(() => collection.Insert(collection.Count + 1, certificate));
Assert.Throws<ArgumentOutOfRangeException>(() => collection.RemoveAt(-1));
Assert.Throws<ArgumentOutOfRangeException>(() => collection.RemoveAt(collection.Count));
IList ilist = (IList)collection;
Assert.Throws<ArgumentOutOfRangeException>(() => ilist[-1]);
Assert.Throws<ArgumentOutOfRangeException>(() => ilist[collection.Count]);
Assert.Throws<ArgumentOutOfRangeException>(() => ilist[-1] = certificate);
Assert.Throws<ArgumentOutOfRangeException>(() => ilist[collection.Count] = certificate);
Assert.Throws<ArgumentOutOfRangeException>(() => ilist.Insert(-1, certificate));
Assert.Throws<ArgumentOutOfRangeException>(() => ilist.Insert(collection.Count + 1, certificate));
Assert.Throws<ArgumentOutOfRangeException>(() => ilist.RemoveAt(-1));
Assert.Throws<ArgumentOutOfRangeException>(() => ilist.RemoveAt(collection.Count));
}
}
[Fact]
public static void X509Certificate2CollectionThrowsArgumentOutOfRangeException()
{
using (X509Certificate2 certificate = new X509Certificate2())
{
X509Certificate2Collection collection = new X509Certificate2Collection { certificate };
Assert.Throws<ArgumentOutOfRangeException>(() => collection[-1]);
Assert.Throws<ArgumentOutOfRangeException>(() => collection[collection.Count]);
Assert.Throws<ArgumentOutOfRangeException>(() => collection[-1] = certificate);
Assert.Throws<ArgumentOutOfRangeException>(() => collection[collection.Count] = certificate);
Assert.Throws<ArgumentOutOfRangeException>(() => collection.Insert(-1, certificate));
Assert.Throws<ArgumentOutOfRangeException>(() => collection.Insert(collection.Count + 1, certificate));
Assert.Throws<ArgumentOutOfRangeException>(() => collection.RemoveAt(-1));
Assert.Throws<ArgumentOutOfRangeException>(() => collection.RemoveAt(collection.Count));
IList ilist = (IList)collection;
Assert.Throws<ArgumentOutOfRangeException>(() => ilist[-1]);
Assert.Throws<ArgumentOutOfRangeException>(() => ilist[collection.Count]);
Assert.Throws<ArgumentOutOfRangeException>(() => ilist[-1] = certificate);
Assert.Throws<ArgumentOutOfRangeException>(() => ilist[collection.Count] = certificate);
Assert.Throws<ArgumentOutOfRangeException>(() => ilist.Insert(-1, certificate));
Assert.Throws<ArgumentOutOfRangeException>(() => ilist.Insert(collection.Count + 1, certificate));
Assert.Throws<ArgumentOutOfRangeException>(() => ilist.RemoveAt(-1));
Assert.Throws<ArgumentOutOfRangeException>(() => ilist.RemoveAt(collection.Count));
}
}
[Fact]
public static void X509CertificateCollectionContains()
{
using (X509Certificate c1 = new X509Certificate())
using (X509Certificate c2 = new X509Certificate())
using (X509Certificate c3 = new X509Certificate())
{
X509CertificateCollection collection = new X509CertificateCollection(new X509Certificate[] { c1, c2, c3 });
Assert.True(collection.Contains(c1));
Assert.True(collection.Contains(c2));
Assert.True(collection.Contains(c3));
Assert.False(collection.Contains(null));
IList ilist = (IList)collection;
Assert.True(ilist.Contains(c1));
Assert.True(ilist.Contains(c2));
Assert.True(ilist.Contains(c3));
Assert.False(ilist.Contains(null));
Assert.False(ilist.Contains("Bogus"));
}
}
[Fact]
public static void X509Certificate2CollectionContains()
{
using (X509Certificate2 c1 = new X509Certificate2())
using (X509Certificate2 c2 = new X509Certificate2())
using (X509Certificate2 c3 = new X509Certificate2())
{
X509Certificate2Collection collection = new X509Certificate2Collection(new X509Certificate2[] { c1, c2, c3 });
Assert.True(collection.Contains(c1));
Assert.True(collection.Contains(c2));
Assert.True(collection.Contains(c3));
// Note: X509Certificate2Collection.Contains used to throw ArgumentNullException, but it
// has been deliberately changed to no longer throw to match the behavior of
// X509CertificateCollection.Contains and the IList.Contains implementation, which do not
// throw.
Assert.False(collection.Contains(null));
IList ilist = (IList)collection;
Assert.True(ilist.Contains(c1));
Assert.True(ilist.Contains(c2));
Assert.True(ilist.Contains(c3));
Assert.False(ilist.Contains(null));
Assert.False(ilist.Contains("Bogus"));
}
}
[Fact]
public static void X509CertificateCollectionEnumeratorModification()
{
using (X509Certificate c1 = new X509Certificate())
using (X509Certificate c2 = new X509Certificate())
using (X509Certificate c3 = new X509Certificate())
{
X509CertificateCollection cc = new X509CertificateCollection(new X509Certificate[] { c1, c2, c3 });
X509CertificateCollection.X509CertificateEnumerator e = cc.GetEnumerator();
cc.Add(c1);
// Collection changed.
Assert.Throws<InvalidOperationException>(() => e.MoveNext());
Assert.Throws<InvalidOperationException>(() => e.Reset());
}
}
[Fact]
public static void X509Certificate2CollectionEnumeratorModification()
{
using (X509Certificate2 c1 = new X509Certificate2())
using (X509Certificate2 c2 = new X509Certificate2())
using (X509Certificate2 c3 = new X509Certificate2())
{
X509Certificate2Collection cc = new X509Certificate2Collection(new X509Certificate2[] { c1, c2, c3 });
X509Certificate2Enumerator e = cc.GetEnumerator();
cc.Add(c1);
// Collection changed.
Assert.Throws<InvalidOperationException>(() => e.MoveNext());
Assert.Throws<InvalidOperationException>(() => e.Reset());
}
}
[Fact]
public static void X509CertificateCollectionAdd()
{
using (X509Certificate2 c1 = new X509Certificate2())
using (X509Certificate2 c2 = new X509Certificate2())
{
X509CertificateCollection cc = new X509CertificateCollection();
int idx = cc.Add(c1);
Assert.Equal(0, idx);
Assert.Same(c1, cc[0]);
idx = cc.Add(c2);
Assert.Equal(1, idx);
Assert.Same(c2, cc[1]);
Assert.Throws<ArgumentNullException>(() => cc.Add(null));
IList il = new X509CertificateCollection();
idx = il.Add(c1);
Assert.Equal(0, idx);
Assert.Same(c1, il[0]);
idx = il.Add(c2);
Assert.Equal(1, idx);
Assert.Same(c2, il[1]);
Assert.Throws<ArgumentNullException>(() => il.Add(null));
}
}
[Fact]
public static void X509CertificateCollectionAsIList()
{
using (X509Certificate2 c1 = new X509Certificate2())
using (X509Certificate2 c2 = new X509Certificate2())
{
IList il = new X509CertificateCollection();
il.Add(c1);
il.Add(c2);
Assert.Throws<ArgumentNullException>(() => il[0] = null);
string bogus = "Bogus";
Assert.Throws<ArgumentException>(() => il[0] = bogus);
Assert.Throws<ArgumentException>(() => il.Add(bogus));
Assert.Throws<ArgumentException>(() => il.Insert(0, bogus));
}
}
[Fact]
public static void AddDoesNotClone()
{
using (X509Certificate2 c1 = new X509Certificate2())
{
X509Certificate2Collection coll = new X509Certificate2Collection();
coll.Add(c1);
Assert.Same(c1, coll[0]);
}
}
[Fact]
public static void ImportStoreSavedAsCerData()
{
using (var pfxCer = new X509Certificate2(TestData.PfxData, TestData.PfxDataPassword))
{
X509Certificate2Collection cc2 = new X509Certificate2Collection();
cc2.Import(TestData.StoreSavedAsCerData);
int count = cc2.Count;
Assert.Equal(1, count);
using (X509Certificate2 c = cc2[0])
{
// pfxCer was loaded directly, cc2[0] was Imported, two distinct copies.
Assert.NotSame(pfxCer, c);
Assert.Equal(pfxCer, c);
Assert.Equal(pfxCer.Thumbprint, c.Thumbprint);
}
}
}
[Fact]
[PlatformSpecific(PlatformID.Windows)]
public static void ImportStoreSavedAsSerializedCerData_Windows()
{
using (var pfxCer = new X509Certificate2(TestData.PfxData, TestData.PfxDataPassword))
{
X509Certificate2Collection cc2 = new X509Certificate2Collection();
cc2.Import(TestData.StoreSavedAsSerializedCerData);
int count = cc2.Count;
Assert.Equal(1, count);
using (X509Certificate2 c = cc2[0])
{
// pfxCer was loaded directly, cc2[0] was Imported, two distinct copies.
Assert.NotSame(pfxCer, c);
Assert.Equal(pfxCer, c);
Assert.Equal(pfxCer.Thumbprint, c.Thumbprint);
}
}
}
[Fact]
[PlatformSpecific(PlatformID.AnyUnix)]
public static void ImportStoreSavedAsSerializedCerData_Unix()
{
X509Certificate2Collection cc2 = new X509Certificate2Collection();
Assert.ThrowsAny<CryptographicException>(() => cc2.Import(TestData.StoreSavedAsSerializedCerData));
Assert.Equal(0, cc2.Count);
}
[Fact]
[PlatformSpecific(PlatformID.Windows)]
public static void ImportStoreSavedAsSerializedStoreData_Windows()
{
using (var msCer = new X509Certificate2(TestData.MsCertificate))
using (var pfxCer = new X509Certificate2(TestData.PfxData, TestData.PfxDataPassword))
{
X509Certificate2Collection cc2 = new X509Certificate2Collection();
cc2.Import(TestData.StoreSavedAsSerializedStoreData);
int count = cc2.Count;
Assert.Equal(2, count);
X509Certificate2[] cs = cc2.ToArray().OrderBy(c => c.Subject).ToArray();
Assert.NotSame(msCer, cs[0]);
Assert.Equal(msCer, cs[0]);
Assert.Equal(msCer.Thumbprint, cs[0].Thumbprint);
Assert.NotSame(pfxCer, cs[1]);
Assert.Equal(pfxCer, cs[1]);
Assert.Equal(pfxCer.Thumbprint, cs[1].Thumbprint);
}
}
[Fact]
[PlatformSpecific(PlatformID.AnyUnix)]
public static void ImportStoreSavedAsSerializedStoreData_Unix()
{
X509Certificate2Collection cc2 = new X509Certificate2Collection();
Assert.ThrowsAny<CryptographicException>(() => cc2.Import(TestData.StoreSavedAsSerializedStoreData));
Assert.Equal(0, cc2.Count);
}
[Fact]
public static void ImportStoreSavedAsPfxData()
{
using (var msCer = new X509Certificate2(TestData.MsCertificate))
using (var pfxCer = new X509Certificate2(TestData.PfxData, TestData.PfxDataPassword))
{
X509Certificate2Collection cc2 = new X509Certificate2Collection();
cc2.Import(TestData.StoreSavedAsPfxData);
int count = cc2.Count;
Assert.Equal(2, count);
X509Certificate2[] cs = cc2.ToArray().OrderBy(c => c.Subject).ToArray();
Assert.NotSame(msCer, cs[0]);
Assert.Equal(msCer, cs[0]);
Assert.Equal(msCer.Thumbprint, cs[0].Thumbprint);
Assert.NotSame(pfxCer, cs[1]);
Assert.Equal(pfxCer, cs[1]);
Assert.Equal(pfxCer.Thumbprint, cs[1].Thumbprint);
}
}
[Fact]
public static void ImportInvalidData()
{
X509Certificate2Collection cc2 = new X509Certificate2Collection();
Assert.ThrowsAny<CryptographicException>(() => cc2.Import(new byte[] { 0, 1, 1, 2, 3, 5, 8, 13, 21 }));
}
[Fact]
public static void ImportFromFileTests()
{
using (var pfxCer = new X509Certificate2(TestData.PfxData, TestData.PfxDataPassword))
{
X509Certificate2Collection cc2 = new X509Certificate2Collection();
cc2.Import(Path.Combine("TestData" ,"My.pfx"), TestData.PfxDataPassword, X509KeyStorageFlags.DefaultKeySet);
int count = cc2.Count;
Assert.Equal(1, count);
using (X509Certificate2 c = cc2[0])
{
// pfxCer was loaded directly, cc2[0] was Imported, two distinct copies.
Assert.NotSame(pfxCer, c);
Assert.Equal(pfxCer, c);
Assert.Equal(pfxCer.Thumbprint, c.Thumbprint);
}
}
}
[Fact]
[ActiveIssue(2745, PlatformID.AnyUnix)]
public static void ImportMultiplePrivateKeysPfx()
{
var collection = new X509Certificate2Collection();
collection.Import(TestData.MultiPrivateKeyPfx);
Assert.Equal(2, collection.Count);
foreach (X509Certificate2 cert in collection)
{
Assert.True(cert.HasPrivateKey, "cert.HasPrivateKey");
}
}
[Fact]
public static void ExportCert()
{
TestExportSingleCert(X509ContentType.Cert);
}
[Fact]
[PlatformSpecific(PlatformID.Windows)]
public static void ExportSerializedCert_Windows()
{
TestExportSingleCert(X509ContentType.SerializedCert);
}
[Fact]
[PlatformSpecific(PlatformID.AnyUnix)]
public static void ExportSerializedCert_Unix()
{
using (var msCer = new X509Certificate2(TestData.MsCertificate))
using (var ecdsa256Cer = new X509Certificate2(TestData.ECDsa256Certificate))
{
X509Certificate2Collection cc = new X509Certificate2Collection(new[] { msCer, ecdsa256Cer });
Assert.Throws<PlatformNotSupportedException>(() => cc.Export(X509ContentType.SerializedCert));
}
}
[Fact]
[PlatformSpecific(PlatformID.Windows)]
public static void ExportSerializedStore_Windows()
{
TestExportStore(X509ContentType.SerializedStore);
}
[Fact]
[PlatformSpecific(PlatformID.AnyUnix)]
public static void ExportSerializedStore_Unix()
{
using (var msCer = new X509Certificate2(TestData.MsCertificate))
using (var ecdsa256Cer = new X509Certificate2(TestData.ECDsa256Certificate))
{
X509Certificate2Collection cc = new X509Certificate2Collection(new[] { msCer, ecdsa256Cer });
Assert.Throws<PlatformNotSupportedException>(() => cc.Export(X509ContentType.SerializedStore));
}
}
[Fact]
public static void ExportPkcs7()
{
TestExportStore(X509ContentType.Pkcs7);
}
[Fact]
public static void X509CertificateCollectionSyncRoot()
{
var cc = new X509CertificateCollection();
Assert.NotNull(((ICollection)cc).SyncRoot);
Assert.Same(((ICollection)cc).SyncRoot, ((ICollection)cc).SyncRoot);
}
[Fact]
public static void ExportEmpty_Cert()
{
var collection = new X509Certificate2Collection();
byte[] exported = collection.Export(X509ContentType.Cert);
Assert.Null(exported);
}
[Fact]
[ActiveIssue(2746, PlatformID.AnyUnix)]
public static void ExportEmpty_Pkcs12()
{
var collection = new X509Certificate2Collection();
byte[] exported = collection.Export(X509ContentType.Pkcs12);
// The empty PFX is legal, the answer won't be null.
Assert.NotNull(exported);
}
[Fact]
public static void ExportUnrelatedPfx()
{
// Export multiple certificates which are not part of any kind of certificate chain.
// Nothing in the PKCS12 structure requires they're related, but it might be an underlying
// assumption of the provider.
using (var cert1 = new X509Certificate2(TestData.MsCertificate))
using (var cert2 = new X509Certificate2(TestData.ComplexNameInfoCert))
using (var cert3 = new X509Certificate2(TestData.CertWithPolicies))
{
var collection = new X509Certificate2Collection
{
cert1,
cert2,
cert3,
};
byte[] exported = collection.Export(X509ContentType.Pkcs12);
var importedCollection = new X509Certificate2Collection();
importedCollection.Import(exported);
// Verify that the two collections contain the same certificates,
// but the order isn't really a factor.
Assert.Equal(collection.Count, importedCollection.Count);
// Compare just the subject names first, because it's the easiest thing to read out of the failure message.
string[] subjects = new string[collection.Count];
string[] importedSubjects = new string[collection.Count];
for (int i = 0; i < collection.Count; i++)
{
subjects[i] = collection[i].GetNameInfo(X509NameType.SimpleName, false);
importedSubjects[i] = importedCollection[i].GetNameInfo(X509NameType.SimpleName, false);
}
Assert.Equal(subjects, importedSubjects);
// But, really, the collections should be equivalent
// (after being coerced to IEnumerable<X509Certificate2>)
Assert.Equal(collection.OfType<X509Certificate2>(), importedCollection.OfType<X509Certificate2>());
}
}
[Fact]
public static void MultipleImport()
{
var collection = new X509Certificate2Collection();
collection.Import(Path.Combine("TestData", "DummyTcpServer.pfx"), null, default(X509KeyStorageFlags));
collection.Import(TestData.PfxData, TestData.PfxDataPassword, default(X509KeyStorageFlags));
Assert.Equal(3, collection.Count);
}
[Fact]
[ActiveIssue(2743, PlatformID.AnyUnix)]
[ActiveIssue(2885, PlatformID.Windows)]
public static void ExportMultiplePrivateKeys()
{
var collection = new X509Certificate2Collection();
collection.Import(Path.Combine("TestData", "DummyTcpServer.pfx"), null, X509KeyStorageFlags.Exportable);
collection.Import(TestData.PfxData, TestData.PfxDataPassword, X509KeyStorageFlags.Exportable);
// Pre-condition, we have multiple private keys
int originalPrivateKeyCount = collection.OfType<X509Certificate2>().Count(c => c.HasPrivateKey);
Assert.Equal(2, originalPrivateKeyCount);
// Export, re-import.
byte[] exported;
try
{
exported = collection.Export(X509ContentType.Pkcs12);
}
catch (PlatformNotSupportedException)
{
// [ActiveIssue(2743, PlatformID.AnyUnix)]
// Our Unix builds can't export more than one private key in a single PFX, so this is
// their exit point.
//
// If Windows gets here, or any exception other than PlatformNotSupportedException is raised,
// let that fail the test.
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
{
throw;
}
return;
}
// As the other half of issue 2743, if we make it this far we better be Windows (or remove the catch
// above)
Assert.True(RuntimeInformation.IsOSPlatform(OSPlatform.Windows), "RuntimeInformation.IsOSPlatform(OSPlatform.Windows)");
var importedCollection = new X509Certificate2Collection();
importedCollection.Import(exported);
Assert.Equal(collection.Count, importedCollection.Count);
int importedPrivateKeyCount = importedCollection.OfType<X509Certificate2>().Count(c => c.HasPrivateKey);
Assert.Equal(originalPrivateKeyCount, importedPrivateKeyCount);
}
[Fact]
public static void X509CertificateCollectionCopyTo()
{
using (X509Certificate c1 = new X509Certificate())
using (X509Certificate c2 = new X509Certificate())
using (X509Certificate c3 = new X509Certificate())
{
X509CertificateCollection cc = new X509CertificateCollection(new X509Certificate[] { c1, c2, c3 });
X509Certificate[] array1 = new X509Certificate[cc.Count];
cc.CopyTo(array1, 0);
Assert.Same(c1, array1[0]);
Assert.Same(c2, array1[1]);
Assert.Same(c3, array1[2]);
X509Certificate[] array2 = new X509Certificate[cc.Count];
((ICollection)cc).CopyTo(array2, 0);
Assert.Same(c1, array2[0]);
Assert.Same(c2, array2[1]);
Assert.Same(c3, array2[2]);
}
}
[Fact]
public static void X509CertificateCollectionIndexOf()
{
using (X509Certificate2 c1 = new X509Certificate2())
using (X509Certificate2 c2 = new X509Certificate2(TestData.PfxData, TestData.PfxDataPassword))
{
X509CertificateCollection cc = new X509CertificateCollection(new X509Certificate[] { c1, c2 });
Assert.Equal(0, cc.IndexOf(c1));
Assert.Equal(1, cc.IndexOf(c2));
IList il = cc;
Assert.Equal(0, il.IndexOf(c1));
Assert.Equal(1, il.IndexOf(c2));
}
}
[Fact]
public static void X509CertificateCollectionRemove()
{
using (X509Certificate2 c1 = new X509Certificate2())
using (X509Certificate2 c2 = new X509Certificate2(TestData.PfxData, TestData.PfxDataPassword))
{
X509CertificateCollection cc = new X509CertificateCollection(new X509Certificate[] { c1, c2 });
cc.Remove(c1);
Assert.Equal(1, cc.Count);
Assert.Same(c2, cc[0]);
cc.Remove(c2);
Assert.Equal(0, cc.Count);
Assert.Throws<ArgumentException>(() => cc.Remove(c2));
IList il = new X509CertificateCollection(new X509Certificate[] { c1, c2 });
il.Remove(c1);
Assert.Equal(1, il.Count);
Assert.Same(c2, il[0]);
il.Remove(c2);
Assert.Equal(0, il.Count);
Assert.Throws<ArgumentException>(() => il.Remove(c2));
}
}
[Fact]
public static void X509CertificateCollectionRemoveAt()
{
using (X509Certificate c1 = new X509Certificate())
using (X509Certificate c2 = new X509Certificate())
using (X509Certificate c3 = new X509Certificate())
{
X509CertificateCollection cc = new X509CertificateCollection(new X509Certificate[] { c1, c2, c3 });
cc.RemoveAt(0);
Assert.Equal(2, cc.Count);
Assert.Same(c2, cc[0]);
Assert.Same(c3, cc[1]);
cc.RemoveAt(1);
Assert.Equal(1, cc.Count);
Assert.Same(c2, cc[0]);
cc.RemoveAt(0);
Assert.Equal(0, cc.Count);
IList il = new X509CertificateCollection(new X509Certificate[] { c1, c2, c3 });
il.RemoveAt(0);
Assert.Equal(2, il.Count);
Assert.Same(c2, il[0]);
Assert.Same(c3, il[1]);
il.RemoveAt(1);
Assert.Equal(1, il.Count);
Assert.Same(c2, il[0]);
il.RemoveAt(0);
Assert.Equal(0, il.Count);
}
}
[Fact]
public static void X509Certificate2CollectionRemoveRangeArray()
{
using (X509Certificate2 c1 = new X509Certificate2(TestData.MsCertificate))
using (X509Certificate2 c2 = new X509Certificate2(TestData.DssCer))
using (X509Certificate2 c1Clone = new X509Certificate2(TestData.MsCertificate))
{
X509Certificate2[] array = new X509Certificate2[] { c1, c2 };
X509Certificate2Collection cc = new X509Certificate2Collection(array);
cc.RemoveRange(array);
Assert.Equal(0, cc.Count);
cc = new X509Certificate2Collection(array);
cc.RemoveRange(new X509Certificate2[] { c2, c1 });
Assert.Equal(0, cc.Count);
cc = new X509Certificate2Collection(array);
cc.RemoveRange(new X509Certificate2[] { c1 });
Assert.Equal(1, cc.Count);
Assert.Same(c2, cc[0]);
cc = new X509Certificate2Collection(array);
Assert.Throws<ArgumentNullException>(() => cc.RemoveRange(new X509Certificate2[] { c1, c2, null }));
Assert.Equal(2, cc.Count);
Assert.Same(c1, cc[0]);
Assert.Same(c2, cc[1]);
cc = new X509Certificate2Collection(array);
Assert.Throws<ArgumentNullException>(() => cc.RemoveRange(new X509Certificate2[] { c1, null, c2 }));
Assert.Equal(2, cc.Count);
Assert.Same(c2, cc[0]);
Assert.Same(c1, cc[1]);
// Remove c1Clone (success)
// Remove c1 (exception)
// Add c1Clone back
// End state: { c1, c2 } => { c2, c1Clone }
cc = new X509Certificate2Collection(array);
Assert.Throws<ArgumentException>(() => cc.RemoveRange(new X509Certificate2[] { c1Clone, c1, c2 }));
Assert.Equal(2, cc.Count);
Assert.Same(c2, cc[0]);
Assert.Same(c1Clone, cc[1]);
}
}
[Fact]
public static void X509Certificate2CollectionRemoveRangeCollection()
{
using (X509Certificate2 c1 = new X509Certificate2(TestData.MsCertificate))
using (X509Certificate2 c2 = new X509Certificate2(TestData.DssCer))
using (X509Certificate2 c1Clone = new X509Certificate2(TestData.MsCertificate))
using (X509Certificate c3 = new X509Certificate())
{
X509Certificate2[] array = new X509Certificate2[] { c1, c2 };
X509Certificate2Collection cc = new X509Certificate2Collection(array);
cc.RemoveRange(new X509Certificate2Collection { c1, c2 });
Assert.Equal(0, cc.Count);
cc = new X509Certificate2Collection(array);
cc.RemoveRange(new X509Certificate2Collection { c2, c1 });
Assert.Equal(0, cc.Count);
cc = new X509Certificate2Collection(array);
cc.RemoveRange(new X509Certificate2Collection { c1 });
Assert.Equal(1, cc.Count);
Assert.Same(c2, cc[0]);
cc = new X509Certificate2Collection(array);
X509Certificate2Collection collection = new X509Certificate2Collection();
collection.Add(c1);
collection.Add(c2);
((IList)collection).Add(c3); // Add non-X509Certificate2 object
Assert.Throws<InvalidCastException>(() => cc.RemoveRange(collection));
Assert.Equal(2, cc.Count);
Assert.Same(c1, cc[0]);
Assert.Same(c2, cc[1]);
cc = new X509Certificate2Collection(array);
collection = new X509Certificate2Collection();
collection.Add(c1);
((IList)collection).Add(c3); // Add non-X509Certificate2 object
collection.Add(c2);
Assert.Throws<InvalidCastException>(() => cc.RemoveRange(collection));
Assert.Equal(2, cc.Count);
Assert.Same(c2, cc[0]);
Assert.Same(c1, cc[1]);
// Remove c1Clone (success)
// Remove c1 (exception)
// Add c1Clone back
// End state: { c1, c2 } => { c2, c1Clone }
cc = new X509Certificate2Collection(array);
collection = new X509Certificate2Collection
{
c1Clone,
c1,
c2,
};
Assert.Throws<ArgumentException>(() => cc.RemoveRange(collection));
Assert.Equal(2, cc.Count);
Assert.Same(c2, cc[0]);
Assert.Same(c1Clone, cc[1]);
}
}
[Fact]
public static void X509CertificateCollectionIndexer()
{
using (X509Certificate c1 = new X509Certificate())
using (X509Certificate c2 = new X509Certificate())
using (X509Certificate c3 = new X509Certificate())
{
X509CertificateCollection cc = new X509CertificateCollection(new X509Certificate[] { c1, c2, c3 });
cc[0] = c3;
cc[1] = c2;
cc[2] = c1;
Assert.Same(c3, cc[0]);
Assert.Same(c2, cc[1]);
Assert.Same(c1, cc[2]);
IList il = cc;
il[0] = c1;
il[1] = c2;
il[2] = c3;
Assert.Same(c1, il[0]);
Assert.Same(c2, il[1]);
Assert.Same(c3, il[2]);
}
}
[Fact]
public static void X509Certificate2CollectionIndexer()
{
using (X509Certificate2 c1 = new X509Certificate2())
using (X509Certificate2 c2 = new X509Certificate2())
using (X509Certificate2 c3 = new X509Certificate2())
{
X509Certificate2Collection cc = new X509Certificate2Collection(new X509Certificate2[] { c1, c2, c3 });
cc[0] = c3;
cc[1] = c2;
cc[2] = c1;
Assert.Same(c3, cc[0]);
Assert.Same(c2, cc[1]);
Assert.Same(c1, cc[2]);
IList il = cc;
il[0] = c1;
il[1] = c2;
il[2] = c3;
Assert.Same(c1, il[0]);
Assert.Same(c2, il[1]);
Assert.Same(c3, il[2]);
}
}
[Fact]
public static void X509CertificateCollectionInsertAndClear()
{
using (X509Certificate c1 = new X509Certificate())
using (X509Certificate c2 = new X509Certificate())
using (X509Certificate c3 = new X509Certificate())
{
X509CertificateCollection cc = new X509CertificateCollection();
cc.Insert(0, c1);
cc.Insert(1, c2);
cc.Insert(2, c3);
Assert.Equal(3, cc.Count);
Assert.Same(c1, cc[0]);
Assert.Same(c2, cc[1]);
Assert.Same(c3, cc[2]);
cc.Clear();
Assert.Equal(0, cc.Count);
cc.Add(c1);
cc.Add(c3);
Assert.Equal(2, cc.Count);
Assert.Same(c1, cc[0]);
Assert.Same(c3, cc[1]);
cc.Insert(1, c2);
Assert.Equal(3, cc.Count);
Assert.Same(c1, cc[0]);
Assert.Same(c2, cc[1]);
Assert.Same(c3, cc[2]);
cc.Clear();
Assert.Equal(0, cc.Count);
IList il = cc;
il.Insert(0, c1);
il.Insert(1, c2);
il.Insert(2, c3);
Assert.Equal(3, il.Count);
Assert.Same(c1, il[0]);
Assert.Same(c2, il[1]);
Assert.Same(c3, il[2]);
il.Clear();
Assert.Equal(0, il.Count);
il.Add(c1);
il.Add(c3);
Assert.Equal(2, il.Count);
Assert.Same(c1, il[0]);
Assert.Same(c3, il[1]);
il.Insert(1, c2);
Assert.Equal(3, il.Count);
Assert.Same(c1, il[0]);
Assert.Same(c2, il[1]);
Assert.Same(c3, il[2]);
il.Clear();
Assert.Equal(0, il.Count);
}
}
[Fact]
public static void X509Certificate2CollectionInsert()
{
using (X509Certificate2 c1 = new X509Certificate2())
using (X509Certificate2 c2 = new X509Certificate2())
using (X509Certificate2 c3 = new X509Certificate2())
{
X509Certificate2Collection cc = new X509Certificate2Collection();
cc.Insert(0, c3);
cc.Insert(0, c2);
cc.Insert(0, c1);
Assert.Equal(3, cc.Count);
Assert.Same(c1, cc[0]);
Assert.Same(c2, cc[1]);
Assert.Same(c3, cc[2]);
}
}
[Fact]
public static void X509Certificate2CollectionCopyTo()
{
using (X509Certificate2 c1 = new X509Certificate2())
using (X509Certificate2 c2 = new X509Certificate2())
using (X509Certificate2 c3 = new X509Certificate2())
{
X509Certificate2Collection cc = new X509Certificate2Collection(new X509Certificate2[] { c1, c2, c3 });
X509Certificate2[] array1 = new X509Certificate2[cc.Count];
cc.CopyTo(array1, 0);
Assert.Same(c1, array1[0]);
Assert.Same(c2, array1[1]);
Assert.Same(c3, array1[2]);
X509Certificate2[] array2 = new X509Certificate2[cc.Count];
((ICollection)cc).CopyTo(array2, 0);
Assert.Same(c1, array2[0]);
Assert.Same(c2, array2[1]);
Assert.Same(c3, array2[2]);
}
}
[Fact]
public static void X509CertificateCollectionGetHashCode()
{
using (X509Certificate c1 = new X509Certificate())
using (X509Certificate c2 = new X509Certificate())
using (X509Certificate c3 = new X509Certificate())
{
X509CertificateCollection cc = new X509CertificateCollection(new X509Certificate[] { c1, c2, c3 });
int expected = c1.GetHashCode() + c2.GetHashCode() + c3.GetHashCode();
Assert.Equal(expected, cc.GetHashCode());
}
}
[Fact]
public static void X509Certificate2CollectionGetHashCode()
{
using (X509Certificate2 c1 = new X509Certificate2())
using (X509Certificate2 c2 = new X509Certificate2())
using (X509Certificate2 c3 = new X509Certificate2())
{
X509Certificate2Collection cc = new X509Certificate2Collection(new X509Certificate2[] { c1, c2, c3 });
int expected = c1.GetHashCode() + c2.GetHashCode() + c3.GetHashCode();
Assert.Equal(expected, cc.GetHashCode());
}
}
private static void TestExportSingleCert(X509ContentType ct)
{
using (var msCer = new X509Certificate2(TestData.MsCertificate))
using (var pfxCer = new X509Certificate2(TestData.PfxData, TestData.PfxDataPassword))
{
X509Certificate2Collection cc = new X509Certificate2Collection(new X509Certificate2[] { msCer, pfxCer });
byte[] blob = cc.Export(ct);
Assert.Equal(ct, X509Certificate2.GetCertContentType(blob));
X509Certificate2Collection cc2 = new X509Certificate2Collection();
cc2.Import(blob);
int count = cc2.Count;
Assert.Equal(1, count);
using (X509Certificate2 c = cc2[0])
{
Assert.NotSame(msCer, c);
Assert.NotSame(pfxCer, c);
Assert.True(msCer.Equals(c) || pfxCer.Equals(c));
}
}
}
private static void TestExportStore(X509ContentType ct)
{
using (var msCer = new X509Certificate2(TestData.MsCertificate))
using (var pfxCer = new X509Certificate2(TestData.PfxData, TestData.PfxDataPassword))
{
X509Certificate2Collection cc = new X509Certificate2Collection(new X509Certificate2[] { msCer, pfxCer });
byte[] blob = cc.Export(ct);
Assert.Equal(ct, X509Certificate2.GetCertContentType(blob));
X509Certificate2Collection cc2 = new X509Certificate2Collection();
cc2.Import(blob);
int count = cc2.Count;
Assert.Equal(2, count);
X509Certificate2[] cs = cc2.ToArray().OrderBy(c => c.Subject).ToArray();
using (X509Certificate2 first = cs[0])
{
Assert.NotSame(msCer, first);
Assert.Equal(msCer, first);
}
using (X509Certificate2 second = cs[1])
{
Assert.NotSame(pfxCer, second);
Assert.Equal(pfxCer, second);
}
}
}
private static X509Certificate2[] ToArray(this X509Certificate2Collection col)
{
X509Certificate2[] array = new X509Certificate2[col.Count];
for (int i = 0; i < col.Count; i++)
{
array[i] = col[i];
}
return array;
}
}
}
| |
// Copyright (c) .NET Foundation. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for details.
using System;
using System.Diagnostics;
using System.Globalization;
using System.IO;
using System.Runtime.InteropServices;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading;
using System.Windows.Forms;
namespace OpenLiveWriter.Localization
{
public class CultureHelper
{
public static bool GdiPlusLineCenteringBroken
{
get
{
return _gdiPlusLineCenteringBroken;
}
}
private static bool _gdiPlusLineCenteringBroken = false;
/// <summary>
/// Applies the given culture name to the current thread.
/// </summary>
/// <param name="cultureName"></param>
public static void ApplyUICulture(string cultureName)
{
if (cultureName == null)
{
throw new ArgumentNullException("cultureName");
}
CultureInfo culture = GetBestCulture(cultureName);
Thread.CurrentThread.CurrentUICulture = culture;
_gdiPlusLineCenteringBroken = CultureInfo.CurrentUICulture.ThreeLetterWindowsLanguageName == "CHT";
FixupDateTimeFormat();
}
private static void FixupDateTimeFormat()
{
if (Thread.CurrentThread.CurrentCulture.TwoLetterISOLanguageName.ToUpperInvariant() == "AR")
{
// Ensure that spaces, slashes, or dashes do not break the date formatting by forcing all chars RTL
CultureInfo ci = (CultureInfo)Thread.CurrentThread.CurrentCulture.Clone();
ci.DateTimeFormat.ShortDatePattern = Regex.Replace(ci.DateTimeFormat.ShortDatePattern, "[Mdy]+", "\u200F$0");
Thread.CurrentThread.CurrentCulture = ci;
}
}
public static CultureInfo GetBestCulture(string cultureName)
{
try
{
// Dotnet won't load 'ml'
switch (cultureName.ToUpperInvariant())
{
case("ML"):
return CultureInfo.CreateSpecificCulture("ml-in");
case("PT"):
return CultureInfo.CreateSpecificCulture("pt-pt");
case ("SR-CYRL"):
return CultureInfo.CreateSpecificCulture("sr-cyrl-CS");
case ("SR-LATN-CS"):
try
{
return CultureInfo.CreateSpecificCulture(cultureName);
}
catch (ArgumentException)
{
return CultureInfo.CreateSpecificCulture("sr-sp-latn");
}
default:
return CultureInfo.CreateSpecificCulture(cultureName);
}
}
catch (ArgumentException)
{
// Specific culture didn't succeed, see if we can make
// a culture-neutral language identifier
int dashAt = cultureName.IndexOf('-');
if (dashAt >= 0)
{
try
{
return CultureInfo.CreateSpecificCulture(cultureName.Substring(0, dashAt));
}
catch (ArgumentException)
{
}
}
throw;
}
}
public static void FixupTextboxForNumber(TextBox textBox)
{
if (Thread.CurrentThread.CurrentCulture.TwoLetterISOLanguageName.ToUpperInvariant() == "HE")
{
textBox.RightToLeft = RightToLeft.No;
textBox.TextAlign = HorizontalAlignment.Right;
}
}
public static string GetDateTimeCombinedPattern(string date, string time)
{
// Simple way to control what comes first, date or time when displaying to the user
if (Thread.CurrentThread.CurrentUICulture.TwoLetterISOLanguageName.ToUpperInvariant() == "AR"
&& Thread.CurrentThread.CurrentCulture.TwoLetterISOLanguageName.ToUpperInvariant() == "AR")
{
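// \u200F is the Unicode Right-to-Left Mark; prefixing it keeps the combined string rendering RTL (time before date).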
return "\u200F" + time + " " + date;
}
else
{
return date + " " + time;
}
}
public static string GetShortDateTimePatternForDateTimePicker()
{
// DateTimePicker controls have a problem with RTL and custom formats. To get around this we hardcode the time in the reverse order.
if (Thread.CurrentThread.CurrentUICulture.TwoLetterISOLanguageName.ToUpperInvariant() == "AR"
&& Thread.CurrentThread.CurrentCulture.TwoLetterISOLanguageName.ToUpperInvariant() == "AR")
{
return "mm:hh";
}
else
{
return CultureInfo.CurrentCulture.DateTimeFormat.ShortTimePattern;
}
}
[Obsolete("NOT FULLY TESTED")]
public static bool IsImeActive(IntPtr windowHandle)
{
bool isActive = false;
try
{
IntPtr handle = Imm32.ImmGetContext(windowHandle);
if (handle == IntPtr.Zero)
return false;
try
{
isActive = Imm32.ImmGetOpenStatus(handle);
}
finally
{
Imm32.ImmReleaseContext(windowHandle, handle);
}
return isActive;
}
catch(Exception ex)
{
Trace.Fail("Failed to check if IME is active: " + ex);
return isActive;
}
}
[Obsolete("NOT FULLY TESTED")]
public static class Imm32
{
[DllImport("imm32.dll")]
public static extern IntPtr ImmGetContext(IntPtr hWnd);
[DllImport("imm32.dll")]
public static extern bool ImmGetOpenStatus(IntPtr hIMC);
[DllImport("imm32.dll")]
public static extern bool ImmReleaseContext(IntPtr hWnd, IntPtr hIMC);
}
public static bool IsRtlCodepage(uint codepage)
{
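// Arabic and Hebrew codepages (ANSI, OEM, Mac, EBCDIC and ISO variants).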
switch(codepage)
{
case 708:
case 720:
case 864:
case 1256:
case 10004:
case 20420:
case 28596:
case 862:
case 1255:
case 10005:
case 20424:
case 28598:
case 38598:
return true;
}
return false;
}
public static bool IsRtlLcid(int lcid)
{
return new CultureInfo(lcid).TextInfo.IsRightToLeft;
}
}
}
| |
//-----------------------------------------------------------------------------
// <copyright file="ThingManager.cs" company="WheelMUD Development Team">
// Copyright (c) WheelMUD Development Team. See LICENSE.txt. This file is
// subject to the Microsoft Public License. All other rights reserved.
// </copyright>
// <summary>
// High level manager that provides tracking and global collection of all Thing instances.
// Created: December 2010 by Karak, based on ItemManager as created January 2007 by Foxedup.
// </summary>
//-----------------------------------------------------------------------------
namespace WheelMUD.Core
{
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using WheelMUD.Interfaces;
/// <summary>High level manager that provides tracking and global collection of all Thing instances.</summary>
/// <remarks>
/// @@@ TODO: Provide search ability of registered Things through LINQ rather than,
/// or in addition to, specific-purpose search methods.
/// </remarks>
public class ThingManager : ManagerSystem
{
/// <summary>The singleton instance of this class.</summary>
private static readonly Lazy<ThingManager> SingletonInstance = new Lazy<ThingManager>(() => new ThingManager());
/// <summary>The dictionary of all managed Things.</summary>
private readonly ConcurrentDictionary<string, Thing> things = new ConcurrentDictionary<string, Thing>(StringComparer.OrdinalIgnoreCase);
/// <summary>Prevents a default instance of the <see cref="ThingManager"/> class from being created.</summary>
private ThingManager()
{
}
/// <summary>Gets the singleton <see cref="ThingManager"/> instance.</summary>
public static ThingManager Instance
{
get { return SingletonInstance.Value; }
}
/// <summary>Gets the collection of things.</summary>
public ICollection<Thing> Things
{
get { return this.things.Values.ToList().AsReadOnly(); }
}
/// <summary>Determines whether a Thing is currently spawned in the world, given its ID.</summary>
/// <param name="thingID">The Thing ID to search for.</param>
/// <returns><c>true</c> if Thing is in the world; otherwise, <c>false</c>.</returns>
public bool IsThingInWorld(string thingID)
{
return this.things.ContainsKey(thingID);
}
/// <summary>Determines whether any Things match the specified condition.</summary>
/// <param name="condition">The condition.</param>
/// <returns>True if at least one Thing matches the condition; otherwise, false.</returns>
public bool Any(Func<Thing, bool> condition)
{
return this.things.Values.Any(condition);
}
/// <summary>Counts the number of Things matching the specified condition.</summary>
/// <param name="condition">The condition.</param>
/// <returns>The number of Things matching the specified condition.</returns>
public int Count(Func<Thing, bool> condition)
{
return this.things.Values.Count(condition);
}
/// <summary>Gets a Thing given a Thing ID.</summary>
/// <param name="thingID">The ID of the Thing to get.</param>
/// <returns>The <c>Thing</c> referenced by the given ID if found; otherwise <c>null</c></returns>
public Thing FindThing(string thingID)
{
Thing thing;
this.things.TryGetValue(thingID, out thing);
return thing;
}
/// <summary>Finds a thing using a name or part name.</summary>
/// <param name="name">The name of the thing to return.</param>
/// <param name="partialMatch">Used to indicate whether the search criteria can look at just the start of the name.</param>
/// <param name="ignoreCase">Set to true to ignore case for the search, false for case sensitive.</param>
/// <returns>The Thing found, or null if no matching Thing was found.</returns>
public Thing FindThingByName(string name, bool partialMatch = false, bool ignoreCase = false)
{
StringComparison comparison = ignoreCase
? StringComparison.CurrentCultureIgnoreCase
: StringComparison.CurrentCulture;
return partialMatch
? this.things.Values.FirstOrDefault(thing => thing.Name.StartsWith(name, ignoreCase, null))
: this.things.Values.FirstOrDefault(thing => string.Equals(thing.Name, name, comparison));
}
/// <summary>Retrieves a collection of things matching the specified condition.</summary>
/// <param name="condition">The condition.</param>
/// <returns>A collection of things matching the condition.</returns>
public IList<Thing> Find(Func<Thing, bool> condition)
{
return this.things.Values.Where(condition).ToList().AsReadOnly();
}
/// <summary>Tries to retrieve a Thing matching the specified condition.</summary>
/// <param name="condition">The condition.</param>
/// <returns>The first Thing matching the condition, or null if none was found.</returns>
public Thing FirstOrDefault(Func<Thing, bool> condition)
{
return this.things.Values.FirstOrDefault(condition);
}
/// <summary>Tries to move a Thing from its current location into the specified location, if that thing is movable.</summary>
/// <param name="thing">The Thing to move.</param>
/// <param name="destinationThing">The new container to house the Thing.</param>
/// <param name="goingVia">The going via.</param>
/// <param name="leavingMessage">The leaving message.</param>
/// <param name="arrivingMessage">The arriving message.</param>
public void MoveThing(Thing thing, Thing destinationThing, Thing goingVia, SensoryMessage leavingMessage, SensoryMessage arrivingMessage)
{
MovableBehavior movableBehavior = thing.Behaviors.FindFirst<MovableBehavior>();
if (movableBehavior != null)
{
movableBehavior.Move(destinationThing, goingVia, leavingMessage, arrivingMessage);
}
}
/// <summary>Destroys the specified Thing.</summary>
/// <param name="thing">The thing to destroy.</param>
/// <returns>True if the destruction was successful; otherwise, false.</returns>
public bool DestroyThing(Thing thing)
{
if (thing.Parent != null)
{
thing.Parent.Remove(thing);
}
Thing removedThing;
return this.things.TryRemove(thing.ID, out removedThing);
}
/// <summary>Calls <see cref="DestroyThing"/> on all things matching the specified condition.</summary>
/// <param name="condition">The condition.</param>
/// <returns>The number of things that were successfully destroyed.</returns>
public int Destroy(Func<Thing, bool> condition)
{
var thingsToRemove = this.things.Values.Where(condition);
return thingsToRemove.Count(thing => this.DestroyThing(thing));
}
/// <summary>Starts this system's individual components.</summary>
public override void Start()
{
}
/// <summary>Stops this system's individual components.</summary>
public override void Stop()
{
this.things.Clear();
}
/// <summary>Add or update the ThingManager's cache of Things; should be called when the ID of a Thing is established or changes.</summary>
/// <param name="oldId">The previous ID of the Thing, or 0 if it has not had an ID yet.</param>
/// <param name="newId">The new ID of the Thing.</param>
/// <param name="updatedThing">The updated thing.</param>
/// <returns>
/// True if the update was successful. If false, presumably another call won a race
/// and the caller should either update their reference or try again.
/// </returns>
internal bool UpdateThingRegistration(string oldId, string newId, Thing updatedThing)
{
Debug.Assert(oldId != newId, "UpdateThingRegistration should not be called when not changing the Thing ID.");
Debug.Assert(!string.IsNullOrEmpty(newId), "After initialization, a Thing's ID should never become null or empty!");
////Debug.Assert(!this.things.ContainsKey(newID), "A Thing has been assigned an ID which is not unique!");
if (!string.IsNullOrEmpty(oldId))
{
Thing removedThingOldId;
this.things.TryRemove(oldId, out removedThingOldId);
}
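// Remove any stale registration under the new ID so the TryAdd below can succeed;
// if another caller raced in and re-registered first, TryAdd returns false.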
Thing removedThingNewId;
this.things.TryRemove(newId, out removedThingNewId);
return this.things.TryAdd(newId, updatedThing);
}
/// <summary>Exporter for MEF.</summary>
[ExportSystem]
public class ThingManagerExporter : SystemExporter
{
/// <summary>Gets the singleton system instance.</summary>
/// <returns>A new instance of the singleton system.</returns>
public override ISystem Instance
{
get { return ThingManager.Instance; }
}
/// <summary>Gets the Type of the singleton system, without instantiating it.</summary>
/// <returns>The Type of the singleton system.</returns>
public override Type SystemType
{
get { return typeof(ThingManager); }
}
}
}
}
| |
using System;
using Loon.Utils;
using System.Collections.Generic;
using Loon.Physics;
namespace Loon.Core.Geom {
public class Polygon : Shape {
private const long serialVersionUID = 1L;
public class Polygon2i {
public int npoints;
public int[] xpoints;
public int[] ypoints;
private const int MIN_LENGTH = 4;
public Polygon2i() {
xpoints = new int[MIN_LENGTH];
ypoints = new int[MIN_LENGTH];
}
public Polygon2i(int[] xpoints_0, int[] ypoints_1, int npoints_2) {
if (npoints_2 > xpoints_0.Length || npoints_2 > ypoints_1.Length) {
throw new IndexOutOfRangeException("npoints > xpoints.length || "
+ "npoints > ypoints.length".ToString());
}
if (npoints_2 < 0) {
throw new IndexOutOfRangeException("npoints < 0");
}
this.npoints = npoints_2;
this.xpoints = CollectionUtils.CopyOf(xpoints_0, npoints_2);
this.ypoints = CollectionUtils.CopyOf(ypoints_1, npoints_2);
}
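// Returns the highest power of two less than or equal to i by smearing the top set bit
// into every lower position (same algorithm as java.lang.Integer.highestOneBit).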
public static int HighestOneBit(int i)
{
i |= (i >> 1);
i |= (i >> 2);
i |= (i >> 4);
i |= (i >> 8);
i |= (i >> 16);
return i - ((int)((uint)i >> 1));
}
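// Appends a vertex, growing the backing arrays to the next power of two when they are full.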
public void AddPoint(int x, int y) {
if (npoints >= xpoints.Length || npoints >= ypoints.Length) {
int newLength = (npoints * 2);
if (newLength < MIN_LENGTH) {
newLength = MIN_LENGTH;
} else if ((newLength & (newLength - 1)) != 0) {
newLength = HighestOneBit(newLength);
}
xpoints = CollectionUtils.CopyOf(xpoints, newLength);
ypoints = CollectionUtils.CopyOf(ypoints, newLength);
}
xpoints[npoints] = x;
ypoints[npoints] = y;
npoints++;
}
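// Flattens the parallel x/y arrays into one interleaved x0,y0,x1,y1,... array
// (note: sized from the arrays' capacity rather than npoints).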
public int[] GetVertices() {
int vertice_size = xpoints.Length * 2;
int[] verts = new int[vertice_size];
for (int i = 0, j = 0; i < vertice_size; i += 2, j++) {
verts[i] = xpoints[j];
verts[i + 1] = ypoints[j];
}
return verts;
}
public void Reset() {
npoints = 0;
xpoints = new int[MIN_LENGTH];
ypoints = new int[MIN_LENGTH];
}
}
private bool allowDups;
private bool closed;
public Polygon(float[] points) {
this.allowDups = false;
this.closed = true;
int length = points.Length;
this.points = new float[length];
maxX = System.Single.MinValue;
maxY = System.Single.MinValue;
minX = System.Single.MaxValue;
minY = System.Single.MaxValue;
x = System.Single.MaxValue;
y = System.Single.MaxValue;
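// Points are interleaved x,y pairs: even indices are X coordinates, odd indices are Y.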
for (int i = 0; i < length; i++) {
this.points[i] = points[i];
if (i % 2 == 0) {
if (points[i] > maxX) {
maxX = points[i];
}
if (points[i] < minX) {
minX = points[i];
}
if (points[i] < x) {
x = points[i];
}
} else {
if (points[i] > maxY) {
maxY = points[i];
}
if (points[i] < minY) {
minY = points[i];
}
if (points[i] < y) {
y = points[i];
}
}
}
FindCenter();
CalculateRadius();
pointsDirty = true;
}
public Polygon() {
this.allowDups = false;
this.closed = true;
points = new float[0];
maxX = System.Single.MinValue;
maxY = System.Single.MinValue;
minX = System.Single.MaxValue;
minY = System.Single.MaxValue;
}
public Polygon(float[] xpoints_0, float[] ypoints_1, int npoints_2) {
this.allowDups = false;
this.closed = true;
if (npoints_2 > xpoints_0.Length || npoints_2 > ypoints_1.Length) {
throw new IndexOutOfRangeException("npoints > xpoints.length || npoints > ypoints.length");
}
if (npoints_2 < 0) {
throw new IndexOutOfRangeException("npoints < 0");
}
points = new float[0];
maxX = System.Single.MinValue;
maxY = System.Single.MinValue;
minX = System.Single.MaxValue;
minY = System.Single.MaxValue;
for (int i = 0; i < npoints_2; i++) {
AddPoint(xpoints_0[i], ypoints_1[i]);
}
}
public Polygon(int[] xpoints_0, int[] ypoints_1, int npoints_2) {
this.allowDups = false;
this.closed = true;
if (npoints_2 > xpoints_0.Length || npoints_2 > ypoints_1.Length) {
throw new IndexOutOfRangeException("npoints > xpoints.length || npoints > ypoints.length");
}
if (npoints_2 < 0) {
throw new IndexOutOfRangeException("npoints < 0");
}
points = new float[0];
maxX = System.Single.MinValue;
maxY = System.Single.MinValue;
minX = System.Single.MaxValue;
minY = System.Single.MaxValue;
for (int i = 0; i < npoints_2; i++) {
AddPoint(xpoints_0[i], ypoints_1[i]);
}
}
public Point[] GetVertexs()
{
int size = GetPointCount();
Point[] verts = new Point[size / 2];
for (int i = 0, j = 0; i < size; i += 2, j++)
{
verts[j] = new Point(points[i], points[i + 1]);
}
return verts;
}
public PPolygon GetPPolygon(float scale)
{
return new PPolygon(points, scale);
}
public void SetAllowDuplicatePoints(bool allowDups_0) {
this.allowDups = allowDups_0;
}
public void AddPoint(float x, float y) {
if (HasVertex(x, y) && (!allowDups)) {
return;
}
List<Single> tempPoints = new List<Single>();
for (int i = 0; i < points.Length; i++) {
CollectionUtils.Add(tempPoints,points[i]);
}
CollectionUtils.Add(tempPoints, x);
CollectionUtils.Add(tempPoints, y);
int length = tempPoints.Count;
this.points = new float[length];
for (int i_0 = 0; i_0 < length; i_0++) {
points[i_0] = (tempPoints[i_0]);
}
if (x > maxX) {
maxX = x;
}
if (y > maxY) {
maxY = y;
}
if (x < minX) {
minX = x;
}
if (y < minY) {
minY = y;
}
FindCenter();
CalculateRadius();
pointsDirty = true;
}
public override Shape Transform(Matrix transform) {
CheckPoints();
Polygon resultPolygon = new Polygon();
float[] result = new float[points.Length];
transform.Transform(points, 0, result, 0, points.Length / 2);
resultPolygon.points = result;
resultPolygon.FindCenter();
resultPolygon.closed = closed;
return resultPolygon;
}
public override void SetX(float x) {
base.SetX(x);
pointsDirty = false;
}
public override void SetY(float y) {
base.SetY(y);
pointsDirty = false;
}
protected internal override void CreatePoints() {
}
public override bool Closed() {
return closed;
}
public void SetClosed(bool closed_0) {
this.closed = closed_0;
}
public Polygon Copy() {
float[] copyPoints = new float[points.Length];
System.Array.Copy(points, 0, copyPoints, 0, copyPoints.Length);
return new Polygon(copyPoints);
}
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using Microsoft.Win32.SafeHandles;
using System.Collections.Generic;
using System.ComponentModel;
using System.Globalization;
using System.IO;
using System.Runtime.InteropServices;
using System.Text;
using System.Threading;
namespace System.Diagnostics
{
public partial class Process : IDisposable
{
/// <summary>
/// Puts a Process component in state to interact with operating system processes that run in a
/// special mode by enabling the native property SeDebugPrivilege on the current thread.
/// </summary>
public static void EnterDebugMode()
{
// Nop.
}
/// <summary>
/// Takes a Process component out of the state that lets it interact with operating system processes
/// that run in a special mode.
/// </summary>
public static void LeaveDebugMode()
{
// Nop.
}
/// <summary>Stops the associated process immediately.</summary>
public void Kill()
{
EnsureState(State.HaveId);
int errno = Interop.Sys.Kill(_processId, Interop.Sys.Signals.SIGKILL);
if (errno != 0)
{
throw new Win32Exception(errno); // same exception as on Windows
}
}
/// <summary>Discards any information about the associated process.</summary>
private void RefreshCore()
{
// Nop. No additional state to reset.
}
/// <summary>Additional logic invoked when the Process is closed.</summary>
private void CloseCore()
{
if (_waitStateHolder != null)
{
_waitStateHolder.Dispose();
_waitStateHolder = null;
}
}
/// <summary>
/// Instructs the Process component to wait the specified number of milliseconds for the associated process to exit.
/// </summary>
private bool WaitForExitCore(int milliseconds)
{
bool exited = GetWaitState().WaitForExit(milliseconds);
Debug.Assert(exited || milliseconds != Timeout.Infinite);
if (exited && milliseconds == Timeout.Infinite) // if we have a hard timeout, we cannot wait for the streams
{
if (_output != null)
{
_output.WaitUtilEOF();
}
if (_error != null)
{
_error.WaitUtilEOF();
}
}
return exited;
}
/// <summary>Gets the main module for the associated process.</summary>
public ProcessModule MainModule
{
get
{
ProcessModuleCollection pmc = Modules;
return pmc.Count > 0 ? pmc[0] : null;
}
}
/// <summary>Checks whether the process has exited and updates state accordingly.</summary>
private void UpdateHasExited()
{
int? exitCode;
_exited = GetWaitState().GetExited(out exitCode);
if (_exited && exitCode != null)
{
_exitCode = exitCode.Value;
}
}
/// <summary>Gets the time that the associated process exited.</summary>
private DateTime ExitTimeCore
{
get { return GetWaitState().ExitTime; }
}
/// <summary>
/// Gets or sets a value indicating whether the associated process priority
/// should be temporarily boosted by the operating system when the main window
/// has focus.
/// </summary>
private bool PriorityBoostEnabledCore
{
get { return false; } //Nop
set { } // Nop
}
/// <summary>
/// Gets or sets the overall priority category for the associated process.
/// </summary>
private ProcessPriorityClass PriorityClassCore
{
// This mapping is relatively arbitrary. 0 is normal based on the man page,
// and the other values above and below are simply distributed evenly.
get
{
EnsureState(State.HaveId);
int pri = 0;
int errno = Interop.Sys.GetPriority(Interop.Sys.PriorityWhich.PRIO_PROCESS, _processId, out pri);
if (errno != 0)
{
throw new Win32Exception(errno); // match Windows exception
}
Debug.Assert(pri >= -20 && pri <= 20);
return
pri < -15 ? ProcessPriorityClass.RealTime :
pri < -10 ? ProcessPriorityClass.High :
pri < -5 ? ProcessPriorityClass.AboveNormal :
pri == 0 ? ProcessPriorityClass.Normal :
pri <= 10 ? ProcessPriorityClass.BelowNormal :
ProcessPriorityClass.Idle;
}
set
{
int pri;
switch (value)
{
case ProcessPriorityClass.RealTime: pri = -19; break;
case ProcessPriorityClass.High: pri = -11; break;
case ProcessPriorityClass.AboveNormal: pri = -6; break;
case ProcessPriorityClass.Normal: pri = 0; break;
case ProcessPriorityClass.BelowNormal: pri = 10; break;
case ProcessPriorityClass.Idle: pri = 19; break;
default: throw new Win32Exception(); // match Windows exception
}
int result = Interop.Sys.SetPriority(Interop.Sys.PriorityWhich.PRIO_PROCESS, _processId, pri);
if (result == -1)
{
throw new Win32Exception(); // match Windows exception
}
}
}
/// <summary>Gets the ID of the current process.</summary>
private static int GetCurrentProcessId()
{
return Interop.Sys.GetPid();
}
/// <summary>
/// Gets a short-term handle to the process. If a handle already exists, it is reused.
/// If the process has exited, an InvalidOperationException is thrown.
/// </summary>
private SafeProcessHandle GetProcessHandle()
{
if (_haveProcessHandle)
{
if (GetWaitState().HasExited)
{
throw new InvalidOperationException(SR.Format(SR.ProcessHasExited, _processId.ToString(CultureInfo.CurrentCulture)));
}
return _processHandle;
}
EnsureState(State.HaveId | State.IsLocal);
return new SafeProcessHandle(_processId);
}
/// <summary>Starts the process using the supplied start info.</summary>
/// <param name="startInfo">The start info with which to start the process.</param>
private bool StartCore(ProcessStartInfo startInfo)
{
// Resolve the path to the specified file name
string filename = ResolvePath(startInfo.FileName);
// Parse argv, envp, and cwd out of the ProcessStartInfo
string[] argv = CreateArgv(startInfo);
string[] envp = CreateEnvp(startInfo);
string cwd = !string.IsNullOrWhiteSpace(startInfo.WorkingDirectory) ? startInfo.WorkingDirectory : null;
// Invoke the shim fork/execve routine. It will create pipes for all requested
// redirects, fork a child process, map the pipe ends onto the appropriate stdin/stdout/stderr
// descriptors, and execve to execute the requested process. The shim implementation
// is used to fork/execve as executing managed code in a forked process is not safe (only
// the calling thread will transfer, thread IDs aren't stable across the fork, etc.)
int childPid, stdinFd, stdoutFd, stderrFd;
if (Interop.Sys.ForkAndExecProcess(
filename, argv, envp, cwd,
startInfo.RedirectStandardInput, startInfo.RedirectStandardOutput, startInfo.RedirectStandardError,
out childPid,
out stdinFd, out stdoutFd, out stderrFd) != 0)
{
throw new Win32Exception();
}
// Store the child's information into this Process object.
Debug.Assert(childPid >= 0);
SetProcessHandle(new SafeProcessHandle(childPid));
SetProcessId(childPid);
// Configure the parent's ends of the redirection streams.
if (startInfo.RedirectStandardInput)
{
Debug.Assert(stdinFd >= 0);
_standardInput = new StreamWriter(OpenStream(stdinFd, FileAccess.Write),
new UTF8Encoding(encoderShouldEmitUTF8Identifier: false), StreamBufferSize) { AutoFlush = true };
}
if (startInfo.RedirectStandardOutput)
{
Debug.Assert(stdoutFd >= 0);
_standardOutput = new StreamReader(OpenStream(stdoutFd, FileAccess.Read),
startInfo.StandardOutputEncoding ?? Encoding.UTF8, true, StreamBufferSize);
}
if (startInfo.RedirectStandardError)
{
Debug.Assert(stderrFd >= 0);
_standardError = new StreamReader(OpenStream(stderrFd, FileAccess.Read),
startInfo.StandardErrorEncoding ?? Encoding.UTF8, true, StreamBufferSize);
}
return true;
}
// -----------------------------
// ---- PAL layer ends here ----
// -----------------------------
/// <summary>Finalizable holder for the underlying shared wait state object.</summary>
private ProcessWaitState.Holder _waitStateHolder;
/// <summary>Size to use for redirect streams and stream readers/writers.</summary>
private const int StreamBufferSize = 4096;
/// <summary>Converts the filename and arguments information from a ProcessStartInfo into an argv array.</summary>
/// <param name="psi">The ProcessStartInfo.</param>
/// <returns>The argv array.</returns>
private static string[] CreateArgv(ProcessStartInfo psi)
{
string argv0 = psi.FileName; // pass filename (instead of resolved path) as argv[0], to match what caller supplied
if (string.IsNullOrEmpty(psi.Arguments))
{
return new string[] { argv0 };
}
else
{
var argvList = new List<string>();
argvList.Add(argv0);
ParseArgumentsIntoList(psi.Arguments, argvList);
return argvList.ToArray();
}
}
/// <summary>Converts the environment variables information from a ProcessStartInfo into an envp array.</summary>
/// <param name="psi">The ProcessStartInfo.</param>
/// <returns>The envp array.</returns>
private static string[] CreateEnvp(ProcessStartInfo psi)
{
var envp = new string[psi.Environment.Count];
int index = 0;
foreach (var pair in psi.Environment)
{
envp[index++] = pair.Key + "=" + pair.Value;
}
return envp;
}
/// <summary>Resolves a path to the filename passed to ProcessStartInfo.</summary>
/// <param name="filename">The filename.</param>
/// <returns>The resolved path.</returns>
private static string ResolvePath(string filename)
{
// Follow the same resolution that Windows uses with CreateProcess:
// 1. First try the exact path provided
// 2. Then try the file relative to the executable directory
// 3. Then try the file relative to the current directory
// 4. Then try the file in each of the directories specified in PATH
// Windows does additional Windows-specific steps between 3 and 4,
// and we ignore those here.
// If the filename is a complete path, use it, regardless of whether it exists.
if (Path.IsPathRooted(filename))
{
// In this case, it doesn't matter whether the file exists or not;
// it's what the caller asked for, so it's what they'll get
return filename;
}
// Then check the executable's directory
string path = GetExePath();
if (path != null)
{
try
{
path = Path.Combine(Path.GetDirectoryName(path), filename);
if (File.Exists(path))
{
return path;
}
}
catch (ArgumentException) { } // ignore any errors in data that may come from the exe path
}
// Then check the current directory
path = Path.Combine(Directory.GetCurrentDirectory(), filename);
if (File.Exists(path))
{
return path;
}
// Then check each directory listed in the PATH environment variables
string pathEnvVar = Environment.GetEnvironmentVariable("PATH");
if (pathEnvVar != null)
{
var pathParser = new StringParser(pathEnvVar, ':', skipEmpty: true);
while (pathParser.MoveNext())
{
string subPath = pathParser.ExtractCurrent();
path = Path.Combine(subPath, filename);
if (File.Exists(path))
{
return path;
}
}
}
// Could not find the file
throw new Win32Exception(Interop.Error.ENOENT.Info().RawErrno);
}
/// <summary>Convert a number of "jiffies", or ticks, to a TimeSpan.</summary>
/// <param name="ticks">The number of ticks.</param>
/// <returns>The equivalent TimeSpan.</returns>
internal static TimeSpan TicksToTimeSpan(double ticks)
{
// Look up the number of ticks per second in the system's configuration,
// then use that to convert to a TimeSpan
long ticksPerSecond = Interop.Sys.SysConf(Interop.Sys.SysConfName._SC_CLK_TCK);
if (ticksPerSecond <= 0)
{
throw new Win32Exception();
}
return TimeSpan.FromSeconds(ticks / (double)ticksPerSecond);
}
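// Worked example (not part of the original file): with _SC_CLK_TCK == 100
// (a common value on Linux), TicksToTimeSpan(250) returns a TimeSpan of 2.5 seconds.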
/// <summary>Opens a stream around the specified file descriptor and with the specified access.</summary>
/// <param name="fd">The file descriptor.</param>
/// <param name="access">The access mode.</param>
/// <returns>The opened stream.</returns>
private static FileStream OpenStream(int fd, FileAccess access)
{
Debug.Assert(fd >= 0);
return new FileStream(
new SafeFileHandle((IntPtr)fd, ownsHandle: true),
access, StreamBufferSize, isAsync: false);
}
/// <summary>Parses a command-line argument string into a list of arguments.</summary>
/// <param name="arguments">The argument string.</param>
/// <param name="results">The list into which the component arguments should be stored.</param>
private static void ParseArgumentsIntoList(string arguments, List<string> results)
{
var currentArgument = new StringBuilder();
bool inQuotes = false;
// Iterate through all of the characters in the argument string
for (int i = 0; i < arguments.Length; i++)
{
char c = arguments[i];
// If this is an escaped double-quote, just add a '"' to the current
// argument and then skip past it in the input.
if (c == '\\' && i < arguments.Length - 1 && arguments[i + 1] == '"')
{
currentArgument.Append('"');
i++;
continue;
}
// If this is a double quote, track whether we're inside of quotes or not.
// Anything within quotes will be treated as a single argument, even if
// it contains spaces.
if (c == '"')
{
inQuotes = !inQuotes;
continue;
}
// If this is a space and we're not in quotes, we're done with the current
// argument, and if we've built up any characters in the current argument,
// it should be added to the results and then reset for the next one.
if (c == ' ' && !inQuotes)
{
if (currentArgument.Length > 0)
{
results.Add(currentArgument.ToString());
currentArgument.Clear();
}
continue;
}
// Nothing special; add the character to the current argument.
currentArgument.Append(c);
}
// If we reach the end of the string and we still have anything in our current
// argument buffer, treat it as an argument to be added to the results.
if (currentArgument.Length > 0)
{
results.Add(currentArgument.ToString());
}
}
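// Worked example (not part of the original file): given the raw argument string
//     say "hello world" \"lit\"
// the parser above yields three arguments: [say], [hello world], ["lit"]
// (double quotes group text containing spaces, and backslash-quote produces a
// literal quote character).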
/// <summary>Gets the wait state for this Process object.</summary>
private ProcessWaitState GetWaitState()
{
if (_waitStateHolder == null)
{
EnsureState(State.HaveId);
_waitStateHolder = new ProcessWaitState.Holder(_processId);
}
return _waitStateHolder._state;
}
}
}
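// --- Illustrative usage sketch (not part of the original file). ---
// Shows the public Process surface implemented above being consumed the usual
// way on Unix: start a child with redirected stdout, read its output, and wait
// for it to exit. The class and file placement are assumptions for illustration.
internal static class ProcessUsageSketch
{
    public static int RunAndEcho()
    {
        var psi = new System.Diagnostics.ProcessStartInfo
        {
            FileName = "/bin/echo",          // resolved via ResolvePath above
            Arguments = "hello world",       // split by ParseArgumentsIntoList above
            RedirectStandardOutput = true
        };
        using (System.Diagnostics.Process p = System.Diagnostics.Process.Start(psi))
        {
            string output = p.StandardOutput.ReadToEnd();
            p.WaitForExit();                 // WaitForExitCore + wait for stream EOF
            System.Console.Write(output);
            return p.ExitCode;
        }
    }
}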
| |
/*
* Author: Tony Brix, http://tonybrix.info
* License: MIT
*/
using System;
using System.ComponentModel;
using System.Drawing;
using System.Threading.Tasks;
using System.Windows.Forms;
using Microsoft.WindowsAPICodePack.Taskbar;
public class ProgressDialog : IDisposable
{
private BackgroundWorker worker;
private dialogForm dialog = new dialogForm();
public event CancelEventHandler Cancelled;
public event RunWorkerCompletedEventHandler Completed;
public event ProgressChangedEventHandler ProgressChanged;
public event DoWorkEventHandler DoWork;
private bool disposed = false;
public void Dispose()
{
Dispose(true);
// This object will be cleaned up by the Dispose method.
// Therefore, you should call GC.SuppressFinalize to
// take this object off the finalization queue
// and prevent finalization code for this object
// from executing a second time.
GC.SuppressFinalize(this);
}
protected virtual void Dispose(bool disposing)
{
// Check to see if Dispose has already been called.
if (!this.disposed)
{
// If disposing equals true, dispose all managed
// and unmanaged resources.
if (disposing)
{
// Dispose managed resources.
worker.Dispose();
dialog.Dispose();
}
// Note disposing has been done.
disposed = true;
}
}
public ProgressDialog()
{
worker = new BackgroundWorker();
worker.ProgressChanged += Worker_ProgressChanged;
worker.RunWorkerCompleted += Worker_RunWorkerCompleted;
worker.DoWork += worker_DoWork;
worker.WorkerSupportsCancellation = true;
worker.WorkerReportsProgress = true;
dialog.Cancelled += dialog_Cancelled;
}
void dialog_Cancelled(object sender, CancelEventArgs e)
{
worker.CancelAsync();
if (Cancelled != null)
{
Cancelled(this, e);
}
}
void worker_DoWork(object sender, DoWorkEventArgs e)
{
if (DoWork != null)
{
DoWork(this, e);
e.Cancel = IsCancelled || e.Cancel;
}
else
{
MessageBox.Show("No work to do!", "No Work", MessageBoxButtons.OK, MessageBoxIcon.Error);
}
}
public void Run()
{
if (TaskbarManager.IsPlatformSupported)
{
TaskbarManager.Instance.SetProgressState(TaskbarProgressBarState.Normal);
}
worker.RunWorkerAsync();
dialog.ShowDialog();
}
public void Run(object argument)
{
if (TaskbarManager.IsPlatformSupported)
{
TaskbarManager.Instance.SetProgressState(TaskbarProgressBarState.Normal);
}
worker.RunWorkerAsync(argument);
dialog.ShowDialog();
}
private void Worker_RunWorkerCompleted(object sender, RunWorkerCompletedEventArgs e)
{
if (Completed != null)
{
Completed(this, e);
}
if (TaskbarManager.IsPlatformSupported)
{
TaskbarManager.Instance.SetProgressState(TaskbarProgressBarState.NoProgress);
}
dialog.Close();
}
private void Worker_ProgressChanged(object sender, ProgressChangedEventArgs e)
{
if (ProgressChanged != null)
{
ProgressChanged(this, e);
}
}
public void ReportProgress(int percentProgress, object userState)
{
worker.ReportProgress(percentProgress, userState);
}
public void ReportProgress(int percentProgress)
{
worker.ReportProgress(percentProgress);
}
public string Title
{
get { return dialog.Text; }
set { dialog.Text = value; }
}
public string Message
{
get { return dialog.message.Text; }
set { dialog.message.Text = value; }
}
public int Progress
{
get { return dialog.progressBar.Value; }
set {
if (TaskbarManager.IsPlatformSupported)
{
TaskbarManager.Instance.SetProgressValue(value, 100);
}
dialog.progressBar.Value = value;
}
}
public bool IsCancelled
{
get { return worker.CancellationPending; }
}
public ProgressBarStyle Style
{
get { return dialog.progressBar.Style; }
set { dialog.progressBar.Style = value; }
}
public IWin32Window Window
{
get { return dialog; }
}
private class dialogForm : Form
{
public Label message;
public ProgressBar progressBar;
private Button bCancel;
public event CancelEventHandler Cancelled;
public dialogForm()
{
this.message = new Label();
this.progressBar = new ProgressBar();
this.bCancel = new Button();
this.SuspendLayout();
//
// message
//
this.message.AutoSize = true;
this.message.Location = new Point(12, 9);
this.message.Name = "message";
this.message.Size = new Size(54, 13);
this.message.TabIndex = 0;
this.message.Text = "Loading...";
//
// progressBar
//
this.progressBar.Anchor = (AnchorStyles)(AnchorStyles.Top | AnchorStyles.Bottom | AnchorStyles.Left | AnchorStyles.Right);
this.progressBar.Location = new Point(12, 25);
this.progressBar.Name = "progressBar";
this.progressBar.Size = new Size(421, 23);
this.progressBar.TabIndex = 1;
//
// bCancel
//
this.bCancel.Anchor = (AnchorStyles)(AnchorStyles.Bottom | AnchorStyles.Right);
this.bCancel.Location = new Point(358, 54);
this.bCancel.Name = "bCancel";
this.bCancel.Size = new Size(75, 23);
this.bCancel.TabIndex = 2;
this.bCancel.Text = "Cancel";
this.bCancel.UseVisualStyleBackColor = true;
this.bCancel.Click += new EventHandler(this.bCancel_Click);
//
// loadingDialog
//
this.AutoScaleDimensions = new SizeF(6F, 13F);
this.AutoScaleMode = AutoScaleMode.Font;
this.ClientSize = new Size(445, 89);
this.ControlBox = false;
this.Controls.Add(this.bCancel);
this.Controls.Add(this.progressBar);
this.Controls.Add(this.message);
this.FormBorderStyle = FormBorderStyle.FixedToolWindow;
this.Name = "loadingDialog";
this.ShowIcon = false;
this.ShowInTaskbar = false;
this.Text = "Loading";
this.ResumeLayout(false);
this.PerformLayout();
}
private void bCancel_Click(object sender, EventArgs e)
{
if (TaskbarManager.IsPlatformSupported)
{
TaskbarManager.Instance.SetProgressState(TaskbarProgressBarState.Error);
}
if (MessageBox.Show("Are you sure you want to cancel?", "Cancel", MessageBoxButtons.YesNo, MessageBoxIcon.Question, MessageBoxDefaultButton.Button2) == DialogResult.Yes)
{
bCancel.Enabled = false;
this.Text = "Cancelling...";
if (Cancelled != null)
{
Cancelled(this, new CancelEventArgs(true));
}
}
}
}
}
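// --- Illustrative usage sketch (not part of the original file). ---
// Shows one way the ProgressDialog API above is commonly wired up; the loop in
// DoWork is a stand-in for real background work.
internal static class ProgressDialogUsageSketch
{
    public static void Show()
    {
        using (var progress = new ProgressDialog())
        {
            progress.Title = "Copying files";
            progress.DoWork += (sender, e) =>
            {
                for (int i = 0; i <= 100 && !progress.IsCancelled; i++)
                {
                    progress.ReportProgress(i, string.Format("Step {0} of 100", i));
                    System.Threading.Thread.Sleep(25); // simulated work
                }
            };
            progress.ProgressChanged += (sender, e) =>
            {
                // Runs on the UI thread, so it is safe to touch the dialog here.
                progress.Progress = e.ProgressPercentage;
                progress.Message = (string)e.UserState;
            };
            progress.Completed += (sender, e) =>
            {
                // Inspect e.Cancelled / e.Error here if needed.
            };
            progress.Run(); // blocks until the worker finishes and the dialog closes
        }
    }
}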
| |
/*
* Copyright (c) Contributors, http://opensimulator.org/
* See CONTRIBUTORS.TXT for a full list of copyright holders.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
* * Neither the name of the OpenSimulator Project nor the
* names of its contributors may be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE DEVELOPERS ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
* WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE CONTRIBUTORS BE LIABLE FOR ANY
* DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
* (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
* LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
* ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
* SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using log4net;
using Mono.Addins;
using Nini.Config;
using OpenMetaverse;
using OpenSim.Framework;
using OpenSim.Region.Framework.Interfaces;
using OpenSim.Region.Framework.Scenes;
using OpenSim.Services.Interfaces;
using GridRegion = OpenSim.Services.Interfaces.GridRegion;
namespace OpenSim.Region.CoreModules.ServiceConnectorsOut.Simulation
{
[Extension(Path = "/OpenSim/RegionModules", NodeName = "RegionModule", Id = "LocalSimulationConnectorModule")]
public class LocalSimulationConnectorModule : ISharedRegionModule, ISimulationService
{
private static readonly ILog m_log = LogManager.GetLogger(MethodBase.GetCurrentMethod().DeclaringType);
/// <summary>
/// Version of this service.
/// </summary>
/// <remarks>
/// Currently valid versions are "SIMULATION/0.1" and "SIMULATION/0.2"
/// </remarks>
public string ServiceVersion { get; set; }
/// <summary>
/// Map region ID to scene.
/// </summary>
private Dictionary<UUID, Scene> m_scenes = new Dictionary<UUID, Scene>();
/// <summary>
/// Is this module enabled?
/// </summary>
private bool m_ModuleEnabled = false;
#region Region Module interface
public void Initialise(IConfigSource configSource)
{
IConfig moduleConfig = configSource.Configs["Modules"];
if (moduleConfig != null)
{
string name = moduleConfig.GetString("SimulationServices", "");
if (name == Name)
{
InitialiseService(configSource);
m_ModuleEnabled = true;
m_log.Info("[LOCAL SIMULATION CONNECTOR]: Local simulation enabled.");
}
}
}
public void InitialiseService(IConfigSource configSource)
{
ServiceVersion = "SIMULATION/0.2";
IConfig config = configSource.Configs["SimulationService"];
if (config != null)
{
ServiceVersion = config.GetString("ConnectorProtocolVersion", ServiceVersion);
if (ServiceVersion != "SIMULATION/0.1" && ServiceVersion != "SIMULATION/0.2")
throw new Exception(string.Format("Invalid ConnectorProtocolVersion {0}", ServiceVersion));
m_log.InfoFormat(
"[LOCAL SIMULATION CONNECTOR]: Initialized with connector protocol version {0}", ServiceVersion);
}
}
public void PostInitialise()
{
}
public void AddRegion(Scene scene)
{
if (!m_ModuleEnabled)
return;
Init(scene);
scene.RegisterModuleInterface<ISimulationService>(this);
}
public void RemoveRegion(Scene scene)
{
if (!m_ModuleEnabled)
return;
RemoveScene(scene);
scene.UnregisterModuleInterface<ISimulationService>(this);
}
public void RegionLoaded(Scene scene)
{
}
public void Close()
{
}
public Type ReplaceableInterface
{
get { return null; }
}
public string Name
{
get { return "LocalSimulationConnectorModule"; }
}
/// <summary>
/// Can be called from other modules.
/// </summary>
/// <param name="scene"></param>
public void RemoveScene(Scene scene)
{
lock (m_scenes)
{
if (m_scenes.ContainsKey(scene.RegionInfo.RegionID))
m_scenes.Remove(scene.RegionInfo.RegionID);
else
m_log.WarnFormat(
"[LOCAL SIMULATION CONNECTOR]: Tried to remove region {0} but it was not present",
scene.RegionInfo.RegionName);
}
}
/// <summary>
/// Can be called from other modules.
/// </summary>
/// <param name="scene"></param>
public void Init(Scene scene)
{
lock (m_scenes)
{
if (!m_scenes.ContainsKey(scene.RegionInfo.RegionID))
m_scenes[scene.RegionInfo.RegionID] = scene;
else
m_log.WarnFormat(
"[LOCAL SIMULATION CONNECTOR]: Tried to add region {0} but it is already present",
scene.RegionInfo.RegionName);
}
}
#endregion
#region ISimulation
public IScene GetScene(UUID regionId)
{
if (m_scenes.ContainsKey(regionId))
{
return m_scenes[regionId];
}
else
{
// FIXME: This was pre-existing behaviour but possibly not a good idea, since it hides an error rather
// than making it obvious and fixable. Need to see if the error message comes up in practice.
Scene s = m_scenes.Values.ToArray()[0];
m_log.ErrorFormat(
"[LOCAL SIMULATION CONNECTOR]: Region with id {0} not found. Returning {1} {2} instead",
regionId, s.RegionInfo.RegionName, s.RegionInfo.RegionID);
return s;
}
}
public ISimulationService GetInnerService()
{
return this;
}
/**
* Agent-related communications
*/
public bool CreateAgent(GridRegion destination, AgentCircuitData aCircuit, uint teleportFlags, out string reason)
{
if (destination == null)
{
reason = "Given destination was null";
m_log.DebugFormat("[LOCAL SIMULATION CONNECTOR]: CreateAgent was given a null destination");
return false;
}
if (m_scenes.ContainsKey(destination.RegionID))
{
// m_log.DebugFormat("[LOCAL SIMULATION CONNECTOR]: Found region {0} to send SendCreateChildAgent", destination.RegionName);
return m_scenes[destination.RegionID].NewUserConnection(aCircuit, teleportFlags, out reason);
}
reason = "Did not find region " + destination.RegionName;
return false;
}
public bool UpdateAgent(GridRegion destination, AgentData cAgentData)
{
if (destination == null)
return false;
if (m_scenes.ContainsKey(destination.RegionID))
{
// m_log.DebugFormat(
// "[LOCAL SIMULATION CONNECTOR]: Found region {0} {1} to send AgentUpdate",
// destination.RegionName, destination.RegionID);
return m_scenes[destination.RegionID].IncomingUpdateChildAgent(cAgentData);
}
// m_log.DebugFormat(
// "[LOCAL COMMS]: Did not find region {0} {1} for ChildAgentUpdate",
// destination.RegionName, destination.RegionID);
return false;
}
public bool UpdateAgent(GridRegion destination, AgentPosition agentPosition)
{
if (destination == null)
return false;
// We limit the number of messages sent for a position change to just one per
// simulator so when we receive the update we need to hand it to each of the
// scenes; each scene checks to see if there is a scene presence for the avatar.
// Note that we really don't need the GridRegion for this call.
foreach (Scene s in m_scenes.Values)
{
// m_log.Debug("[LOCAL COMMS]: Found region to send ChildAgentUpdate");
s.IncomingUpdateChildAgent(agentPosition);
}
//m_log.Debug("[LOCAL COMMS]: region not found for ChildAgentUpdate");
return true;
}
public bool QueryAccess(GridRegion destination, UUID id, Vector3 position, out string version, out string reason)
{
reason = "Communications failure";
version = ServiceVersion;
if (destination == null)
return false;
if (m_scenes.ContainsKey(destination.RegionID))
{
// m_log.DebugFormat(
// "[LOCAL SIMULATION CONNECTOR]: Found region {0} {1} to send AgentUpdate",
// s.RegionInfo.RegionName, destination.RegionHandle);
return m_scenes[destination.RegionID].QueryAccess(id, position, out reason);
}
//m_log.Debug("[LOCAL COMMS]: region not found for QueryAccess");
return false;
}
public bool ReleaseAgent(UUID originId, UUID agentId, string uri)
{
if (m_scenes.ContainsKey(originId))
{
// m_log.DebugFormat(
// "[LOCAL SIMULATION CONNECTOR]: Found region {0} {1} to send AgentUpdate",
// s.RegionInfo.RegionName, destination.RegionHandle);
m_scenes[originId].EntityTransferModule.AgentArrivedAtDestination(agentId);
return true;
}
//m_log.Debug("[LOCAL COMMS]: region not found in SendReleaseAgent " + origin);
return false;
}
public bool CloseAgent(GridRegion destination, UUID id, string auth_token)
{
if (destination == null)
return false;
if (m_scenes.ContainsKey(destination.RegionID))
{
// m_log.DebugFormat(
// "[LOCAL SIMULATION CONNECTOR]: Found region {0} {1} to send AgentUpdate",
// s.RegionInfo.RegionName, destination.RegionHandle);
m_scenes[destination.RegionID].CloseAgent(id, false, auth_token);
return true;
}
//m_log.Debug("[LOCAL COMMS]: region not found in SendCloseAgent");
return false;
}
/**
* Object-related communications
*/
public bool CreateObject(GridRegion destination, Vector3 newPosition, ISceneObject sog, bool isLocalCall)
{
if (destination == null)
return false;
if (m_scenes.ContainsKey(destination.RegionID))
{
// m_log.DebugFormat(
// "[LOCAL SIMULATION CONNECTOR]: Found region {0} {1} to send AgentUpdate",
// s.RegionInfo.RegionName, destination.RegionHandle);
Scene s = m_scenes[destination.RegionID];
if (isLocalCall)
{
// We need to make a local copy of the object
ISceneObject sogClone = sog.CloneForNewScene();
sogClone.SetState(sog.GetStateSnapshot(), s);
return s.IncomingCreateObject(newPosition, sogClone);
}
else
{
// Use the object as it came through the wire
return s.IncomingCreateObject(newPosition, sog);
}
}
return false;
}
#endregion /* IInterregionComms */
#region Misc
public bool IsLocalRegion(ulong regionhandle)
{
foreach (Scene s in m_scenes.Values)
if (s.RegionInfo.RegionHandle == regionhandle)
return true;
return false;
}
public bool IsLocalRegion(UUID id)
{
return m_scenes.ContainsKey(id);
}
#endregion
}
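// Illustrative configuration sketch (not part of the original file), based on
// the keys read in Initialise()/InitialiseService() above; section and key
// names come from the code, the values are examples:
//
//   [Modules]
//   SimulationServices = "LocalSimulationConnectorModule"
//
//   [SimulationService]
//   ConnectorProtocolVersion = "SIMULATION/0.2"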
}
| |
//
// DatabaseTrackListModel.cs
//
// Author:
// Aaron Bockover <[email protected]>
// Gabriel Burt <[email protected]>
//
// Copyright (C) 2007 Novell, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Text;
using System.Collections.Generic;
using System.Linq;
using Mono.Unix;
using Hyena;
using Hyena.Data;
using Hyena.Data.Sqlite;
using Hyena.Query;
using Banshee.Base;
using Banshee.Query;
using Banshee.Database;
using Banshee.PlaybackController;
namespace Banshee.Collection.Database
{
public class DatabaseTrackListModel : TrackListModel, IExportableModel,
ICacheableDatabaseModel, IFilterable, ISortable, ICareAboutView, ISearchable
{
private readonly BansheeDbConnection connection;
private IDatabaseTrackModelProvider provider;
protected IDatabaseTrackModelCache cache;
private Banshee.Sources.DatabaseSource source;
private long count;
private long filtered_count;
private TimeSpan filtered_duration, duration;
private long filtered_filesize, filesize;
private ISortableColumn sort_column;
private string sort_query;
private bool forced_sort_query;
private string reload_fragment;
private string join_table, join_fragment, join_primary_key, join_column, condition, condition_from;
private string query_fragment;
private string user_query;
private int rows_in_view;
public DatabaseTrackListModel (BansheeDbConnection connection, IDatabaseTrackModelProvider provider, Banshee.Sources.DatabaseSource source)
{
this.connection = connection;
this.provider = provider;
this.source = source;
SelectAggregates = String.Format ("SUM({0}), SUM({1})",
BansheeQuery.DurationField.Column, BansheeQuery.FileSizeField.Column);
Selection.Changed += delegate {
if (SelectionAggregatesHandler != null) {
cache.UpdateSelectionAggregates (SelectionAggregatesHandler);
}
};
}
protected Action<IDataReader> SelectionAggregatesHandler { get; set; }
protected HyenaSqliteConnection Connection {
get { return connection; }
}
private bool initialized = false;
public void Initialize (IDatabaseTrackModelCache cache)
{
if (initialized)
return;
initialized = true;
this.cache = cache;
cache.AggregatesUpdated += HandleCacheAggregatesUpdated;
GenerateSortQueryPart ();
}
private bool have_new_user_query = true;
private void GenerateUserQueryFragment ()
{
if (!have_new_user_query)
return;
if (String.IsNullOrEmpty (UserQuery)) {
query_fragment = null;
query_tree = null;
} else {
query_tree = UserQueryParser.Parse (UserQuery, BansheeQuery.FieldSet);
query_fragment = (query_tree == null) ? null : query_tree.ToSql (BansheeQuery.FieldSet);
if (query_fragment != null && query_fragment.Length == 0) {
query_fragment = null;
query_tree = null;
}
}
have_new_user_query = false;
}
private QueryNode query_tree;
public QueryNode Query {
get { return query_tree; }
}
protected string SortQuery {
get { return sort_query; }
set { sort_query = value; }
}
protected virtual void GenerateSortQueryPart ()
{
SortQuery = (SortColumn == null || SortColumn.SortType == SortType.None)
? (SortColumn != null && source is Banshee.Playlist.PlaylistSource)
? "CorePlaylistEntries.ViewOrder ASC, CorePlaylistEntries.EntryID ASC"
: BansheeQuery.GetSort (BansheeQuery.ArtistField, true)
: BansheeQuery.GetSort (SortColumn.Field, SortColumn.SortType == SortType.Ascending);
}
private SortType last_sort_type = SortType.None;
public bool Sort (ISortableColumn column)
{
lock (this) {
if (forced_sort_query) {
return false;
}
// Don't sort by the same column and the same sort-type more than once
if (sort_column != null && sort_column == column && column.SortType == last_sort_type) {
return false;
}
last_sort_type = column.SortType;
sort_column = column;
GenerateSortQueryPart ();
cache.Clear ();
}
return true;
}
public void Resort ()
{
var column = sort_column;
sort_column = null;
Sort (column);
}
private void HandleCacheAggregatesUpdated (IDataReader reader)
{
filtered_duration = TimeSpan.FromMilliseconds (reader[1] == null ? 0 : Convert.ToInt64 (reader[1]));
filtered_filesize = reader[2] == null ? 0 : Convert.ToInt64 (reader[2]);
}
public override void Clear ()
{
cache.Clear ();
count = 0;
filesize = 0;
duration = TimeSpan.Zero;
filtered_count = 0;
OnCleared ();
}
public void InvalidateCache (bool notify)
{
if (cache == null) {
Log.ErrorFormat ("Called invalidate cache for {0}'s track model, but cache is null", source);
} else {
cache.Clear ();
if (notify) {
OnReloaded ();
}
}
}
private string unfiltered_query;
public virtual string UnfilteredQuery {
get {
return unfiltered_query ?? (unfiltered_query = String.Format (
"FROM {0} WHERE {1} {2}",
FromFragment,
String.IsNullOrEmpty (provider.Where) ? "1=1" : provider.Where,
ConditionFragment
));
}
}
private string from;
protected string From {
get {
if (from == null) {
from = provider.From;
int i = from.IndexOf (',');
if (i > 0) {
// Force the join order to fix bgo#581103 and bgo#603661
// See section 5.2 in http://www.sqlite.org/optoverview.html
from = from.Substring (0, i) + " CROSS JOIN " + from.Substring (i + 1);
}
}
return from;
}
set { from = value; }
}
private string from_fragment;
public string FromFragment {
get { return from_fragment ?? (from_fragment = String.Format ("{0}{1}", From, JoinFragment)); }
}
public virtual void UpdateUnfilteredAggregates ()
{
HyenaSqliteCommand count_command = new HyenaSqliteCommand (String.Format (
"SELECT COUNT(*), SUM({0}), SUM({1}) {2}",
BansheeQuery.FileSizeField.Column, BansheeQuery.DurationField.Column, UnfilteredQuery
));
using (HyenaDataReader reader = new HyenaDataReader (connection.Query (count_command))) {
count = reader.Get<long> (0);
filesize = reader.Get<long> (1);
duration = reader.Get<TimeSpan> (2);
}
}
public override void Reload ()
{
Reload (null);
}
public void Reload (IListModel reloadTrigger)
{
if (cache == null) {
Log.WarningFormat ("Called Reload on {0} for source {1} but cache is null; Did you forget to call AfterInitialized () in your DatabaseSource ctor?",
this, source == null ? "null source!" : source.Name);
return;
}
lock (this) {
GenerateUserQueryFragment ();
UpdateUnfilteredAggregates ();
cache.SaveSelection ();
List<IFilterListModel> reload_models = new List<IFilterListModel> ();
bool found = (reloadTrigger == null);
foreach (IFilterListModel filter in source.CurrentFilters) {
if (found) {
if (filter != null) {
reload_models.Add (filter);
}
} else if (filter == reloadTrigger) {
found = true;
}
}
if (reload_models.Count == 0) {
ReloadWithFilters (true);
} else {
ReloadWithoutFilters ();
foreach (IFilterListModel model in reload_models) {
model.Reload (false);
}
bool have_filters = false;
foreach (IFilterListModel filter in source.CurrentFilters) {
have_filters |= !filter.Selection.AllSelected;
}
// Unless both artist/album selections are "all" (eg unfiltered), reload
// the track model again with the artist/album filters now in place.
if (have_filters) {
ReloadWithFilters (true);
}
}
cache.UpdateAggregates ();
cache.RestoreSelection ();
if (SelectionAggregatesHandler != null) {
cache.UpdateSelectionAggregates (SelectionAggregatesHandler);
}
filtered_count = cache.Count;
OnReloaded ();
// Trigger these after the track list, b/c visually it's more important for it to update first
foreach (IFilterListModel model in reload_models) {
model.RaiseReloaded ();
}
}
}
private void ReloadWithoutFilters ()
{
ReloadWithFilters (false);
}
private void ReloadWithFilters (bool with_filters)
{
StringBuilder qb = new StringBuilder ();
qb.Append (UnfilteredQuery);
if (with_filters) {
foreach (IFilterListModel filter in source.CurrentFilters) {
string filter_sql = filter.GetSqlFilter ();
if (filter_sql != null) {
qb.Append (" AND ");
qb.Append (filter_sql);
}
}
}
if (query_fragment != null) {
qb.Append (" AND ");
qb.Append (query_fragment);
}
if (sort_query != null) {
qb.Append (" ORDER BY ");
qb.Append (sort_query);
}
reload_fragment = qb.ToString ();
cache.Reload ();
}
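// Illustrative shape of the reload fragment built above (not from the original
// source; the table and column names here are hypothetical):
//
//   FROM CoreTracks CROSS JOIN CoreArtists
//   WHERE CoreTracks.PrimarySourceID = 1          -- provider.Where / ConditionFragment
//     AND CoreArtists.ArtistID IN (...)           -- per-filter SQL (with_filters)
//     AND (CoreTracks.Title LIKE '%foo%')         -- query_fragment from UserQuery
//   ORDER BY CoreTracks.TitleLowered ASC          -- sort_query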
private QueryFieldSet query_fields = BansheeQuery.FieldSet;
public QueryFieldSet QueryFields {
get { return query_fields; }
protected set { query_fields = value; }
}
public bool Contains (DatabaseTrackInfo track)
{
return track != null && connection.Query<bool> (
String.Format ("SELECT COUNT(*) > 0 {0} AND CoreTracks.TrackID = ? LIMIT 1", UnfilteredQuery),
track.TrackId
);
}
public int IndexOf (QueryNode query, long offset)
{
lock (this) {
if (query == null) {
return -1;
}
return (int) cache.IndexOf (query.ToSql (QueryFields), offset);
}
}
public override int IndexOf (TrackInfo track)
{
lock (this) {
if (track is DatabaseTrackInfo) {
return (int) cache.IndexOf (track as DatabaseTrackInfo);
} else if (track is Banshee.Streaming.RadioTrackInfo) {
return (int) cache.IndexOf ((track as Banshee.Streaming.RadioTrackInfo).ParentTrack as DatabaseTrackInfo);
}
return -1;
}
}
public int IndexOfFirst (TrackInfo track)
{
lock (this) {
return IndexOf (cache.GetSingleWhere ("AND MetadataHash = ? ORDER BY OrderID", track.MetadataHash));
}
}
public override TrackInfo GetRandom (DateTime notPlayedSince)
{
return GetRandom (notPlayedSince, "song", true, false, Shuffler.Playback);
}
public TrackInfo GetRandom (DateTime notPlayedSince, string shuffle_mode, bool repeat, bool resetSinceTime, Shuffler shuffler)
{
lock (this) {
shuffler.SetModelAndCache (this, cache);
return shuffler.GetRandom (notPlayedSince, shuffle_mode, repeat, resetSinceTime);
}
}
public override TrackInfo this[int index] {
get {
lock (this) {
return cache.GetValue (index);
}
}
}
public override int Count {
get { return (int) filtered_count; }
}
public virtual TimeSpan Duration {
get { return filtered_duration; }
}
public virtual long FileSize {
get { return filtered_filesize; }
}
public long UnfilteredFileSize {
get { return filesize; }
}
public TimeSpan UnfilteredDuration {
get { return duration; }
}
public int UnfilteredCount {
get { return (int) count; }
set { count = value; }
}
public string UserQuery {
get { return user_query; }
set {
lock (this) {
user_query = value;
have_new_user_query = true;
}
}
}
public string ForcedSortQuery {
get { return forced_sort_query ? sort_query : null; }
set {
forced_sort_query = value != null;
sort_query = value;
if (cache != null) {
cache.Clear ();
}
}
}
public string JoinTable {
get { return join_table; }
set {
join_table = value;
join_fragment = String.Format (", {0}", join_table);
}
}
public string JoinFragment {
get { return join_fragment; }
}
public string JoinPrimaryKey {
get { return join_primary_key; }
set { join_primary_key = value; }
}
public string JoinColumn {
get { return join_column; }
set { join_column = value; }
}
public void AddCondition (string part)
{
AddCondition (null, part);
}
public void AddCondition (string tables, string part)
{
if (!String.IsNullOrEmpty (part)) {
condition = condition == null ? part : String.Format ("{0} AND {1}", condition, part);
if (!String.IsNullOrEmpty (tables)) {
condition_from = condition_from == null ? tables : String.Format ("{0}, {1}", condition_from, tables);
}
}
}
public string Condition {
get { return condition; }
}
private string condition_from_fragment;
public string ConditionFromFragment {
get {
if (condition_from_fragment == null) {
if (JoinFragment == null) {
condition_from_fragment = condition_from;
} else {
if (condition_from == null) {
condition_from = "CoreTracks";
}
condition_from_fragment = String.Format ("{0}{1}", condition_from, JoinFragment);
}
}
return condition_from_fragment;
}
}
public string ConditionFragment {
get { return PrefixCondition ("AND"); }
}
private string PrefixCondition (string prefix)
{
string condition = Condition;
return String.IsNullOrEmpty (condition)
? String.Empty
: String.Format (" {0} {1} ", prefix, condition);
}
public int CacheId {
get { return (int) cache.CacheId; }
}
public ISortableColumn SortColumn {
get { return sort_column; }
}
public virtual int RowsInView {
protected get { return rows_in_view; }
set { rows_in_view = value; }
}
int IExportableModel.GetLength ()
{
return Count;
}
IDictionary<string, object> IExportableModel.GetMetadata (int index)
{
return this[index].GenerateExportable ();
}
private string track_ids_sql;
public string TrackIdsSql {
get {
if (track_ids_sql == null) {
if (!CachesJoinTableEntries) {
track_ids_sql = "ItemID FROM CoreCache WHERE ModelID = ? LIMIT ?, ?";
} else {
track_ids_sql = String.Format (
"{0} FROM {1} WHERE {2} IN (SELECT ItemID FROM CoreCache WHERE ModelID = ? LIMIT ?, ?)",
JoinColumn, JoinTable, JoinPrimaryKey
);
}
}
return track_ids_sql;
}
}
private bool caches_join_table_entries = false;
public bool CachesJoinTableEntries {
get { return caches_join_table_entries; }
set { caches_join_table_entries = value; }
}
// Implement ICacheableModel
public int FetchCount {
get { return RowsInView > 0 ? RowsInView * 5 : 100; }
}
public string SelectAggregates { get; protected set; }
// Implement IDatabaseModel
public string ReloadFragment {
get { return reload_fragment; }
}
public bool CachesValues { get { return false; } }
}
}
| |
// Code generated by Microsoft (R) AutoRest Code Generator 1.2.1.0
// Changes may cause incorrect behavior and will be lost if the code is
// regenerated.
namespace ApplicationGateway
{
using Microsoft.Rest;
using Microsoft.Rest.Azure;
using Models;
using System.Collections;
using System.Collections.Generic;
using System.Threading;
using System.Threading.Tasks;
/// <summary>
/// RouteFiltersOperations operations.
/// </summary>
public partial interface IRouteFiltersOperations
{
/// <summary>
/// Deletes the specified route filter.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='routeFilterName'>
/// The name of the route filter.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse> DeleteWithHttpMessagesAsync(string resourceGroupName, string routeFilterName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Gets the specified route filter.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='routeFilterName'>
/// The name of the route filter.
/// </param>
/// <param name='expand'>
/// Expands referenced express route bgp peering resources.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<RouteFilter>> GetWithHttpMessagesAsync(string resourceGroupName, string routeFilterName, string expand = default(string), Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Creates or updates a route filter in a specified resource group.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='routeFilterName'>
/// The name of the route filter.
/// </param>
/// <param name='routeFilterParameters'>
/// Parameters supplied to the create or update route filter operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<RouteFilter>> CreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string routeFilterName, RouteFilter routeFilterParameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Updates a route filter in a specified resource group.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='routeFilterName'>
/// The name of the route filter.
/// </param>
/// <param name='routeFilterParameters'>
/// Parameters supplied to the update route filter operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<RouteFilter>> UpdateWithHttpMessagesAsync(string resourceGroupName, string routeFilterName, PatchRouteFilter routeFilterParameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Gets all route filters in a resource group.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<IPage<RouteFilter>>> ListByResourceGroupWithHttpMessagesAsync(string resourceGroupName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Gets all route filters in a subscription.
/// </summary>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<IPage<RouteFilter>>> ListWithHttpMessagesAsync(Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Deletes the specified route filter.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='routeFilterName'>
/// The name of the route filter.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse> BeginDeleteWithHttpMessagesAsync(string resourceGroupName, string routeFilterName, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Creates or updates a route filter in a specified resource group.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='routeFilterName'>
/// The name of the route filter.
/// </param>
/// <param name='routeFilterParameters'>
/// Parameters supplied to the create or update route filter operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<RouteFilter>> BeginCreateOrUpdateWithHttpMessagesAsync(string resourceGroupName, string routeFilterName, RouteFilter routeFilterParameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Updates a route filter in a specified resource group.
/// </summary>
/// <param name='resourceGroupName'>
/// The name of the resource group.
/// </param>
/// <param name='routeFilterName'>
/// The name of the route filter.
/// </param>
/// <param name='routeFilterParameters'>
/// Parameters supplied to the update route filter operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<RouteFilter>> BeginUpdateWithHttpMessagesAsync(string resourceGroupName, string routeFilterName, PatchRouteFilter routeFilterParameters, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Gets all route filters in a resource group.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<IPage<RouteFilter>>> ListByResourceGroupNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
/// <summary>
/// Gets all route filters in a subscription.
/// </summary>
/// <param name='nextPageLink'>
/// The NextLink from the previous successful call to List operation.
/// </param>
/// <param name='customHeaders'>
/// The headers that will be added to request.
/// </param>
/// <param name='cancellationToken'>
/// The cancellation token.
/// </param>
/// <exception cref="Microsoft.Rest.Azure.CloudException">
/// Thrown when the operation returned an invalid status code
/// </exception>
/// <exception cref="Microsoft.Rest.SerializationException">
/// Thrown when unable to deserialize the response
/// </exception>
/// <exception cref="Microsoft.Rest.ValidationException">
/// Thrown when a required parameter is null
/// </exception>
Task<AzureOperationResponse<IPage<RouteFilter>>> ListNextWithHttpMessagesAsync(string nextPageLink, Dictionary<string, List<string>> customHeaders = null, CancellationToken cancellationToken = default(CancellationToken));
}
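// Illustrative usage sketch (not part of the generated file). Assumes a
// generated client that exposes an IRouteFiltersOperations instance, e.g. a
// hypothetical `client.RouteFilters` property:
//
//   AzureOperationResponse<RouteFilter> response = await client.RouteFilters
//       .GetWithHttpMessagesAsync("myResourceGroup", "myRouteFilter");
//   RouteFilter filter = response.Body;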
}
| |
// Copyright (c) Six Labors.
// Licensed under the Apache License, Version 2.0.
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.IO;
using System.Numerics;
using SixLabors.Fonts.Tables.General.Glyphs;
namespace SixLabors.Fonts.Tables
{
// Source code is based on https://github.com/LayoutFarm/Typography
// see https://github.com/LayoutFarm/Typography/blob/master/Typography.OpenFont/WebFont/Woff2Reader.cs
// TODO: There's still some cleanup required here to bring the code up to a maintainable standard.
internal static class Woff2Utils
{
// We don't reuse the const tag headers from our table types for clarity.
private static readonly string[] KnownTableTags =
{
"cmap", "head", "hhea", "hmtx", "maxp", "name", "OS/2", "post", "cvt ",
"fpgm", "glyf", "loca", "prep", "CFF ", "VORG", "EBDT", "EBLC", "gasp",
"hdmx", "kern", "LTSH", "PCLT", "VDMX", "vhea", "vmtx", "BASE", "GDEF",
"GPOS", "GSUB", "EBSC", "JSTF", "MATH", "CBDT", "CBLC", "COLR", "CPAL",
"SVG ", "sbix", "acnt", "avar", "bdat", "bloc", "bsln", "cvar", "fdsc",
"feat", "fmtx", "fvar", "gvar", "hsty", "just", "lcar", "mort", "morx",
"opbd", "prop", "trak", "Zapf", "Silf", "Glat", "Gloc", "Feat", "Sill"
};
private const byte OneMoreByteCode1 = 255;
private const byte OneMoreByteCode2 = 254;
private const byte WordCode = 253;
private const byte LowestUCode = 253;
public static ReadOnlyDictionary<string, TableHeader> ReadWoff2Headers(BigEndianBinaryReader reader, int tableCount)
{
uint expectedTableStartAt = 0;
var headers = new Dictionary<string, TableHeader>(tableCount);
for (int i = 0; i < tableCount; i++)
{
Woff2TableHeader woffTableHeader = Read(reader, expectedTableStartAt, out uint nextExpectedTableStartAt);
expectedTableStartAt = nextExpectedTableStartAt;
headers.Add(woffTableHeader.Tag, woffTableHeader);
}
return new ReadOnlyDictionary<string, TableHeader>(headers);
}
public static Woff2TableHeader Read(BigEndianBinaryReader reader, uint expectedTableStartAt, out uint nextExpectedTableStartAt)
{
// Leave the low byte of 'flags' open to store the xform version from flagsByte
const uint woff2FlagsTransform = 1 << 8;
byte flagsByte = reader.ReadByte();
int knownTable = flagsByte & 0x3F;
string tableName = knownTable == 0x3F ? reader.ReadTag() : KnownTableTags[knownTable];
uint flags = 0;
byte xformVersion = (byte)((flagsByte >> 6) & 0x03);
// For the glyf and loca tables a transform version of 0 means the table is transformed;
// for all other tables any non-zero version means it is transformed.
if (tableName is "glyf" or "loca")
{
if (xformVersion == 0)
{
flags |= woff2FlagsTransform;
}
}
else if (xformVersion != 0)
{
flags |= woff2FlagsTransform;
}
flags |= xformVersion;
if (!ReadUIntBase128(reader, out uint tableOrigLength))
{
throw new FontException("Error parsing woff2 table header");
}
uint tableTransformLength = tableOrigLength;
if ((flags & woff2FlagsTransform) != 0)
{
if (!ReadUIntBase128(reader, out tableTransformLength))
{
throw new FontException("Error parsing woff2 table header");
}
if (tableName == "loca" && tableTransformLength > 0)
{
throw new FontException("Error parsing woff2 table header");
}
}
nextExpectedTableStartAt = expectedTableStartAt + tableTransformLength;
if (nextExpectedTableStartAt < expectedTableStartAt)
{
throw new FontException("Error parsing woff2 table header");
}
return new Woff2TableHeader(tableName, 0, expectedTableStartAt, tableTransformLength);
}
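// Worked example (not part of the original file): a flags byte of 0x0A gives
// knownTable == 10 ("glyf") and xformVersion == 0, so the transform flag is set;
// a flags byte whose low six bits equal 0x3F means the 4-byte table tag follows
// explicitly in the stream.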
public static GlyphLoader[] LoadAllGlyphs(BigEndianBinaryReader reader, EmptyGlyphLoader emptyGlyphLoader)
{
// +-----------+-----------------------+-------------------------------------------------------------------------------------------------------+
// | Data Type | Semantic | Description and value type (if applicable) |
// +===========+=======================+=======================================================================================================+
// | Fixed | version | = 0x00000000 |
// +-----------+-----------------------+-------------------------------------------------------------------------------------------------------+
// | UInt16 | numGlyphs | Number of glyphs |
// +-----------+-----------------------+-------------------------------------------------------------------------------------------------------+
// | UInt16 | indexFormat | Offset format for loca table, should be consistent with indexToLocFormat |
// | | | of the original head table (see specification) |
// +-----------+-----------------------+-------------------------------------------------------------------------------------------------------+
// | UInt32 | nContourStreamSize | Size of nContour stream in bytes |
// +-----------+-----------------------+-------------------------------------------------------------------------------------------------------+
// | UInt32 | nPointsStreamSize | Size of nPoints stream in bytes |
// +-----------+-----------------------+-------------------------------------------------------------------------------------------------------+
// | UInt32 | flagStreamSize | Size of flag stream in bytes |
// +-----------+-----------------------+-------------------------------------------------------------------------------------------------------+
// | UInt32 | glyphStreamSize | Size of glyph stream in bytes (a stream of variable-length encoded values, see description below) |
// +-----------+-----------------------+-------------------------------------------------------------------------------------------------------+
// | UInt32 | compositeStreamSize | Size of composite stream in bytes (a stream of variable-length encoded values, see description below) |
// +-----------+-----------------------+-------------------------------------------------------------------------------------------------------+
// | UInt32 | bboxStreamSize | Size of bbox data in bytes representing combined length of bboxBitmap (a packed bit array) |
// | | | and bboxStream (a stream of Int16 values) |
// +-----------+-----------------------+-------------------------------------------------------------------------------------------------------+
// | UInt32 | instructionStreamSize | Size of instruction stream (a stream of UInt8 values) |
// +-----------+-----------------------+-------------------------------------------------------------------------------------------------------+
// | Int16 | nContourStream[] | Stream of Int16 values representing number of contours for each glyph record |
// +-----------+-----------------------+-------------------------------------------------------------------------------------------------------+
// | 255UInt16 | nPointsStream[] | Stream of values representing number of outline points for each contour in glyph records |
// +-----------+-----------------------+-------------------------------------------------------------------------------------------------------+
// | UInt8 | flagStream[] | Stream of UInt8 values representing flag values for each outline point. |
// +-----------+-----------------------+-------------------------------------------------------------------------------------------------------+
// | Vary | glyphStream[] | Stream of bytes representing point coordinate values using variable length |
// | | | encoding format (defined in subclause 5.2) |
// +-----------+-----------------------+-------------------------------------------------------------------------------------------------------+
// | Vary | compositeStream[] | Stream of bytes representing component flag values and associated composite glyph data |
// +-----------+-----------------------+-------------------------------------------------------------------------------------------------------+
// | UInt8 | bboxBitmap[] | Bitmap (a numGlyphs-long bit array) indicating explicit bounding boxes |
// +-----------+-----------------------+-------------------------------------------------------------------------------------------------------+
// | Int16 | bboxStream[] | Stream of Int16 values representing glyph bounding box data |
// +-----------+-----------------------+-------------------------------------------------------------------------------------------------------+
// | UInt8 | instructionStream[] | Stream of UInt8 values representing a set of instructions for each corresponding glyph |
// +-----------+-----------------------+-------------------------------------------------------------------------------------------------------+
uint version = reader.ReadUInt32();
ushort numGlyphs = reader.ReadUInt16();
ushort indexFormatOffset = reader.ReadUInt16();
uint nContourStreamSize = reader.ReadUInt32();
uint nPointsStreamSize = reader.ReadUInt32();
uint flagStreamSize = reader.ReadUInt32();
uint glyphStreamSize = reader.ReadUInt32();
uint compositeStreamSize = reader.ReadUInt32();
uint bboxStreamSize = reader.ReadUInt32();
uint instructionStreamSize = reader.ReadUInt32();
long nCountStreamOffset = reader.BaseStream.Position;
long nPointStreamOffset = nCountStreamOffset + nContourStreamSize;
long flagStreamOffset = nPointStreamOffset + nPointsStreamSize;
long glyphStreamOffset = flagStreamOffset + flagStreamSize;
long compositeStreamOffset = glyphStreamOffset + glyphStreamSize;
long bboxStreamOffset = compositeStreamOffset + compositeStreamSize;
long instructionStreamOffset = bboxStreamOffset + bboxStreamSize;
var glyphs = new GlyphVector[numGlyphs];
var allGlyphs = new GlyphData[numGlyphs];
var glyphLoaders = new GlyphLoader[numGlyphs];
var compositeGlyphs = new List<ushort>();
int contourCount = 0;
for (ushort i = 0; i < numGlyphs; i++)
{
short numContour = reader.ReadInt16();
allGlyphs[i] = new GlyphData(i, numContour);
if (numContour > 0)
{
contourCount += numContour;
// >0 => simple glyph
// -1 = composite
// 0 = empty glyph
}
else if (numContour < 0)
{
// Composite glyph, resolve later.
compositeGlyphs.Add(i);
}
}
ushort[] pntPerContours = new ushort[contourCount];
for (int i = 0; i < contourCount; i++)
{
// Each of these is the number of points of that contour.
pntPerContours[i] = Read255UInt16(reader);
}
// FlagStream, flags value for each point.
// Each byte in flags stream represents one point.
byte[] flagStream = reader.ReadBytes((int)flagStreamSize);
            // Some composite glyphs have instructions, so we must check all composite glyphs before reading the glyph stream.
using (var compositeMemoryStream = new MemoryStream())
{
reader.BaseStream.Position = compositeStreamOffset;
compositeMemoryStream.Write(reader.ReadBytes((int)compositeStreamSize), 0, (int)compositeStreamSize);
compositeMemoryStream.Position = 0;
using (var compositeReader = new BigEndianBinaryReader(compositeMemoryStream, false))
{
for (ushort i = 0; i < compositeGlyphs.Count; i++)
{
ushort compositeGlyphIndex = compositeGlyphs[i];
allGlyphs[compositeGlyphIndex].CompositeHasInstructions = CompositeHasInstructions(compositeReader);
}
}
reader.BaseStream.Position = glyphStreamOffset;
}
int curFlagsIndex = 0;
int pntContourIndex = 0;
long instructionsIndex = instructionStreamOffset;
for (int i = 0; i < allGlyphs.Length; i++)
{
glyphs[i] = ReadSimpleGlyphData(
reader,
ref allGlyphs[i],
pntPerContours,
ref pntContourIndex,
flagStream,
ref curFlagsIndex,
ref instructionsIndex);
}
// Now we read the composite stream again and create composite glyphs.
for (ushort i = 0; i < compositeGlyphs.Count; i++)
{
int compositeGlyphIndex = compositeGlyphs[i];
glyphs[compositeGlyphIndex] = ReadCompositeGlyphData(glyphs, reader);
}
int bitmapCount = (numGlyphs + 7) / 8;
byte[] bboxBitmap = ExpandBitmap(reader.ReadBytes(bitmapCount));
for (ushort i = 0; i < numGlyphs; i++)
{
GlyphData tempGlyph = allGlyphs[i];
byte hasBbox = bboxBitmap[i];
if (hasBbox == 1)
{
// Read bbox from the bboxstream.
glyphs[i] = GlyphVector.WithCompositeBounds(glyphs[i], Bounds.Load(reader));
}
else if (tempGlyph.NumContour < 0)
{
throw new NotSupportedException("Composite glyph must have a bounding box.");
}
}
for (ushort i = 0; i < numGlyphs; i++)
{
if (!glyphs[i].HasValue())
{
glyphLoaders[i] = emptyGlyphLoader;
continue;
}
glyphLoaders[i] = new TransformedGlyphLoader(glyphs[i]);
}
return glyphLoaders;
}
private static GlyphVector ReadSimpleGlyphData(
BigEndianBinaryReader reader,
ref GlyphData glyphData,
ushort[] pntPerContours,
ref int pntContourIndex,
byte[] flagStream,
ref int flagStreamIndex,
ref long instructionStreamOffset)
{
if (glyphData.NumContour == 0)
{
return default;
}
if (glyphData.NumContour < 0)
{
// Composite glyph. Check if this has instruction or not
// and read the length. We don't actually use the data but it ensures
// we maintain the correct location within the stream.
if (glyphData.CompositeHasInstructions)
{
Read255UInt16(reader);
}
return default; // Skip composite glyph (resolve later).
}
int curX = 0;
int curY = 0;
int numContour = glyphData.NumContour;
ushort[] endPoints = new ushort[numContour];
ushort pointCount = 0;
for (ushort i = 0; i < numContour; i++)
{
ushort numPoint = pntPerContours[pntContourIndex++];
pointCount += numPoint;
endPoints[i] = (ushort)(pointCount - 1);
}
var controlPoints = new Vector2[pointCount];
bool[] onCurves = new bool[pointCount];
int n = 0;
for (int i = 0; i < numContour; i++)
{
int endContour = endPoints[i];
for (; n <= endContour; ++n)
{
byte f = flagStream[flagStreamIndex++];
// int f1 = (f >> 7); // Most significant 1 bit -> on/off curve.
int xyFormat = f & 0x7F; // Remaining 7 bits x, y format.
TripleEncodingRecord enc = TripleEncodingTable.EncTable[xyFormat]; // 0-128
                    byte[] packedXY = reader.ReadBytes(enc.ByteCount - 1); // ByteCount includes the 1 flag byte, so the actual number of bytes to read is ByteCount - 1.
int x;
int y;
switch (enc.XBits)
{
default:
throw new NotSupportedException();
case 0: // 0,8,
x = 0;
y = enc.Ty(packedXY[0]);
break;
case 4: // 4,4
x = enc.Tx(packedXY[0] >> 4);
y = enc.Ty(packedXY[0] & 0xF);
break;
case 8: // 8,0 or 8,8
x = enc.Tx(packedXY[0]);
y = (enc.YBits == 8) ?
enc.Ty(packedXY[1]) :
0;
break;
case 12: // 12,12
x = enc.Tx((packedXY[0] << 4) | (packedXY[1] >> 4));
y = enc.Ty(((packedXY[1] & 0xF) << 8) | packedXY[2]);
break;
case 16: // 16,16
x = enc.Tx((packedXY[0] << 8) | packedXY[1]);
y = enc.Ty((packedXY[2] << 8) | packedXY[3]);
break;
}
// Most significant 1 bit -> on/off curve.
onCurves[n] = (f >> 7) == 0;
controlPoints[n] = new Vector2(curX += x, curY += y);
}
}
// Read the instructions
ushort instructionSize = Read255UInt16(reader);
long position = reader.BaseStream.Position;
reader.BaseStream.Position = instructionStreamOffset;
byte[] instructions = reader.ReadBytes(instructionSize);
instructionStreamOffset += instructionSize;
reader.BaseStream.Position = position;
            // Passing default here will cause the bounds to be calculated from the vector control points.
// They can be overwritten later on for composite glyphs.
return new GlyphVector(controlPoints, onCurves, endPoints, default, instructions);
}
private static bool CompositeHasInstructions(BigEndianBinaryReader reader)
{
bool weHaveInstructions = false;
CompositeGlyphFlags flags = CompositeGlyphFlags.MoreComponents;
while ((flags & CompositeGlyphFlags.MoreComponents) != 0)
{
flags = reader.ReadUInt16<CompositeGlyphFlags>();
weHaveInstructions |= (flags & CompositeGlyphFlags.WeHaveInstructions) != 0;
int argSize = 2; // glyph index
if ((flags & CompositeGlyphFlags.Args1And2AreWords) != 0)
{
argSize += 4;
}
else
{
argSize += 2;
}
if ((flags & CompositeGlyphFlags.WeHaveAScale) != 0)
{
argSize += 2;
}
else if ((flags & CompositeGlyphFlags.WeHaveXAndYScale) != 0)
{
argSize += 4;
}
else if ((flags & CompositeGlyphFlags.WeHaveATwoByTwo) != 0)
{
argSize += 8;
}
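                // Illustrative example: flags of Args1And2AreWords | WeHaveAScale give
                // argSize = 2 (glyph index) + 4 (word arguments) + 2 (scale) = 8 bytes to skip.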
reader.BaseStream.Seek(argSize, SeekOrigin.Current);
}
return weHaveInstructions;
}
private static GlyphVector ReadCompositeGlyphData(GlyphVector[] createdGlyphs, BigEndianBinaryReader reader)
{
GlyphVector composite = default;
CompositeGlyphFlags flags;
do
{
flags = reader.ReadUInt16<CompositeGlyphFlags>();
ushort glyphIndex = reader.ReadUInt16();
if (!createdGlyphs[glyphIndex].HasValue())
{
// This glyph has not been read yet, resolve it first.
long position = reader.BaseStream.Position;
createdGlyphs[glyphIndex] = ReadCompositeGlyphData(createdGlyphs, reader);
reader.BaseStream.Position = position;
}
CompositeGlyphLoader.LoadArguments(reader, flags, out int dx, out int dy);
Matrix3x2 transform = Matrix3x2.Identity;
transform.Translation = new Vector2(dx, dy);
if ((flags & CompositeGlyphFlags.WeHaveAScale) != 0)
{
float scale = reader.ReadF2dot14();
transform.M11 = scale;
transform.M21 = scale;
}
else if ((flags & CompositeGlyphFlags.WeHaveXAndYScale) != 0)
{
transform.M11 = reader.ReadF2dot14();
transform.M22 = reader.ReadF2dot14();
}
else if ((flags & CompositeGlyphFlags.WeHaveATwoByTwo) != 0)
{
transform.M11 = reader.ReadF2dot14();
transform.M12 = reader.ReadF2dot14();
transform.M21 = reader.ReadF2dot14();
transform.M22 = reader.ReadF2dot14();
}
// Composite bounds are read later.
composite = GlyphVector.Append(composite, GlyphVector.Transform(GlyphVector.DeepClone(createdGlyphs[glyphIndex]), transform), default);
}
while ((flags & CompositeGlyphFlags.MoreComponents) != 0);
return composite;
}
private static byte[] ExpandBitmap(byte[] orgBBoxBitmap)
{
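            // Illustrative example: the input byte 0b10110000 expands to the eight bytes
            // [1, 0, 1, 1, 0, 0, 0, 0] (most significant bit first), so bit i of the bbox bitmap
            // lands at expandArr[i] and can be indexed directly by glyph id.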
byte[] expandArr = new byte[orgBBoxBitmap.Length * 8];
int index = 0;
for (int i = 0; i < orgBBoxBitmap.Length; i++)
{
byte b = orgBBoxBitmap[i];
expandArr[index++] = (byte)((b >> 7) & 0x1);
expandArr[index++] = (byte)((b >> 6) & 0x1);
expandArr[index++] = (byte)((b >> 5) & 0x1);
expandArr[index++] = (byte)((b >> 4) & 0x1);
expandArr[index++] = (byte)((b >> 3) & 0x1);
expandArr[index++] = (byte)((b >> 2) & 0x1);
expandArr[index++] = (byte)((b >> 1) & 0x1);
expandArr[index++] = (byte)((b >> 0) & 0x1);
}
return expandArr;
}
/// <summary>
/// Reads the UIntBase128 Data Type.
/// </summary>
/// <param name="reader">The binary reader using big endian encoding.</param>
/// <param name="result">The result as uint.</param>
/// <returns>true, if succeeded.</returns>
private static bool ReadUIntBase128(BigEndianBinaryReader reader, out uint result)
{
// UIntBase128 is a different variable length encoding of unsigned integers,
// suitable for values up to 2^(32) - 1.
// A UIntBase128 encoded number is a sequence of bytes for which the most significant bit
// is set for all but the last byte,
// and clear for the last byte.
//
            // The number itself is base-128 encoded in the lower 7 bits of each byte.
            // Thus, the decoding procedure for a UIntBase128 is:
            // start with value = 0;
            // consume a byte, setting value = (old value * 128) + (byte bitwise-AND 127);
            // repeat the last step until the most significant bit of the byte is clear.
            //
            // The UIntBase128 encoding format allows the possibility of sub-optimal encodings,
            // where e.g. the same numerical value can be represented with a variable number of bytes (utilizing leading 'zeros').
            // For example, the value 63 could be encoded as either one byte 0x3F or two (or more) bytes: [0x80, 0x3F].
            // An encoder must not allow this to happen and must produce the shortest possible encoding.
            // A decoder MUST reject the font file if it encounters a UIntBase128-encoded value with leading zeros (a value that starts with the byte 0x80),
            // if a UIntBase128-encoded sequence is longer than 5 bytes,
            // or if a UIntBase128-encoded value exceeds 2^32 - 1.
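            // Worked example (illustrative): the bytes [0x81, 0x02] decode as
            // ((0x81 & 0x7F) << 7) | (0x02 & 0x7F) = (1 << 7) | 2 = 130; the second byte has its
            // most significant bit clear, so decoding stops there.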
uint accum = 0;
result = 0;
for (int i = 0; i < 5; i++)
{
byte data_byte = reader.ReadByte();
// No leading 0's
if (i == 0 && data_byte == 0x80)
{
return false;
}
// If any of top 7 bits are set then << 7 would overflow.
if ((accum & 0xFE000000) != 0)
{
return false;
}
accum = (accum << 7) | (uint)(data_byte & 0x7F);
// Spin until most significant bit of data byte is false.
if ((data_byte & 0x80) == 0)
{
result = accum;
return true;
}
}
// UIntBase128 sequence exceeds 5 bytes.
return false;
}
/// <summary>
/// Reads the UIntBase255 Data Type.
/// </summary>
/// <param name="reader">The binary reader using big endian encoding.</param>
/// <returns>The UIntBase255 result.</returns>
private static ushort Read255UInt16(BigEndianBinaryReader reader)
{
// 255UInt16 Variable-length encoding of a 16-bit unsigned integer for optimized intermediate font data storage.
// 255UInt16 is a variable-length encoding of an unsigned integer
// in the range 0 to 65535 inclusive.
// This data type is intended to be used as intermediate representation of various font values,
// which are typically expressed as UInt16 but represent relatively small values.
// Depending on the encoded value, the length of the data field may be one to three bytes,
// where the value of the first byte either represents the small value itself or is treated as a code that defines the format of the additional byte(s).
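            // Illustrative examples: a first byte in 0..252 is the value itself; 255 followed by 0x10
            // decodes to 0x10 + 253 = 269; 254 followed by 0x10 decodes to 0x10 + 506 = 522; and 253 is
            // followed by a big-endian UInt16 holding the value verbatim.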
byte code = reader.ReadByte();
if (code == WordCode)
{
int value = reader.ReadByte();
value <<= 8;
value &= 0xff00;
int value2 = reader.ReadByte();
value |= value2 & 0x00ff;
return (ushort)value;
}
else if (code == OneMoreByteCode1)
{
return (ushort)(reader.ReadByte() + LowestUCode);
}
else if (code == OneMoreByteCode2)
{
return (ushort)(reader.ReadByte() + (LowestUCode * 2));
}
else
{
return code;
}
}
private struct GlyphData
{
public readonly ushort GlyphIndex;
public readonly short NumContour;
public bool CompositeHasInstructions;
public GlyphData(ushort glyphIndex, short contourCount)
{
this.GlyphIndex = glyphIndex;
this.NumContour = contourCount;
this.CompositeHasInstructions = false;
}
}
}
}
| |
/* ====================================================================
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==================================================================== */
namespace NPOI.SS.UserModel
{
using System;
using System.Collections.Generic;
using System.Globalization;
/**
* Utility to identify built-in formats. The following is a list of the formats as
* returned by this class.<p/>
*<p/>
* 0, "General"<br/>
* 1, "0"<br/>
* 2, "0.00"<br/>
* 3, "#,##0"<br/>
* 4, "#,##0.00"<br/>
* 5, "$#,##0_);($#,##0)"<br/>
* 6, "$#,##0_);[Red]($#,##0)"<br/>
* 7, "$#,##0.00);($#,##0.00)"<br/>
* 8, "$#,##0.00_);[Red]($#,##0.00)"<br/>
* 9, "0%"<br/>
* 0xa, "0.00%"<br/>
* 0xb, "0.00E+00"<br/>
* 0xc, "# ?/?"<br/>
* 0xd, "# ??/??"<br/>
* 0xe, "m/d/yy"<br/>
* 0xf, "d-mmm-yy"<br/>
* 0x10, "d-mmm"<br/>
* 0x11, "mmm-yy"<br/>
* 0x12, "h:mm AM/PM"<br/>
* 0x13, "h:mm:ss AM/PM"<br/>
* 0x14, "h:mm"<br/>
* 0x15, "h:mm:ss"<br/>
* 0x16, "m/d/yy h:mm"<br/>
*<p/>
* // 0x17 - 0x24 reserved for international and undocumented
* 0x25, "#,##0_);(#,##0)"<br/>
* 0x26, "#,##0_);[Red](#,##0)"<br/>
* 0x27, "#,##0.00_);(#,##0.00)"<br/>
* 0x28, "#,##0.00_);[Red](#,##0.00)"<br/>
* 0x29, "_(*#,##0_);_(*(#,##0);_(* \"-\"_);_(@_)"<br/>
* 0x2a, "_($*#,##0_);_($*(#,##0);_($* \"-\"_);_(@_)"<br/>
* 0x2b, "_(*#,##0.00_);_(*(#,##0.00);_(*\"-\"??_);_(@_)"<br/>
* 0x2c, "_($*#,##0.00_);_($*(#,##0.00);_($*\"-\"??_);_(@_)"<br/>
* 0x2d, "mm:ss"<br/>
* 0x2e, "[h]:mm:ss"<br/>
* 0x2f, "mm:ss.0"<br/>
* 0x30, "##0.0E+0"<br/>
* 0x31, "@" - This is text format.<br/>
* 0x31 "text" - Alias for "@"<br/>
* <p/>
*
* @author Yegor Kozlov
*
* Modified 6/17/09 by Stanislav Shor - positive formats don't need starting '('
*
*/
public class BuiltinFormats
{
/**
* The first user-defined format starts at 164.
*/
public const int FIRST_USER_DEFINED_FORMAT_INDEX = 164;
private static String[] _formats;
        /*
             0 General     General                                  18 Time       h:mm AM/PM
             1 Decimal     0                                        19 Time       h:mm:ss AM/PM
             2 Decimal     0.00                                     20 Time       h:mm
             3 Decimal     #,##0                                    21 Time       h:mm:ss
             4 Decimal     #,##0.00                                 22 Date/Time  M/D/YY h:mm
             5 Currency    "$"#,##0_);("$"#,##0)                    37 Account.   _(#,##0_);(#,##0)
             6 Currency    "$"#,##0_);[Red]("$"#,##0)               38 Account.   _(#,##0_);[Red](#,##0)
             7 Currency    "$"#,##0.00_);("$"#,##0.00)              39 Account.   _(#,##0.00_);(#,##0.00)
             8 Currency    "$"#,##0.00_);[Red]("$"#,##0.00)         40 Account.   _(#,##0.00_);[Red](#,##0.00)
             9 Percent     0%                                       41 Currency   _("$"* #,##0_);_("$"* (#,##0);_("$"* "-"_);_(@_)
            10 Percent     0.00%                                    42 Currency   _(* #,##0_);_(* (#,##0);_(* "-"_);_(@_)
            11 Scientific  0.00E+00                                 43 Currency   _("$"* #,##0.00_);_("$"* (#,##0.00);_("$"* "-"??_);_(@_)
            12 Fraction    # ?/?                                    44 Currency   _(* #,##0.00_);_(* (#,##0.00);_(* "-"??_);_(@_)
            13 Fraction    # ??/??                                  45 Time       mm:ss
            14 Date        M/D/YY                                   46 Time       [h]:mm:ss
            15 Date        D-MMM-YY                                 47 Time       mm:ss.0
            16 Date        D-MMM                                    48 Scientific ##0.0E+0
            17 Date        MMM-YY                                   49 Text       @
        * */
static BuiltinFormats()
{
List<String> m = new List<String>();
PutFormat(m, 0, "General");
PutFormat(m, 1, "0");
PutFormat(m, 2, "0.00");
PutFormat(m, 3, "#,##0");
PutFormat(m, 4, "#,##0.00");
PutFormat(m, 5, "\"$\"#,##0_);(\"$\"#,##0)");
PutFormat(m, 6, "\"$\"#,##0_);[Red](\"$\"#,##0)");
PutFormat(m, 7, "\"$\"#,##0.00_);(\"$\"#,##0.00)");
PutFormat(m, 8, "\"$\"#,##0.00_);[Red](\"$\"#,##0.00)");
PutFormat(m, 9, "0%");
PutFormat(m, 0xa, "0.00%");
PutFormat(m, 0xb, "0.00E+00");
PutFormat(m, 0xc, "# ?/?");
PutFormat(m, 0xd, "# ??/??");
PutFormat(m, 0xe, "m/d/yy");
PutFormat(m, 0xf, "d-mmm-yy");
PutFormat(m, 0x10, "d-mmm");
PutFormat(m, 0x11, "mmm-yy");
PutFormat(m, 0x12, "h:mm AM/PM");
PutFormat(m, 0x13, "h:mm:ss AM/PM");
PutFormat(m, 0x14, "h:mm");
PutFormat(m, 0x15, "h:mm:ss");
PutFormat(m, 0x16, "m/d/yy h:mm");
// 0x17 - 0x24 reserved for international and undocumented
for (int i = 0x17; i <= 0x24; i++)
{
// TODO - one junit relies on these values which seems incorrect
PutFormat(m, i, "reserved-0x" + (i).ToString("X", CultureInfo.CurrentCulture));
}
PutFormat(m, 0x25, "#,##0_);(#,##0)");
PutFormat(m, 0x26, "#,##0_);[Red](#,##0)");
PutFormat(m, 0x27, "#,##0.00_);(#,##0.00)");
PutFormat(m, 0x28, "#,##0.00_);[Red](#,##0.00)");
PutFormat(m, 0x29, "_(\"$\"* #,##0_);_(\"$\"* (#,##0);_(\"$\"* \"-\"_);_(@_)");
PutFormat(m, 0x2a, "_(* #,##0_);_(* (#,##0);_(* \"-\"_);_(@_)");
PutFormat(m, 0x2b, "_(\"$\"* #,##0.00_);_(\"$\"* (#,##0.00);_(\"$\"* \"-\"??_);_(@_)");
PutFormat(m, 0x2c, "_(* #,##0.00_);_(* (#,##0.00);_(* \"-\"??_);_(@_)");
PutFormat(m, 0x2d, "mm:ss");
PutFormat(m, 0x2e, "[h]:mm:ss");
PutFormat(m, 0x2f, "mm:ss.0");
PutFormat(m, 0x30, "##0.0E+0");
PutFormat(m, 0x31, "@");
//String[] ss = new String[m.Count];
String[] ss = m.ToArray();
_formats = ss;
}
private static void PutFormat(List<String> m, int index, String value)
{
if (m.Count != index)
{
throw new InvalidOperationException("index " + index + " is wrong");
}
m.Add(value);
}
/**
* @deprecated (May 2009) use {@link #getAll()}
*/
[Obsolete]
public static Dictionary<int, String> GetBuiltinFormats()
{
Dictionary<int, String> result = new Dictionary<int, String>();
for (int i = 0; i < _formats.Length; i++)
{
result.Add(i, _formats[i]);
}
return result;
}
/**
* @return array of built-in data formats
*/
public static String[] GetAll()
{
return (String[])_formats.Clone();
}
/**
* Get the format string that matches the given format index
*
* @param index of a built in format
         * @return the format string at the given index, or <code>null</code> if there is no built-in format at that index
*/
public static String GetBuiltinFormat(int index)
{
if (index < 0 || index >= _formats.Length)
{
return null;
}
return _formats[index];
}
/**
* Get the format index that matches the given format string.
*
* <p>
* Automatically converts "text" to excel's format string to represent text.
* </p>
* @param pFmt string matching a built-in format
* @return index of format or -1 if undefined.
*/
public static int GetBuiltinFormat(String pFmt)
{
String fmt;
if (string.Compare(pFmt, ("TEXT"), StringComparison.OrdinalIgnoreCase) == 0)
{
fmt = "@";
}
else
{
fmt = pFmt;
}
for (int i = 0; i < _formats.Length; i++)
{
if (fmt.Equals(_formats[i]))
{
return i;
}
}
return -1;
}
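        // Usage sketch (illustrative, not part of the original source):
        //   BuiltinFormats.GetBuiltinFormat(0xe)    -> "m/d/yy"
        //   BuiltinFormats.GetBuiltinFormat("text") -> 0x31 (the "@" text format)
        //   BuiltinFormats.GetBuiltinFormat("yyyy") -> -1 (not a built-in format)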
}
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using Microsoft.Win32.SafeHandles;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Diagnostics.Contracts;
using System.Runtime.InteropServices;
using System.Security;
using System.Threading;
using System.Threading.Tasks;
namespace System.IO.Pipes
{
public abstract partial class PipeStream : Stream
{
private SafePipeHandle _handle;
private bool _canRead;
private bool _canWrite;
private bool _isAsync;
private bool _isMessageComplete;
private bool _isFromExistingHandle;
private bool _isHandleExposed;
private PipeTransmissionMode _readMode;
private PipeTransmissionMode _transmissionMode;
private PipeDirection _pipeDirection;
private int _outBufferSize;
private PipeState _state;
private StreamAsyncHelper _streamAsyncHelper;
protected PipeStream(PipeDirection direction, int bufferSize)
{
if (direction < PipeDirection.In || direction > PipeDirection.InOut)
{
throw new ArgumentOutOfRangeException("direction", SR.ArgumentOutOfRange_DirectionModeInOutOrInOut);
}
if (bufferSize < 0)
{
throw new ArgumentOutOfRangeException("bufferSize", SR.ArgumentOutOfRange_NeedNonNegNum);
}
Init(direction, PipeTransmissionMode.Byte, bufferSize);
}
protected PipeStream(PipeDirection direction, PipeTransmissionMode transmissionMode, int outBufferSize)
{
if (direction < PipeDirection.In || direction > PipeDirection.InOut)
{
throw new ArgumentOutOfRangeException("direction", SR.ArgumentOutOfRange_DirectionModeInOutOrInOut);
}
if (transmissionMode < PipeTransmissionMode.Byte || transmissionMode > PipeTransmissionMode.Message)
{
throw new ArgumentOutOfRangeException("transmissionMode", SR.ArgumentOutOfRange_TransmissionModeByteOrMsg);
}
if (outBufferSize < 0)
{
throw new ArgumentOutOfRangeException("outBufferSize", SR.ArgumentOutOfRange_NeedNonNegNum);
}
Init(direction, transmissionMode, outBufferSize);
}
private void Init(PipeDirection direction, PipeTransmissionMode transmissionMode, int outBufferSize)
{
Debug.Assert(direction >= PipeDirection.In && direction <= PipeDirection.InOut, "invalid pipe direction");
Debug.Assert(transmissionMode >= PipeTransmissionMode.Byte && transmissionMode <= PipeTransmissionMode.Message, "transmissionMode is out of range");
Debug.Assert(outBufferSize >= 0, "outBufferSize is negative");
// always defaults to this until overridden
_readMode = transmissionMode;
_transmissionMode = transmissionMode;
_pipeDirection = direction;
if ((_pipeDirection & PipeDirection.In) != 0)
{
_canRead = true;
}
if ((_pipeDirection & PipeDirection.Out) != 0)
{
_canWrite = true;
}
_outBufferSize = outBufferSize;
// This should always default to true
_isMessageComplete = true;
_state = PipeState.WaitingToConnect;
_streamAsyncHelper = new StreamAsyncHelper(this);
}
// Once a PipeStream has a handle ready, it should call this method to set up the PipeStream. If
// the pipe is in a connected state already, it should also set the IsConnected (protected) property.
// This method may also be called to uninitialize a handle, setting it to null.
[SecuritySafeCritical]
internal void InitializeHandle(SafePipeHandle handle, bool isExposed, bool isAsync)
{
if (isAsync && handle != null)
{
InitializeAsyncHandle(handle);
}
_handle = handle;
_isAsync = isAsync;
            // Track these separately; _isHandleExposed will get updated if accessed through the property.
_isHandleExposed = isExposed;
_isFromExistingHandle = isExposed;
}
[SecurityCritical]
public override int Read([In, Out] byte[] buffer, int offset, int count)
{
if (buffer == null)
{
throw new ArgumentNullException("buffer", SR.ArgumentNull_Buffer);
}
if (offset < 0)
{
throw new ArgumentOutOfRangeException("offset", SR.ArgumentOutOfRange_NeedNonNegNum);
}
if (count < 0)
{
throw new ArgumentOutOfRangeException("count", SR.ArgumentOutOfRange_NeedNonNegNum);
}
if (buffer.Length - offset < count)
{
throw new ArgumentException(SR.Argument_InvalidOffLen);
}
if (!CanRead)
{
throw __Error.GetReadNotSupported();
}
CheckReadOperations();
return ReadCore(buffer, offset, count);
}
[SecuritySafeCritical]
public override Task<int> ReadAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if (buffer == null)
throw new ArgumentNullException("buffer", SR.ArgumentNull_Buffer);
if (offset < 0)
throw new ArgumentOutOfRangeException("offset", SR.ArgumentOutOfRange_NeedNonNegNum);
if (count < 0)
throw new ArgumentOutOfRangeException("count", SR.ArgumentOutOfRange_NeedNonNegNum);
if (buffer.Length - offset < count)
throw new ArgumentException(SR.Argument_InvalidOffLen);
Contract.EndContractBlock();
if (cancellationToken.IsCancellationRequested)
{
return Task.FromCanceled<int>(cancellationToken);
}
CheckReadOperations();
if (!_isAsync)
{
return base.ReadAsync(buffer, offset, count, cancellationToken);
}
return ReadAsyncCore(buffer, offset, count, cancellationToken);
}
[SecurityCritical]
public override void Write(byte[] buffer, int offset, int count)
{
if (buffer == null)
{
throw new ArgumentNullException("buffer", SR.ArgumentNull_Buffer);
}
if (offset < 0)
{
throw new ArgumentOutOfRangeException("offset", SR.ArgumentOutOfRange_NeedNonNegNum);
}
if (count < 0)
{
throw new ArgumentOutOfRangeException("count", SR.ArgumentOutOfRange_NeedNonNegNum);
}
if (buffer.Length - offset < count)
{
throw new ArgumentException(SR.Argument_InvalidOffLen);
}
if (!CanWrite)
{
throw __Error.GetWriteNotSupported();
}
CheckWriteOperations();
WriteCore(buffer, offset, count);
return;
}
[SecuritySafeCritical]
public override Task WriteAsync(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
if (buffer == null)
throw new ArgumentNullException("buffer", SR.ArgumentNull_Buffer);
if (offset < 0)
throw new ArgumentOutOfRangeException("offset", SR.ArgumentOutOfRange_NeedNonNegNum);
if (count < 0)
throw new ArgumentOutOfRangeException("count", SR.ArgumentOutOfRange_NeedNonNegNum);
if (buffer.Length - offset < count)
throw new ArgumentException(SR.Argument_InvalidOffLen);
Contract.EndContractBlock();
if (cancellationToken.IsCancellationRequested)
{
return Task.FromCanceled<int>(cancellationToken);
}
CheckWriteOperations();
if (!_isAsync)
{
return base.WriteAsync(buffer, offset, count, cancellationToken);
}
return WriteAsyncCore(buffer, offset, count, cancellationToken);
}
[ThreadStatic]
private static byte[] t_singleByteArray;
private static byte[] SingleByteArray
{
get { return t_singleByteArray ?? (t_singleByteArray = new byte[1]); }
}
// Reads a byte from the pipe stream. Returns the byte cast to an int
// or -1 if the connection has been broken.
[SecurityCritical]
public override int ReadByte()
{
CheckReadOperations();
if (!CanRead)
{
throw __Error.GetReadNotSupported();
}
byte[] buffer = SingleByteArray;
int n = ReadCore(buffer, 0, 1);
if (n == 0) { return -1; }
else return (int)buffer[0];
}
[SecurityCritical]
public override void WriteByte(byte value)
{
CheckWriteOperations();
if (!CanWrite)
{
throw __Error.GetWriteNotSupported();
}
byte[] buffer = SingleByteArray;
buffer[0] = value;
WriteCore(buffer, 0, 1);
}
// Does nothing on PipeStreams. We cannot call Interop.FlushFileBuffers here because we can deadlock
// if the other end of the pipe is no longer interested in reading from the pipe.
[SecurityCritical]
public override void Flush()
{
CheckWriteOperations();
if (!CanWrite)
{
throw __Error.GetWriteNotSupported();
}
}
[SecurityCritical]
protected override void Dispose(bool disposing)
{
try
{
// Nothing will be done differently based on whether we are
// disposing vs. finalizing.
if (_handle != null && !_handle.IsClosed)
{
_handle.Dispose();
}
UninitializeAsyncHandle();
}
finally
{
base.Dispose(disposing);
}
_state = PipeState.Closed;
}
// ********************** Public Properties *********************** //
// APIs use coarser definition of connected, but these map to internal
// Connected/Disconnected states. Note that setter is protected; only
// intended to be called by custom PipeStream concrete children
public bool IsConnected
{
get
{
return State == PipeState.Connected;
}
protected set
{
_state = (value) ? PipeState.Connected : PipeState.Disconnected;
}
}
public bool IsAsync
{
get { return _isAsync; }
}
        // Set by the most recent call to Read or EndRead. Will be false if there is more data remaining
        // in the message; otherwise it is set to true.
public bool IsMessageComplete
{
[SecurityCritical]
[SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Security model of pipes: demand at creation but no subsequent demands")]
get
{
// omitting pipe broken exception to allow reader to finish getting message
if (_state == PipeState.WaitingToConnect)
{
throw new InvalidOperationException(SR.InvalidOperation_PipeNotYetConnected);
}
if (_state == PipeState.Disconnected)
{
throw new InvalidOperationException(SR.InvalidOperation_PipeDisconnected);
}
if (CheckOperationsRequiresSetHandle && _handle == null)
{
throw new InvalidOperationException(SR.InvalidOperation_PipeHandleNotSet);
}
if ((_state == PipeState.Closed) || (_handle != null && _handle.IsClosed))
{
throw __Error.GetPipeNotOpen();
}
// don't need to check transmission mode; just care about read mode. Always use
// cached mode; otherwise could throw for valid message when other side is shutting down
if (_readMode != PipeTransmissionMode.Message)
{
throw new InvalidOperationException(SR.InvalidOperation_PipeReadModeNotMessage);
}
return _isMessageComplete;
}
}
public SafePipeHandle SafePipeHandle
{
[SecurityCritical]
[SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Security model of pipes: demand at creation but no subsequent demands")]
get
{
if (_handle == null)
{
throw new InvalidOperationException(SR.InvalidOperation_PipeHandleNotSet);
}
if (_handle.IsClosed)
{
throw __Error.GetPipeNotOpen();
}
_isHandleExposed = true;
return _handle;
}
}
internal SafePipeHandle InternalHandle
{
[SecurityCritical]
get
{
return _handle;
}
}
internal bool IsHandleExposed
{
get
{
return _isHandleExposed;
}
}
public override bool CanRead
{
[Pure]
get
{
return _canRead;
}
}
public override bool CanWrite
{
[Pure]
get
{
return _canWrite;
}
}
public override bool CanSeek
{
[Pure]
get
{
return false;
}
}
public override long Length
{
get
{
throw __Error.GetSeekNotSupported();
}
}
public override long Position
{
get
{
throw __Error.GetSeekNotSupported();
}
set
{
throw __Error.GetSeekNotSupported();
}
}
public override void SetLength(long value)
{
throw __Error.GetSeekNotSupported();
}
public override long Seek(long offset, SeekOrigin origin)
{
throw __Error.GetSeekNotSupported();
}
// anonymous pipe ends and named pipe server can get/set properties when broken
// or connected. Named client overrides
[SecurityCritical]
internal virtual void CheckPipePropertyOperations()
{
if (CheckOperationsRequiresSetHandle && _handle == null)
{
throw new InvalidOperationException(SR.InvalidOperation_PipeHandleNotSet);
}
// these throw object disposed
if ((_state == PipeState.Closed) || (_handle != null && _handle.IsClosed))
{
throw __Error.GetPipeNotOpen();
}
}
// Reads can be done in Connected and Broken. In the latter,
// read returns 0 bytes
[SecurityCritical]
[SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Consistent with security model")]
internal void CheckReadOperations()
{
// Invalid operation
if (_state == PipeState.WaitingToConnect)
{
throw new InvalidOperationException(SR.InvalidOperation_PipeNotYetConnected);
}
if (_state == PipeState.Disconnected)
{
throw new InvalidOperationException(SR.InvalidOperation_PipeDisconnected);
}
if (CheckOperationsRequiresSetHandle && _handle == null)
{
throw new InvalidOperationException(SR.InvalidOperation_PipeHandleNotSet);
}
// these throw object disposed
if ((_state == PipeState.Closed) || (_handle != null && _handle.IsClosed))
{
throw __Error.GetPipeNotOpen();
}
}
// Writes can only be done in connected state
[SecurityCritical]
[SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Consistent with security model")]
internal void CheckWriteOperations()
{
// Invalid operation
if (_state == PipeState.WaitingToConnect)
{
throw new InvalidOperationException(SR.InvalidOperation_PipeNotYetConnected);
}
if (_state == PipeState.Disconnected)
{
throw new InvalidOperationException(SR.InvalidOperation_PipeDisconnected);
}
if (CheckOperationsRequiresSetHandle && _handle == null)
{
throw new InvalidOperationException(SR.InvalidOperation_PipeHandleNotSet);
}
// IOException
if (_state == PipeState.Broken)
{
throw new IOException(SR.IO_PipeBroken);
}
// these throw object disposed
if ((_state == PipeState.Closed) || (_handle != null && _handle.IsClosed))
{
throw __Error.GetPipeNotOpen();
}
}
internal PipeState State
{
get
{
return _state;
}
set
{
_state = value;
}
}
}
}
| |
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Apache.Ignite.Core.Tests.Cache.Query
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
using System.Text;
using Apache.Ignite.Core.Binary;
using Apache.Ignite.Core.Cache;
using Apache.Ignite.Core.Cache.Query;
using Apache.Ignite.Core.Common;
using Apache.Ignite.Core.Impl;
using Apache.Ignite.Core.Impl.Binary;
using NUnit.Framework;
/// <summary>
/// Queries tests.
/// </summary>
public class CacheQueriesTest
{
/** Grid count. */
private const int GridCnt = 2;
/** Cache name. */
private const string CacheName = "cache";
/** Path to XML configuration. */
private const string CfgPath = "config\\cache-query.xml";
/** Maximum amount of items in cache. */
private const int MaxItemCnt = 100;
/// <summary>
///
/// </summary>
[TestFixtureSetUp]
public virtual void StartGrids()
{
TestUtils.JvmDebug = true;
TestUtils.KillProcesses();
IgniteConfigurationEx cfg = new IgniteConfigurationEx
{
BinaryConfiguration = new BinaryConfiguration
{
TypeConfigurations = new[]
{
new BinaryTypeConfiguration(typeof (QueryPerson)),
new BinaryTypeConfiguration(typeof (BinarizableScanQueryFilter<QueryPerson>)),
new BinaryTypeConfiguration(typeof (BinarizableScanQueryFilter<BinaryObject>))
}
},
JvmClasspath = TestUtils.CreateTestClasspath(),
JvmOptions = TestUtils.TestJavaOptions(),
SpringConfigUrl = CfgPath
};
for (int i = 0; i < GridCnt; i++)
{
cfg.GridName = "grid-" + i;
Ignition.Start(cfg);
}
}
/// <summary>
///
/// </summary>
[TestFixtureTearDown]
public virtual void StopGrids()
{
for (int i = 0; i < GridCnt; i++)
Ignition.Stop("grid-" + i, true);
}
/// <summary>
///
/// </summary>
[SetUp]
public virtual void BeforeTest()
{
Console.WriteLine("Test started: " + TestContext.CurrentContext.Test.Name);
}
/// <summary>
///
/// </summary>
[TearDown]
public virtual void AfterTest()
{
var cache = Cache();
for (int i = 0; i < GridCnt; i++)
{
for (int j = 0; j < MaxItemCnt; j++)
cache.Remove(j);
Assert.IsTrue(cache.IsEmpty());
}
TestUtils.AssertHandleRegistryIsEmpty(300,
Enumerable.Range(0, GridCnt).Select(x => Ignition.GetIgnite("grid-" + x)).ToArray());
Console.WriteLine("Test finished: " + TestContext.CurrentContext.Test.Name);
}
/// <summary>
///
/// </summary>
/// <param name="idx"></param>
/// <returns></returns>
public IIgnite GetIgnite(int idx)
{
return Ignition.GetIgnite("grid-" + idx);
}
/// <summary>
///
/// </summary>
/// <param name="idx"></param>
/// <returns></returns>
public ICache<int, QueryPerson> Cache(int idx)
{
return GetIgnite(idx).GetCache<int, QueryPerson>(CacheName);
}
/// <summary>
///
/// </summary>
/// <returns></returns>
public ICache<int, QueryPerson> Cache()
{
return Cache(0);
}
/// <summary>
/// Test arguments validation for SQL queries.
/// </summary>
[Test]
public void TestValidationSql()
{
// 1. No sql.
Assert.Throws<ArgumentException>(() =>
{ Cache().Query(new SqlQuery(typeof(QueryPerson), null)); });
// 2. No type.
Assert.Throws<ArgumentException>(() =>
{ Cache().Query(new SqlQuery((string)null, "age >= 50")); });
}
/// <summary>
/// Test arguments validation for SQL fields queries.
/// </summary>
[Test]
public void TestValidationSqlFields()
{
// 1. No sql.
Assert.Throws<ArgumentException>(() => { Cache().QueryFields(new SqlFieldsQuery(null)); });
}
/// <summary>
/// Test arguments validation for TEXT queries.
/// </summary>
[Test]
public void TestValidationText()
{
// 1. No text.
Assert.Throws<ArgumentException>(() =>
{ Cache().Query(new TextQuery(typeof(QueryPerson), null)); });
// 2. No type.
Assert.Throws<ArgumentException>(() =>
{ Cache().Query(new TextQuery((string)null, "Ivanov")); });
}
/// <summary>
/// Cursor tests.
/// </summary>
[Test]
[SuppressMessage("ReSharper", "ReturnValueOfPureMethodIsNotUsed")]
public void TestCursor()
{
Cache().Put(1, new QueryPerson("Ivanov", 30));
Cache().Put(1, new QueryPerson("Petrov", 40));
Cache().Put(1, new QueryPerson("Sidorov", 50));
SqlQuery qry = new SqlQuery(typeof(QueryPerson), "age >= 20");
// 1. Test GetAll().
using (IQueryCursor<ICacheEntry<int, QueryPerson>> cursor = Cache().Query(qry))
{
cursor.GetAll();
Assert.Throws<InvalidOperationException>(() => { cursor.GetAll(); });
Assert.Throws<InvalidOperationException>(() => { cursor.GetEnumerator(); });
}
// 2. Test GetEnumerator.
using (IQueryCursor<ICacheEntry<int, QueryPerson>> cursor = Cache().Query(qry))
{
cursor.GetEnumerator();
Assert.Throws<InvalidOperationException>(() => { cursor.GetAll(); });
Assert.Throws<InvalidOperationException>(() => { cursor.GetEnumerator(); });
}
}
/// <summary>
/// Test enumerator.
/// </summary>
[Test]
[SuppressMessage("ReSharper", "UnusedVariable")]
public void TestEnumerator()
{
Cache().Put(1, new QueryPerson("Ivanov", 30));
Cache().Put(2, new QueryPerson("Petrov", 40));
Cache().Put(3, new QueryPerson("Sidorov", 50));
Cache().Put(4, new QueryPerson("Unknown", 60));
// 1. Empty result set.
using (
IQueryCursor<ICacheEntry<int, QueryPerson>> cursor =
Cache().Query(new SqlQuery(typeof(QueryPerson), "age = 100")))
{
IEnumerator<ICacheEntry<int, QueryPerson>> e = cursor.GetEnumerator();
Assert.Throws<InvalidOperationException>(() =>
{ ICacheEntry<int, QueryPerson> entry = e.Current; });
Assert.IsFalse(e.MoveNext());
Assert.Throws<InvalidOperationException>(() =>
{ ICacheEntry<int, QueryPerson> entry = e.Current; });
Assert.Throws<NotSupportedException>(() => e.Reset());
}
SqlQuery qry = new SqlQuery(typeof (QueryPerson), "age < 60");
// 2. Page size is bigger than result set.
qry.PageSize = 4;
CheckEnumeratorQuery(qry);
// 3. Page size equal to result set.
qry.PageSize = 3;
CheckEnumeratorQuery(qry);
            // 4. Page size is less than result set.
qry.PageSize = 2;
CheckEnumeratorQuery(qry);
}
/// <summary>
/// Test SQL query arguments passing.
/// </summary>
public void TestSqlQueryArguments()
{
Cache().Put(1, new QueryPerson("Ivanov", 30));
Cache().Put(2, new QueryPerson("Petrov", 40));
Cache().Put(3, new QueryPerson("Sidorov", 50));
// 1. Empty result set.
using (
IQueryCursor<ICacheEntry<int, QueryPerson>> cursor =
Cache().Query(new SqlQuery(typeof(QueryPerson), "age < ?", 50)))
{
foreach (ICacheEntry<int, QueryPerson> entry in cursor.GetAll())
Assert.IsTrue(entry.Key == 1 || entry.Key == 2);
}
}
/// <summary>
/// Test SQL fields query arguments passing.
/// </summary>
public void TestSqlFieldsQueryArguments()
{
Cache().Put(1, new QueryPerson("Ivanov", 30));
Cache().Put(2, new QueryPerson("Petrov", 40));
Cache().Put(3, new QueryPerson("Sidorov", 50));
// 1. Empty result set.
using (
IQueryCursor<IList> cursor = Cache().QueryFields(
new SqlFieldsQuery("SELECT age FROM QueryPerson WHERE age < ?", 50)))
{
foreach (IList entry in cursor.GetAll())
Assert.IsTrue((int) entry[0] < 50);
}
}
/// <summary>
/// Check query result for enumerator test.
/// </summary>
/// <param name="qry">QUery.</param>
private void CheckEnumeratorQuery(SqlQuery qry)
{
using (IQueryCursor<ICacheEntry<int, QueryPerson>> cursor = Cache().Query(qry))
{
bool first = false;
bool second = false;
bool third = false;
foreach (var entry in cursor)
{
if (entry.Key == 1)
{
first = true;
Assert.AreEqual("Ivanov", entry.Value.Name);
Assert.AreEqual(30, entry.Value.Age);
}
else if (entry.Key == 2)
{
second = true;
Assert.AreEqual("Petrov", entry.Value.Name);
Assert.AreEqual(40, entry.Value.Age);
}
else if (entry.Key == 3)
{
third = true;
Assert.AreEqual("Sidorov", entry.Value.Name);
Assert.AreEqual(50, entry.Value.Age);
}
else
Assert.Fail("Unexpected value: " + entry);
}
Assert.IsTrue(first && second && third);
}
}
/// <summary>
/// Check SQL query.
/// </summary>
[Test]
public void TestSqlQuery()
{
CheckSqlQuery(MaxItemCnt, false, false);
}
/// <summary>
/// Check SQL query in binary mode.
/// </summary>
[Test]
public void TestSqlQueryBinary()
{
CheckSqlQuery(MaxItemCnt, false, true);
}
/// <summary>
/// Check local SQL query.
/// </summary>
[Test]
public void TestSqlQueryLocal()
{
CheckSqlQuery(MaxItemCnt, true, false);
}
/// <summary>
/// Check local SQL query in binary mode.
/// </summary>
[Test]
public void TestSqlQueryLocalBinary()
{
CheckSqlQuery(MaxItemCnt, true, true);
}
/// <summary>
/// Check SQL query.
/// </summary>
/// <param name="cnt">Amount of cache entries to create.</param>
/// <param name="loc">Local query flag.</param>
/// <param name="keepBinary">Keep binary flag.</param>
private void CheckSqlQuery(int cnt, bool loc, bool keepBinary)
{
var cache = Cache();
// 1. Populate cache with data, calculating expected count in parallel.
var exp = PopulateCache(cache, loc, cnt, x => x < 50);
// 2. Validate results.
SqlQuery qry = loc ? new SqlQuery(typeof(QueryPerson), "age < 50", true) :
new SqlQuery(typeof(QueryPerson), "age < 50");
ValidateQueryResults(cache, qry, exp, keepBinary);
}
/// <summary>
/// Check SQL fields query.
/// </summary>
[Test]
public void TestSqlFieldsQuery()
{
CheckSqlFieldsQuery(MaxItemCnt, false);
}
/// <summary>
/// Check local SQL fields query.
/// </summary>
[Test]
public void TestSqlFieldsQueryLocal()
{
CheckSqlFieldsQuery(MaxItemCnt, true);
}
/// <summary>
/// Check SQL fields query.
/// </summary>
/// <param name="cnt">Amount of cache entries to create.</param>
/// <param name="loc">Local query flag.</param>
private void CheckSqlFieldsQuery(int cnt, bool loc)
{
var cache = Cache();
// 1. Populate cache with data, calculating expected count in parallel.
var exp = PopulateCache(cache, loc, cnt, x => x < 50);
            // 2. Validate results.
SqlFieldsQuery qry = loc ? new SqlFieldsQuery("SELECT name, age FROM QueryPerson WHERE age < 50", true) :
new SqlFieldsQuery("SELECT name, age FROM QueryPerson WHERE age < 50");
using (IQueryCursor<IList> cursor = cache.QueryFields(qry))
{
HashSet<int> exp0 = new HashSet<int>(exp);
foreach (var entry in cursor.GetAll())
{
Assert.AreEqual(2, entry.Count);
Assert.AreEqual(entry[0].ToString(), entry[1].ToString());
exp0.Remove((int)entry[1]);
}
Assert.AreEqual(0, exp0.Count);
}
using (IQueryCursor<IList> cursor = cache.QueryFields(qry))
{
HashSet<int> exp0 = new HashSet<int>(exp);
foreach (var entry in cursor)
{
Assert.AreEqual(entry[0].ToString(), entry[1].ToString());
exp0.Remove((int)entry[1]);
}
Assert.AreEqual(0, exp0.Count);
}
}
/// <summary>
/// Check text query.
/// </summary>
[Test]
public void TestTextQuery()
{
CheckTextQuery(MaxItemCnt, false, false);
}
/// <summary>
        /// Check text query in binary mode.
/// </summary>
[Test]
public void TestTextQueryBinary()
{
CheckTextQuery(MaxItemCnt, false, true);
}
/// <summary>
        /// Check local text query.
/// </summary>
[Test]
public void TestTextQueryLocal()
{
CheckTextQuery(MaxItemCnt, true, false);
}
/// <summary>
        /// Check local text query in binary mode.
/// </summary>
[Test]
public void TestTextQueryLocalBinary()
{
CheckTextQuery(MaxItemCnt, true, true);
}
/// <summary>
/// Check text query.
/// </summary>
/// <param name="cnt">Amount of cache entries to create.</param>
/// <param name="loc">Local query flag.</param>
/// <param name="keepBinary">Keep binary flag.</param>
private void CheckTextQuery(int cnt, bool loc, bool keepBinary)
{
var cache = Cache();
// 1. Populate cache with data, calculating expected count in parallel.
var exp = PopulateCache(cache, loc, cnt, x => x.ToString().StartsWith("1"));
// 2. Validate results.
TextQuery qry = loc ? new TextQuery(typeof(QueryPerson), "1*", true) :
new TextQuery(typeof(QueryPerson), "1*");
ValidateQueryResults(cache, qry, exp, keepBinary);
}
/// <summary>
/// Check scan query.
/// </summary>
[Test]
public void TestScanQuery()
{
CheckScanQuery<QueryPerson>(MaxItemCnt, false, false);
}
/// <summary>
/// Check scan query in binary mode.
/// </summary>
[Test]
public void TestScanQueryBinary()
{
CheckScanQuery<BinaryObject>(MaxItemCnt, false, true);
}
/// <summary>
/// Check local scan query.
/// </summary>
[Test]
public void TestScanQueryLocal()
{
CheckScanQuery<QueryPerson>(MaxItemCnt, true, false);
}
/// <summary>
/// Check local scan query in binary mode.
/// </summary>
[Test]
public void TestScanQueryLocalBinary()
{
CheckScanQuery<BinaryObject>(MaxItemCnt, true, true);
}
/// <summary>
/// Check scan query with partitions.
/// </summary>
[Test]
public void TestScanQueryPartitions([Values(true, false)] bool loc)
{
CheckScanQueryPartitions<QueryPerson>(MaxItemCnt, loc, false);
}
/// <summary>
/// Check scan query with partitions in binary mode.
/// </summary>
[Test]
public void TestScanQueryPartitionsBinary([Values(true, false)] bool loc)
{
CheckScanQueryPartitions<BinaryObject>(MaxItemCnt, loc, true);
}
/// <summary>
/// Tests that query attempt on non-indexed cache causes an exception.
/// </summary>
[Test]
public void TestIndexingDisabledError()
{
var cache = GetIgnite(0).GetOrCreateCache<int, QueryPerson>("nonindexed_cache");
var queries = new QueryBase[]
{
new TextQuery(typeof (QueryPerson), "1*"),
new SqlQuery(typeof (QueryPerson), "age < 50")
};
foreach (var qry in queries)
{
var err = Assert.Throws<IgniteException>(() => cache.Query(qry));
Assert.AreEqual("Indexing is disabled for cache: nonindexed_cache. " +
"Use setIndexedTypes or setTypeMetadata methods on CacheConfiguration to enable.", err.Message);
}
}
/// <summary>
/// Check scan query.
/// </summary>
/// <param name="cnt">Amount of cache entries to create.</param>
/// <param name="loc">Local query flag.</param>
/// <param name="keepBinary">Keep binary flag.</param>
private void CheckScanQuery<TV>(int cnt, bool loc, bool keepBinary)
{
var cache = Cache();
// No predicate
var exp = PopulateCache(cache, loc, cnt, x => true);
var qry = new ScanQuery<int, TV>();
ValidateQueryResults(cache, qry, exp, keepBinary);
// Serializable
exp = PopulateCache(cache, loc, cnt, x => x < 50);
qry = new ScanQuery<int, TV>(new ScanQueryFilter<TV>());
ValidateQueryResults(cache, qry, exp, keepBinary);
// Binarizable
exp = PopulateCache(cache, loc, cnt, x => x < 50);
qry = new ScanQuery<int, TV>(new BinarizableScanQueryFilter<TV>());
ValidateQueryResults(cache, qry, exp, keepBinary);
// Invalid
exp = PopulateCache(cache, loc, cnt, x => x < 50);
qry = new ScanQuery<int, TV>(new InvalidScanQueryFilter<TV>());
Assert.Throws<BinaryObjectException>(() => ValidateQueryResults(cache, qry, exp, keepBinary));
// Exception
exp = PopulateCache(cache, loc, cnt, x => x < 50);
qry = new ScanQuery<int, TV>(new ScanQueryFilter<TV> {ThrowErr = true});
var ex = Assert.Throws<IgniteException>(() => ValidateQueryResults(cache, qry, exp, keepBinary));
Assert.AreEqual(ScanQueryFilter<TV>.ErrMessage, ex.Message);
}
/// <summary>
/// Checks scan query with partitions.
/// </summary>
/// <param name="cnt">Amount of cache entries to create.</param>
/// <param name="loc">Local query flag.</param>
/// <param name="keepBinary">Keep binary flag.</param>
private void CheckScanQueryPartitions<TV>(int cnt, bool loc, bool keepBinary)
{
StopGrids();
StartGrids();
var cache = Cache();
var aff = cache.Ignite.GetAffinity(CacheName);
var exp = PopulateCache(cache, loc, cnt, x => true); // populate outside the loop (slow)
for (var part = 0; part < aff.Partitions; part++)
{
//var exp0 = new HashSet<int>(exp.Where(x => aff.Partition(x) == part)); // filter expected keys
var exp0 = new HashSet<int>();
foreach (var x in exp)
if (aff.GetPartition(x) == part)
exp0.Add(x);
var qry = new ScanQuery<int, TV> { Partition = part };
Console.WriteLine("Checking query on partition " + part);
ValidateQueryResults(cache, qry, exp0, keepBinary);
}
// Partitions with predicate
exp = PopulateCache(cache, loc, cnt, x => x < 50); // populate outside the loop (slow)
for (var part = 0; part < aff.Partitions; part++)
{
//var exp0 = new HashSet<int>(exp.Where(x => aff.Partition(x) == part)); // filter expected keys
var exp0 = new HashSet<int>();
foreach (var x in exp)
if (aff.GetPartition(x) == part)
exp0.Add(x);
var qry = new ScanQuery<int, TV>(new ScanQueryFilter<TV>()) { Partition = part };
Console.WriteLine("Checking predicate query on partition " + part);
ValidateQueryResults(cache, qry, exp0, keepBinary);
}
}
/// <summary>
/// Validates the query results.
/// </summary>
/// <param name="cache">Cache.</param>
/// <param name="qry">Query.</param>
/// <param name="exp">Expected keys.</param>
/// <param name="keepBinary">Keep binary flag.</param>
private static void ValidateQueryResults(ICache<int, QueryPerson> cache, QueryBase qry, HashSet<int> exp,
bool keepBinary)
{
if (keepBinary)
{
var cache0 = cache.WithKeepBinary<int, IBinaryObject>();
using (var cursor = cache0.Query(qry))
{
HashSet<int> exp0 = new HashSet<int>(exp);
var all = new List<ICacheEntry<int, object>>();
foreach (var entry in cursor.GetAll())
{
all.Add(entry);
Assert.AreEqual(entry.Key.ToString(), entry.Value.GetField<string>("name"));
Assert.AreEqual(entry.Key, entry.Value.GetField<int>("age"));
exp0.Remove(entry.Key);
}
AssertMissingExpectedKeys(exp0, cache, all);
}
using (var cursor = cache0.Query(qry))
{
HashSet<int> exp0 = new HashSet<int>(exp);
var all = new List<ICacheEntry<int, object>>();
foreach (var entry in cursor)
{
all.Add(entry);
Assert.AreEqual(entry.Key.ToString(), entry.Value.GetField<string>("name"));
Assert.AreEqual(entry.Key, entry.Value.GetField<int>("age"));
exp0.Remove(entry.Key);
}
AssertMissingExpectedKeys(exp0, cache, all);
}
}
else
{
using (var cursor = cache.Query(qry))
{
HashSet<int> exp0 = new HashSet<int>(exp);
var all = new List<ICacheEntry<int, object>>();
foreach (var entry in cursor.GetAll())
{
all.Add(entry);
Assert.AreEqual(entry.Key.ToString(), entry.Value.Name);
Assert.AreEqual(entry.Key, entry.Value.Age);
exp0.Remove(entry.Key);
}
AssertMissingExpectedKeys(exp0, cache, all);
}
using (var cursor = cache.Query(qry))
{
HashSet<int> exp0 = new HashSet<int>(exp);
var all = new List<ICacheEntry<int, object>>();
foreach (var entry in cursor)
{
all.Add(entry);
Assert.AreEqual(entry.Key.ToString(), entry.Value.Name);
Assert.AreEqual(entry.Key, entry.Value.Age);
exp0.Remove(entry.Key);
}
AssertMissingExpectedKeys(exp0, cache, all);
}
}
}
/// <summary>
/// Asserts that all expected entries have been received.
/// </summary>
private static void AssertMissingExpectedKeys(ICollection<int> exp, ICache<int, QueryPerson> cache,
IList<ICacheEntry<int, object>> all)
{
if (exp.Count == 0)
return;
var sb = new StringBuilder();
var aff = cache.Ignite.GetAffinity(cache.Name);
foreach (var key in exp)
{
var part = aff.GetPartition(key);
sb.AppendFormat(
"Query did not return expected key '{0}' (exists: {1}), partition '{2}', partition nodes: ",
key, cache.Get(key) != null, part);
var partNodes = aff.MapPartitionToPrimaryAndBackups(part);
foreach (var node in partNodes)
sb.Append(node).Append(" ");
sb.AppendLine(";");
}
sb.Append("Returned keys: ");
foreach (var e in all)
sb.Append(e.Key).Append(" ");
sb.AppendLine(";");
Assert.Fail(sb.ToString());
}
/// <summary>
/// Populates the cache with random entries and returns expected results set according to filter.
/// </summary>
/// <param name="cache">The cache.</param>
/// <param name="cnt">Amount of cache entries to create.</param>
/// <param name="loc">Local query flag.</param>
/// <param name="expectedEntryFilter">The expected entry filter.</param>
/// <returns>Expected results set.</returns>
private static HashSet<int> PopulateCache(ICache<int, QueryPerson> cache, bool loc, int cnt,
Func<int, bool> expectedEntryFilter)
{
var rand = new Random();
var exp = new HashSet<int>();
for (var i = 0; i < cnt; i++)
{
var val = rand.Next(100);
cache.Put(val, new QueryPerson(val.ToString(), val));
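// For local queries only keys that are primary on the local node can be returned,
// so the expected set is restricted accordingly.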
if (expectedEntryFilter(val) && (!loc || cache.Ignite.GetAffinity(cache.Name)
.IsPrimary(cache.Ignite.GetCluster().GetLocalNode(), val)))
exp.Add(val);
}
return exp;
}
}
/// <summary>
/// Person.
/// </summary>
public class QueryPerson
{
/// <summary>
/// Constructor.
/// </summary>
public QueryPerson()
{
// No-op.
}
/// <summary>
/// Constructor.
/// </summary>
/// <param name="name">Name.</param>
/// <param name="age">Age.</param>
public QueryPerson(string name, int age)
{
Name = name;
Age = age;
}
/// <summary>
/// Name.
/// </summary>
public string Name { get; set; }
/// <summary>
/// Age.
/// </summary>
public int Age { get; set; }
}
/// <summary>
/// Query filter.
/// </summary>
[Serializable]
public class ScanQueryFilter<TV> : ICacheEntryFilter<int, TV>
{
// Error message
public const string ErrMessage = "Error in ScanQueryFilter.Invoke";
// Error flag
public bool ThrowErr { get; set; }
/** <inheritdoc /> */
public bool Invoke(ICacheEntry<int, TV> entry)
{
if (ThrowErr)
throw new Exception(ErrMessage);
return entry.Key < 50;
}
}
/// <summary>
/// Binary query filter.
/// </summary>
public class BinarizableScanQueryFilter<TV> : ScanQueryFilter<TV>, IBinarizable
{
/** <inheritdoc /> */
public void WriteBinary(IBinaryWriter writer)
{
var w = writer.GetRawWriter();
w.WriteBoolean(ThrowErr);
}
/** <inheritdoc /> */
public void ReadBinary(IBinaryReader reader)
{
var r = reader.GetRawReader();
ThrowErr = r.ReadBoolean();
}
}
/// <summary>
/// Filter that can't be serialized.
/// </summary>
public class InvalidScanQueryFilter<TV> : ScanQueryFilter<TV>
{
// No-op.
}
}
| |
#if UNITY_EDITOR
using System;
using System.Collections.Generic;
using System.Linq;
using System.Reflection;
using UnityEditor;
using UnityEngine;
using Object = UnityEngine.Object;
using UMA;
using UMA.Integrations;
using UMACharacterSystem;
namespace UMAEditor
{
public partial class RecipeEditor
{
// Drop area for compatible Races
private void CompatibleRacesDropArea(Rect dropArea, List<string> compatibleRaces)
{
Event evt = Event.current;
//make the box clickable so that the user can select raceData assets from the asset selection window
if (evt.type == EventType.MouseUp)
{
if (dropArea.Contains(evt.mousePosition))
{
compatibleRacePickerID = EditorGUIUtility.GetControlID(new GUIContent("crfObjectPicker"), FocusType.Passive);
EditorGUIUtility.ShowObjectPicker<RaceData>(null, false, "", compatibleRacePickerID);
Event.current.Use();//stops the Mismatched LayoutGroup errors
return;
}
}
if (evt.commandName == "ObjectSelectorUpdated" && EditorGUIUtility.GetObjectPickerControlID() == compatibleRacePickerID)
{
RaceData tempRaceDataAsset = EditorGUIUtility.GetObjectPickerObject() as RaceData;
if (tempRaceDataAsset)
{
AddRaceDataAsset(tempRaceDataAsset, compatibleRaces);
}
Event.current.Use();//stops the Mismatched LayoutGroup errors
return;
}
if (evt.type == EventType.DragUpdated)
{
if (dropArea.Contains(evt.mousePosition))
{
DragAndDrop.visualMode = DragAndDropVisualMode.Copy;
}
}
if (evt.type == EventType.DragPerform)
{
if (dropArea.Contains(evt.mousePosition))
{
DragAndDrop.AcceptDrag();
UnityEngine.Object[] draggedObjects = DragAndDrop.objectReferences as UnityEngine.Object[];
for (int i = 0; i < draggedObjects.Length; i++)
{
if (draggedObjects[i])
{
RaceData tempRaceDataAsset = draggedObjects[i] as RaceData;
if (tempRaceDataAsset)
{
AddRaceDataAsset(tempRaceDataAsset, compatibleRaces);
continue;
}
var path = AssetDatabase.GetAssetPath(draggedObjects[i]);
if (System.IO.Directory.Exists(path))
{
RecursiveScanFoldersForAssets(path, compatibleRaces);
}
}
}
}
}
}
private void RecursiveScanFoldersForAssets(string path, List<string> compatibleRaces)
{
var assetFiles = System.IO.Directory.GetFiles(path, "*.asset");
foreach (var assetFile in assetFiles)
{
var tempRaceDataAsset = AssetDatabase.LoadAssetAtPath(assetFile, typeof(RaceData)) as RaceData;
if (tempRaceDataAsset)
{
AddRaceDataAsset(tempRaceDataAsset, compatibleRaces);
}
}
foreach (var subFolder in System.IO.Directory.GetDirectories(path))
{
RecursiveScanFoldersForAssets(subFolder.Replace('\\', '/'), compatibleRaces);
}
}
private void AddRaceDataAsset(RaceData raceDataAsset, List<string> compatibleRaces)
{
if (!compatibleRaces.Contains(raceDataAsset.raceName))
compatibleRaces.Add(raceDataAsset.raceName);
}
//this needs to generate labels too because the values are not the same as the labels
private int GenerateWardrobeSlotsEnum(string selectedOption, List<string> compatibleRaces = null, bool forceUpdate = false)
{
int selectedIndex = 0;
if (compatibleRaces == null)
{
selectedIndex = -1;
generatedWardrobeSlotOptionsLabels = generatedWardrobeSlotOptions = new List<string>() { "None", "Face", "Hair", "Complexion", "Eyebrows", "Beard", "Ears", "Helmet", "Shoulders", "Chest", "Arms", "Hands", "Waist", "Legs", "Feet" };
}
else
{
if (compatibleRaces.Count == 0)
{
selectedIndex = -1;
generatedWardrobeSlotOptionsLabels = generatedWardrobeSlotOptions = new List<string>() { "None", "Face", "Hair", "Complexion", "Eyebrows", "Beard", "Ears", "Helmet", "Shoulders", "Chest", "Arms", "Hands", "Waist", "Legs", "Feet" };
}
else if (generatedWardrobeSlotOptions.Count == 0 || forceUpdate)
{
//Clear the list if we are forcing update
if (forceUpdate)
{
generatedWardrobeSlotOptions = new List<string>();
generatedWardrobeSlotOptionsLabels = new List<string>();
}
List<RaceData> thisRaceDatas = new List<RaceData>();
for (int i = 0; i < compatibleRaces.Count; i++)
{
thisRaceDatas.Add(GetCompatibleRaceData(compatibleRaces[i]));
}
for (int i = 0; i < thisRaceDatas.Count; i++)
{
if (thisRaceDatas[i] != null)
{
List<string> thisWardrobeSlots = thisRaceDatas[i].wardrobeSlots;
for (int wi = 0; wi < thisWardrobeSlots.Count; wi++)
{
//WardrobeSlots display as 'Hair (FemaleOnly)' (for example) if the wardrobe slot is only available for one of the compatible races
if (compatibleRaces.Count > 1 && i > 0)
{
if (!generatedWardrobeSlotOptions.Contains(thisWardrobeSlots[wi]))
{
generatedWardrobeSlotOptions.Insert(wi, thisWardrobeSlots[wi]);
generatedWardrobeSlotOptionsLabels.Insert(wi, thisWardrobeSlots[wi]);
}
}
else
{
generatedWardrobeSlotOptions.Add(thisWardrobeSlots[wi]);
generatedWardrobeSlotOptionsLabels.Add(thisWardrobeSlots[wi]);
}
}
}
else
{
//Compatible Race is missing
thisRaceDatas.RemoveAt(i);
selectedIndex = -2;
}
}
for (int i = 0; i < generatedWardrobeSlotOptions.Count; i++)
{
List<string> onlyIn = new List<string>();
for (int ii = 0; ii < thisRaceDatas.Count; ii++)
{
if (thisRaceDatas[ii].wardrobeSlots.Contains(generatedWardrobeSlotOptions[i]))
{
onlyIn.Add(thisRaceDatas[ii].raceName);
}
}
if (onlyIn.Count < thisRaceDatas.Count)
{
//it's not in all of them
//generatedWardrobeSlotOptions[i] = generatedWardrobeSlotOptions[i] + " (" + String.Join(", ", onlyIn.ToArray()) + " Only)";
generatedWardrobeSlotOptionsLabels[i] = generatedWardrobeSlotOptionsLabels[i] + " (" + String.Join(", ", onlyIn.ToArray()) + " Only)";
}
}
}
}
if (generatedWardrobeSlotOptions.Count > 0)
{
for (int i = 0; i < generatedWardrobeSlotOptions.Count; i++)
{
if (generatedWardrobeSlotOptions[i] == selectedOption)
selectedIndex = i;
}
}
return selectedIndex;
}
//generate an option list for the BaseSlots that are available to hide for each race so we can make this a mask field too
private void GenerateBaseSlotsEnum(List<string> compatibleRaces, bool forceUpdate = false)
{
if (generatedBaseSlotOptions.Count == 0 || forceUpdate)
{
//clear the lists if we are forcing update
if (forceUpdate)
{
generatedBaseSlotOptions = new List<string>();
generatedBaseSlotOptionsLabels = new List<string>();
}
List<UMARecipeBase> thisBaseRecipes = new List<UMARecipeBase>();
Dictionary<string, List<string>> slotsRacesDict = new Dictionary<string, List<string>>();
for (int i = 0; i < compatibleRaces.Count; i++)
{
if (GetCompatibleRaceData(compatibleRaces[i]) == null)
continue;
thisBaseRecipes.Add(GetCompatibleRaceData(compatibleRaces[i]).baseRaceRecipe);
}
for (int i = 0; i < thisBaseRecipes.Count; i++)
{
if (thisBaseRecipes[i] != null)
{
UMAData.UMARecipe thisBaseRecipe = thisBaseRecipes[i].GetCachedRecipe(UMAContext.Instance);
SlotData[] thisSlots = thisBaseRecipe.GetAllSlots();
foreach (SlotData slot in thisSlots)
{
if (slot != null)
{
if (!generatedBaseSlotOptions.Contains(slot.asset.slotName))
{
generatedBaseSlotOptions.Add(slot.asset.slotName);
}
if (!slotsRacesDict.ContainsKey(slot.asset.slotName))
{
slotsRacesDict.Add(slot.asset.slotName, new List<string>());
}
slotsRacesDict[slot.asset.slotName].Add(compatibleRaces[i]);
}
}
}
}
//sort out the labels showing which race(s) the base slots are for if there is more than one compatible race
foreach (KeyValuePair<string, List<string>> kp in slotsRacesDict)
{
string compatibleRaceNames = "";
if (compatibleRaces.Count > 1)
{
compatibleRaceNames = " (" + String.Join(", ", kp.Value.ToArray()) + ")";
}
generatedBaseSlotOptionsLabels.Add(kp.Key + compatibleRaceNames);
}
}
}
private RaceData GetCompatibleRaceData(string raceName)
{
RaceData foundRace = null;
string[] foundRacesStrings = AssetDatabase.FindAssets("t:RaceData");
for (int i = 0; i < foundRacesStrings.Length; i++)
{
RaceData thisFoundRace = AssetDatabase.LoadAssetAtPath<RaceData>(AssetDatabase.GUIDToAssetPath(foundRacesStrings[i]));
if (thisFoundRace.raceName == raceName)
{
foundRace = thisFoundRace;
break;
}
}
return foundRace;
}
protected virtual bool DrawCompatibleRacesUI(Type TargetType)
{
bool doUpdate = false;
float padding = 2f;
//FieldInfos
FieldInfo CompatibleRacesField = TargetType.GetField("compatibleRaces", BindingFlags.Public | BindingFlags.Instance);
FieldInfo WardrobeRecipeThumbsField = TargetType.GetField("wardrobeRecipeThumbs", BindingFlags.Public | BindingFlags.Instance);
//may not be needed- Check
FieldInfo WardrobeSlotField = TargetType.GetField("wardrobeSlot", BindingFlags.Public | BindingFlags.Instance);
//field values
List<string> compatibleRaces = (List<string>)CompatibleRacesField.GetValue(target);
List<WardrobeRecipeThumb> wardrobeThumbs = (List<WardrobeRecipeThumb>)WardrobeRecipeThumbsField.GetValue(target);
string wardrobeSlot = (string)WardrobeSlotField.GetValue(target);
//new field values
List<string> newCompatibleRaces = new List<string>(compatibleRaces);
List<WardrobeRecipeThumb> newWardrobeThumbs = new List<WardrobeRecipeThumb>();
List<string> wardrobeThumbsDropDown = new List<string>();
if (compatibleRaces.Count > 0)
{
foreach (string cr in compatibleRaces)
{
bool wrtFound = false;
foreach (WardrobeRecipeThumb wrt in wardrobeThumbs)
{
if (wrt.race == cr)
{
newWardrobeThumbs.Add(wrt);
wrtFound = true;
}
}
if (wrtFound == false)
{
newWardrobeThumbs.Add(new WardrobeRecipeThumb(cr));
}
}
foreach (WardrobeRecipeThumb wrt in newWardrobeThumbs)
{
wardrobeThumbsDropDown.Add(wrt.race);
}
}
GUILayout.Space(10);
Rect dropArea = new Rect();
Rect dropAreaBox = new Rect();
if (compatibleRaces.Count > 0)
{
dropArea = GUILayoutUtility.GetRect(0.0f, 50.0f + EditorGUIUtility.singleLineHeight, GUILayout.ExpandWidth(true));
dropArea.width = dropArea.width - 85f;
dropAreaBox = dropArea;
dropAreaBox.y = dropAreaBox.y + EditorGUIUtility.singleLineHeight;
dropAreaBox.height = dropAreaBox.height - EditorGUIUtility.singleLineHeight;
}
else
{
dropArea = GUILayoutUtility.GetRect(0.0f, 50.0f, GUILayout.ExpandWidth(true));
dropAreaBox = dropArea;
}
GUI.Box(dropAreaBox, "Drag Races compatible with this Recipe here. Click to pick.");
if (compatibleRaces.Count > 0)
{
for (int i = 0; i < compatibleRaces.Count; i++)
{
GUILayout.Space(padding);
GUI.enabled = false; //make the field read-only to prevent typos
Rect crfRect = GUILayoutUtility.GetRect(0.0f, EditorGUIUtility.singleLineHeight, GUILayout.ExpandWidth(true));
Rect crfDelRect = crfRect;
crfRect.width = crfRect.width - 75f - 20f - 20f;
crfDelRect.width = 20f + padding;
crfDelRect.x = crfRect.width + 20f + padding;
EditorGUI.TextField(crfRect, compatibleRaces[i]);
GUI.enabled = true;
if (GUI.Button(crfDelRect, "X"))
{
newCompatibleRaces.RemoveAt(i);
}
}
Rect thumbnailRect = dropArea;
thumbnailRect.x = dropArea.width + padding + 20f;
thumbnailRect.width = 75f;
thumbnailRect.y = thumbnailRect.y - 3f;
Rect thumbnailDDRect = thumbnailRect;
Rect thumbnailThumbRect = thumbnailRect;
thumbnailThumbRect.height = 75f;
EditorGUI.LabelField(thumbnailRect, "Thumbnail");
thumbnailDDRect.y = thumbnailDDRect.y + EditorGUIUtility.singleLineHeight;
thumbnailThumbRect.y = thumbnailThumbRect.y + EditorGUIUtility.singleLineHeight;
if (newCompatibleRaces.Count > 1)
{
thumbnailThumbRect.y = thumbnailThumbRect.y + EditorGUIUtility.singleLineHeight + padding;
selectedWardrobeThumb = EditorGUI.Popup(thumbnailDDRect, selectedWardrobeThumb, wardrobeThumbsDropDown.ToArray());
}
if (newWardrobeThumbs.Count != newCompatibleRaces.Count)
{
selectedWardrobeThumb = 0;
}
EditorGUI.BeginChangeCheck();
var thisImg = EditorGUI.ObjectField(thumbnailThumbRect, newWardrobeThumbs[selectedWardrobeThumb].thumb, typeof(Sprite), false);
if (EditorGUI.EndChangeCheck())
{
if (thisImg != newWardrobeThumbs[selectedWardrobeThumb].thumb)
{
newWardrobeThumbs[selectedWardrobeThumb].thumb = (Sprite)thisImg;
doUpdate = true;
}
}
}
else
{
EditorGUILayout.HelpBox("No Compatible Races set. This " + TargetType.ToString() + " will be available to all races.", MessageType.None);
}
CompatibleRacesDropArea(dropArea, newCompatibleRaces);
//update values
if (!AreListsEqual<string>(newCompatibleRaces, compatibleRaces))
{
//if the compatible races has changed we need to regenerate the enums
//If the libraries cannot load the raceBaseRecipe because of missing slots/overlays
//we dont want to actually change anything and need to show an error- but still show the recipe as it was
try
{
GenerateBaseSlotsEnum(newCompatibleRaces, true);
}
catch (UMAResourceNotFoundException e)
{
newCompatibleRaces = new List<string>(compatibleRaces);
Debug.LogError("The Recipe Editor could not add the selected compatible race because some required assets could not be found: " + e.Message);
}
GenerateWardrobeSlotsEnum(wardrobeSlot, newCompatibleRaces, true);
CompatibleRacesField.SetValue(target, newCompatibleRaces);
doUpdate = true;
}
if (!AreListsEqual<WardrobeRecipeThumb>(newWardrobeThumbs, wardrobeThumbs))
{
WardrobeRecipeThumbsField.SetValue(target, newWardrobeThumbs);
doUpdate = true;
}
return doUpdate;
}
protected virtual bool DrawWardrobeSlotsFields(Type TargetType)
{
bool doUpdate = false;
//Field Infos
FieldInfo CompatibleRacesField = TargetType.GetField("compatibleRaces", BindingFlags.Public | BindingFlags.Instance);
FieldInfo WardrobeSlotField = TargetType.GetField("wardrobeSlot", BindingFlags.Public | BindingFlags.Instance);
FieldInfo SuppressWardrobeSlotField = TargetType.GetField("suppressWardrobeSlots", BindingFlags.Public | BindingFlags.Instance);
FieldInfo HidesField = TargetType.GetField("Hides", BindingFlags.Public | BindingFlags.Instance);
FieldInfo DisplayValueField = TargetType.GetField("DisplayValue", BindingFlags.Public | BindingFlags.Instance);
//field values
List<string> compatibleRaces = (List<string>)CompatibleRacesField.GetValue(target);
string wardrobeSlot = (string)WardrobeSlotField.GetValue(target);
List<string> suppressWardrobeSlot = (List<string>)SuppressWardrobeSlotField.GetValue(target);
List<string> hides = (List<string>)HidesField.GetValue(target);
string displayValue = (string)DisplayValueField.GetValue(target);
//displayValue UI
string PreviousValue = displayValue;
displayValue = EditorGUILayout.TextField("Display Value", displayValue);
if (displayValue != PreviousValue)
{
DisplayValueField.SetValue(target, displayValue);
doUpdate = true;
}
//wardrobeSlot UI
int selectedWardrobeSlotIndex = GenerateWardrobeSlotsEnum(wardrobeSlot, compatibleRaces, false);
string newWardrobeSlot;
int newSuppressFlags = 0;
List<string> newSuppressWardrobeSlot = new List<string>();
if (selectedWardrobeSlotIndex == -1)
{
EditorGUILayout.LabelField("No Compatible Races set. You need to select a compatible race in order to set a wardrobe slot");
newWardrobeSlot = "None";
}
else if (selectedWardrobeSlotIndex == -2)
{
EditorGUILayout.LabelField("Not all compatible races found. Do you have the all correct Race(s) available Locally?");
newWardrobeSlot = "None";
}
else
{
int newSelectedWardrobeSlotIndex = EditorGUILayout.Popup("Wardrobe Slot", selectedWardrobeSlotIndex, generatedWardrobeSlotOptionsLabels.ToArray());
if (newSelectedWardrobeSlotIndex != selectedWardrobeSlotIndex)
{
WardrobeSlotField.SetValue(target, generatedWardrobeSlotOptions[newSelectedWardrobeSlotIndex]);
doUpdate = true;
}
newWardrobeSlot = generatedWardrobeSlotOptions.Count > 0 ? generatedWardrobeSlotOptions[selectedWardrobeSlotIndex] : "None";
}
//SuppressedSlots UI
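// Build a bitmask over generatedWardrobeSlotOptions (bit i == option i) so the MaskField below can display
// the currently suppressed slots and return the new selection as flags.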
int suppressFlags = 0;
for (int i = 0; i < generatedWardrobeSlotOptions.Count; i++)
{
if (suppressWardrobeSlot.Contains(generatedWardrobeSlotOptions[i]))
{
suppressFlags |= 0x1 << i;
}
}
newSuppressFlags = EditorGUILayout.MaskField("Suppress Wardrobe Slot(s)", suppressFlags, generatedWardrobeSlotOptionsLabels.ToArray());
for (int i = 0; i < generatedWardrobeSlotOptions.Count; i++)
{
if ((newSuppressFlags & (1 << i)) == (1 << i))
{
newSuppressWardrobeSlot.Add(generatedWardrobeSlotOptions[i]);
}
}
if (newSuppressWardrobeSlot.Count > 1)
{
GUI.enabled = false;
string swsl2Result = String.Join(", ", newSuppressWardrobeSlot.ToArray());
EditorGUILayout.TextField(swsl2Result);
GUI.enabled = true;
}
//Hides UI
GenerateBaseSlotsEnum(compatibleRaces, false);
int hiddenBaseFlags = 0;
List<string> newHides = new List<string>();
for (int i = 0; i < generatedBaseSlotOptions.Count; i++)
{
if (hides.Contains(generatedBaseSlotOptions[i]))
{
hiddenBaseFlags |= 0x1 << i;
}
}
int newHiddenBaseFlags = 0;
newHiddenBaseFlags = EditorGUILayout.MaskField("Hides Base Slot(s)", hiddenBaseFlags, generatedBaseSlotOptionsLabels.ToArray());
for (int i = 0; i < generatedBaseSlotOptionsLabels.Count; i++)
{
if ((newHiddenBaseFlags & (1 << i)) == (1 << i))
{
newHides.Add(generatedBaseSlotOptions[i]);
}
}
if (newHides.Count > 1)
{
GUI.enabled = false;
string newHidesResult = String.Join(", ", newHides.ToArray());
EditorGUILayout.TextField(newHidesResult);
GUI.enabled = true;
}
//Update the values
if (newWardrobeSlot != wardrobeSlot)
{
WardrobeSlotField.SetValue(target, newWardrobeSlot);
doUpdate = true;
}
if (!AreListsEqual<string>(newSuppressWardrobeSlot, suppressWardrobeSlot))
{
SuppressWardrobeSlotField.SetValue(target, newSuppressWardrobeSlot);
doUpdate = true;
}
if (!AreListsEqual<string>(newHides, hides))
{
HidesField.SetValue(target, newHides);
doUpdate = true;
}
return doUpdate;
}
/// <summary>
/// An editor for a WardrobeRecipe that shows sharedColors and Slots but hides the 'raceData' field (because WardrobeRecipes have a 'compatibleRaces' list)
/// </summary>
public class WardrobeRecipeMasterEditor : SlotMasterEditor
{
public WardrobeRecipeMasterEditor(UMAData.UMARecipe recipe) : base(recipe)
{
}
public override bool OnGUI(ref bool _dnaDirty, ref bool _textureDirty, ref bool _meshDirty)
{
bool changed = false;
if (_sharedColorsEditor.OnGUI(_recipe))
{
changed = true;
_textureDirty = true;
}
if (GUILayout.Button("Remove Nulls"))
{
var newList = new List<SlotData>(_recipe.slotDataList.Length);
foreach (var slotData in _recipe.slotDataList)
{
if (slotData != null) newList.Add(slotData);
}
_recipe.slotDataList = newList.ToArray();
changed |= true;
_dnaDirty |= true;
_textureDirty |= true;
_meshDirty |= true;
}
GUILayout.Space(20);
Rect dropArea = GUILayoutUtility.GetRect(0.0f, 50.0f, GUILayout.ExpandWidth(true));
GUI.Box(dropArea, "Drag Slots here");
GUILayout.Space(20);
if (DropAreaGUI(dropArea))
{
changed |= true;
_dnaDirty |= true;
_textureDirty |= true;
_meshDirty |= true;
}
var added = (SlotDataAsset)EditorGUILayout.ObjectField("Add Slot", null, typeof(SlotDataAsset), false);
if (added != null)
{
var slot = new SlotData(added);
_recipe.MergeSlot(slot, false);
changed |= true;
_dnaDirty |= true;
_textureDirty |= true;
_meshDirty |= true;
}
GUILayout.BeginHorizontal();
if (GUILayout.Button("Collapse All"))
{
foreach (SlotEditor se in _slotEditors)
{
se.FoldOut = false;
}
}
if (GUILayout.Button("Expand All"))
{
foreach (SlotEditor se in _slotEditors)
{
se.FoldOut = true;
}
}
GUILayout.EndHorizontal();
for (int i = 0; i < _slotEditors.Count; i++)
{
var editor = _slotEditors[i];
if (editor == null)
{
GUILayout.Label("Empty Slot");
continue;
}
changed |= editor.OnGUI(ref _dnaDirty, ref _textureDirty, ref _meshDirty);
if (editor.Delete)
{
_dnaDirty = true;
_textureDirty = true;
_meshDirty = true;
_slotEditors.RemoveAt(i);
_recipe.SetSlot(editor.idx, null);
i--;
changed = true;
}
}
return changed;
}
}
}
}
#endif
| |
// ----------------------------------------------------------------------------------
//
// Copyright Microsoft Corporation
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// ----------------------------------------------------------------------------------
namespace Microsoft.WindowsAzure.Management.Sync.Download
{
using System;
using System.Collections;
using System.Collections.Specialized;
using System.Runtime.Serialization;
using System.Text;
using System.Web;
public class BlobUri
{
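// Illustrative example (assumed URI shape): for https://myaccount.blob.core.windows.net/mycontainer/dir/disk.vhd?sv=...
// TryParseUri yields StorageAccountName = "myaccount", StorageDomainName = "blob.core.windows.net",
// BlobContainerName = "mycontainer" and BlobName = "dir/disk.vhd".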
public static bool TryParseUri(Uri uri, out BlobUri blobUri)
{
blobUri = null;
string storageAccountName;
string storageDomainName;
string blobContainerName;
string blobName;
string queryString;
string secret;
var result = TryParseUri(uri,
out storageAccountName,
out storageDomainName,
out blobContainerName,
out blobName,
out queryString,
out secret);
if (!result)
{
return false;
}
blobUri = new BlobUri(uri, storageAccountName, storageDomainName, blobContainerName, blobName, queryString);
return true;
}
internal static bool TryParseUri(Uri blobUri,
out string storageAccountName,
out string storageDomainName,
out string blobContainerName,
out string blobName,
out string queryString,
out string secret)
{
storageAccountName = null;
storageDomainName = null;
blobContainerName = null;
blobName = null;
secret = null;
queryString = null;
string[] hostSegments = blobUri.DnsSafeHost.ToLower().Split('.');
if (hostSegments.Length < 2)
{
return false;
}
storageAccountName = hostSegments[0];
storageDomainName = string.Join(".", hostSegments, 1, hostSegments.Length - 1);
blobContainerName = null;
blobName = null;
string[] segments = blobUri.AbsolutePath.Split(new char[] { '/' }, StringSplitOptions.RemoveEmptyEntries);
if (segments.Length < 2)
{
return false; //Must have at least a containerName and BlobName
}
blobName = HttpUtility.UrlDecode(
string.Join(
"/", segments, 1, segments.Length - 1));
blobContainerName = HttpUtility.UrlDecode(segments[0]);
NameValueCollection queryValues = HttpUtility.ParseQueryString(blobUri.Query);
StringBuilder queryBuilder = new StringBuilder();
bool firstQuery = true;
foreach (string key in queryValues.AllKeys)
{
if (string.Equals(key, "dsas_secret", StringComparison.OrdinalIgnoreCase))
{
secret = queryValues["dsas_secret"];
}
else
{
if (firstQuery)
{
queryBuilder.AppendFormat("?{0}={1}", key.Replace("?", ""), queryValues[key]);
firstQuery = false;
}
else
{
queryBuilder.AppendFormat("&{0}={1}", key, queryValues[key]);
}
}
}
if (!string.IsNullOrEmpty(secret))
{
//TODO[JWA], Find out the complete set of decoding;
secret = secret.Replace(' ', '+');
}
queryString = queryBuilder.ToString();
return true;
}
public Uri Uri { get; private set; }
public string BaseUri { get; private set; }
public string StorageAccountName { get; private set; }
public string StorageDomainName { get; private set; }
public string BlobContainerName { get; private set; }
public string BlobName { get; private set; }
public string QueryString { get; set; }
public string BlobPath { get; private set; }
public BlobUri(Uri uri, string storageAccountName, string storageDomainName, string blobContainerName, string blobName, string queryString)
{
Uri = uri;
StorageAccountName = storageAccountName;
StorageDomainName = storageDomainName;
BlobContainerName = blobContainerName;
BlobName = blobName;
QueryString = queryString;
BaseUri = uri.Scheme + Uri.SchemeDelimiter + uri.DnsSafeHost;
BlobPath = this.BaseUri + uri.LocalPath;
}
}
#region HttpUtility port from .Net 4.0
class HttpUtility
{
public static string UrlDecode(string str)
{
if (str == null)
{
return null;
}
return UrlDecode(str, Encoding.UTF8);
}
public static string UrlDecode(string str, Encoding e)
{
if (str == null)
{
return null;
}
return UrlDecodeStringFromStringInternal(str, e);
}
private static string UrlDecodeStringFromStringInternal(string s, Encoding e)
{
int length = s.Length;
UrlDecoder decoder = new UrlDecoder(length, e);
for (int i = 0; i < length; i++)
{
char ch = s[i];
if (ch == '+')
{
ch = ' ';
}
else if ((ch == '%') && (i < (length - 2)))
{
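// "%uXXXX" escapes a UTF-16 code unit as four hex digits and is added as a char;
// plain "%XX" escapes a single byte, which is buffered and later decoded with the supplied encoding.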
if ((s[i + 1] == 'u') && (i < (length - 5)))
{
int num3 = HexToInt(s[i + 2]);
int num4 = HexToInt(s[i + 3]);
int num5 = HexToInt(s[i + 4]);
int num6 = HexToInt(s[i + 5]);
if (((num3 < 0) || (num4 < 0)) || ((num5 < 0) || (num6 < 0)))
{
goto Label_0106;
}
ch = (char)((((num3 << 12) | (num4 << 8)) | (num5 << 4)) | num6);
i += 5;
decoder.AddChar(ch);
continue;
}
int num7 = HexToInt(s[i + 1]);
int num8 = HexToInt(s[i + 2]);
if ((num7 >= 0) && (num8 >= 0))
{
byte b = (byte)((num7 << 4) | num8);
i += 2;
decoder.AddByte(b);
continue;
}
}
Label_0106:
if ((ch & 0xff80) == 0)
{
decoder.AddByte((byte)ch);
}
else
{
decoder.AddChar(ch);
}
}
return decoder.GetString();
}
private static int HexToInt(char h)
{
if ((h >= '0') && (h <= '9'))
{
return (h - '0');
}
if ((h >= 'a') && (h <= 'f'))
{
return ((h - 'a') + 10);
}
if ((h >= 'A') && (h <= 'F'))
{
return ((h - 'A') + 10);
}
return -1;
}
public static string UrlEncodeUnicode(string str)
{
if (str == null)
{
return null;
}
return UrlEncodeUnicodeStringToStringInternal(str, false);
}
private static string UrlEncodeUnicodeStringToStringInternal(string s, bool ignoreAscii)
{
int length = s.Length;
StringBuilder builder = new StringBuilder(length);
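// Safe ASCII characters pass through unchanged, spaces become '+', other ASCII characters become %XX,
// and non-ASCII characters are written as %uXXXX using their UTF-16 code unit value.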
for (int i = 0; i < length; i++)
{
char ch = s[i];
if ((ch & 0xff80) == 0)
{
if (ignoreAscii || IsSafe(ch))
{
builder.Append(ch);
}
else if (ch == ' ')
{
builder.Append('+');
}
else
{
builder.Append('%');
builder.Append(IntToHex((ch >> 4) & '\x000f'));
builder.Append(IntToHex(ch & '\x000f'));
}
}
else
{
builder.Append("%u");
builder.Append(IntToHex((ch >> 12) & '\x000f'));
builder.Append(IntToHex((ch >> 8) & '\x000f'));
builder.Append(IntToHex((ch >> 4) & '\x000f'));
builder.Append(IntToHex(ch & '\x000f'));
}
}
return builder.ToString();
}
internal static bool IsSafe(char ch)
{
if ((((ch >= 'a') && (ch <= 'z')) || ((ch >= 'A') && (ch <= 'Z'))) || ((ch >= '0') && (ch <= '9')))
{
return true;
}
switch (ch)
{
case '\'':
case '(':
case ')':
case '*':
case '-':
case '.':
case '_':
case '!':
return true;
}
return false;
}
internal static char IntToHex(int n)
{
if (n <= 9)
{
return (char)(n + 0x30);
}
return (char)((n - 10) + 0x61);
}
public static NameValueCollection ParseQueryString(string query)
{
return ParseQueryString(query, Encoding.UTF8);
}
public static NameValueCollection ParseQueryString(string query, Encoding encoding)
{
if (query == null)
{
throw new ArgumentNullException("query");
}
if (encoding == null)
{
throw new ArgumentNullException("encoding");
}
if ((query.Length > 0) && (query[0] == '?'))
{
query = query.Substring(1);
}
return new HttpValueCollection(query, false, true, encoding);
}
private class UrlDecoder
{
// Fields
private int _bufferSize;
private byte[] _byteBuffer;
private char[] _charBuffer;
private Encoding _encoding;
private int _numBytes;
private int _numChars;
// Methods
internal UrlDecoder(int bufferSize, Encoding encoding)
{
this._bufferSize = bufferSize;
this._encoding = encoding;
this._charBuffer = new char[bufferSize];
}
internal void AddByte(byte b)
{
if (this._byteBuffer == null)
{
this._byteBuffer = new byte[this._bufferSize];
}
this._byteBuffer[this._numBytes++] = b;
}
internal void AddChar(char ch)
{
if (this._numBytes > 0)
{
this.FlushBytes();
}
this._charBuffer[this._numChars++] = ch;
}
private void FlushBytes()
{
if (this._numBytes > 0)
{
this._numChars += this._encoding.GetChars(this._byteBuffer, 0, this._numBytes, this._charBuffer, this._numChars);
this._numBytes = 0;
}
}
internal string GetString()
{
if (this._numBytes > 0)
{
this.FlushBytes();
}
if (this._numChars > 0)
{
return new string(this._charBuffer, 0, this._numChars);
}
return string.Empty;
}
}
}
[Serializable]
internal class HttpValueCollection : NameValueCollection
{
// Methods
internal HttpValueCollection()
: base(StringComparer.OrdinalIgnoreCase)
{
}
internal HttpValueCollection(int capacity)
: base(capacity, (IEqualityComparer)StringComparer.OrdinalIgnoreCase)
{
}
protected HttpValueCollection(SerializationInfo info, StreamingContext context)
: base(info, context)
{
}
internal HttpValueCollection(string str, bool readOnly, bool urlencoded, Encoding encoding)
: base(StringComparer.OrdinalIgnoreCase)
{
if (!string.IsNullOrEmpty(str))
{
this.FillFromString(str, urlencoded, encoding);
}
base.IsReadOnly = readOnly;
}
internal void FillFromString(string s)
{
this.FillFromString(s, false, null);
}
internal void FillFromString(string s, bool urlencoded, Encoding encoding)
{
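// Split the string on '&' into name=value pairs: the first '=' separates name from value,
// a pair without '=' is stored under a null key, and both parts are URL-decoded when urlencoded is true.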
int num = (s != null) ? s.Length : 0;
for (int i = 0; i < num; i++)
{
int startIndex = i;
int num4 = -1;
while (i < num)
{
char ch = s[i];
if (ch == '=')
{
if (num4 < 0)
{
num4 = i;
}
}
else if (ch == '&')
{
break;
}
i++;
}
string str = null;
string str2 = null;
if (num4 >= 0)
{
str = s.Substring(startIndex, num4 - startIndex);
str2 = s.Substring(num4 + 1, (i - num4) - 1);
}
else
{
str2 = s.Substring(startIndex, i - startIndex);
}
if (urlencoded)
{
base.Add(HttpUtility.UrlDecode(str, encoding), HttpUtility.UrlDecode(str2, encoding));
}
else
{
base.Add(str, str2);
}
if ((i == (num - 1)) && (s[i] == '&'))
{
base.Add(null, string.Empty);
}
}
}
internal void MakeReadOnly()
{
base.IsReadOnly = true;
}
internal void MakeReadWrite()
{
base.IsReadOnly = false;
}
internal void Reset()
{
base.Clear();
}
public override string ToString()
{
return this.ToString(true);
}
internal virtual string ToString(bool urlencoded)
{
return this.ToString(urlencoded, null);
}
internal virtual string ToString(bool urlencoded, IDictionary excludeKeys)
{
int count = this.Count;
if (count == 0)
{
return string.Empty;
}
StringBuilder builder = new StringBuilder();
bool flag = (excludeKeys != null) && (excludeKeys["__VIEWSTATE"] != null);
for (int i = 0; i < count; i++)
{
string key = this.GetKey(i);
if (((!flag || (key == null)) || !key.StartsWith("__VIEWSTATE", StringComparison.Ordinal)) && (((excludeKeys == null) || (key == null)) || (excludeKeys[key] == null)))
{
string str3;
if (urlencoded)
{
key = HttpUtility.UrlEncodeUnicode(key);
}
string str2 = !string.IsNullOrEmpty(key) ? (key + "=") : string.Empty;
ArrayList list = (ArrayList)base.BaseGet(i);
int num3 = (list != null) ? list.Count : 0;
if (builder.Length > 0)
{
builder.Append('&');
}
if (num3 == 1)
{
builder.Append(str2);
str3 = (string)list[0];
if (urlencoded)
{
str3 = HttpUtility.UrlEncodeUnicode(str3);
}
builder.Append(str3);
}
else if (num3 == 0)
{
builder.Append(str2);
}
else
{
for (int j = 0; j < num3; j++)
{
if (j > 0)
{
builder.Append('&');
}
builder.Append(str2);
str3 = (string)list[j];
if (urlencoded)
{
str3 = HttpUtility.UrlEncodeUnicode(str3);
}
builder.Append(str3);
}
}
}
}
return builder.ToString();
}
}
#endregion
}
| |
/*
* Copyright 2013 ZXing authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
using System;
namespace ZXing.PDF417.Internal
{
/// <summary>
/// Represents a Column in the Detection Result
/// </summary>
/// <author>Guenther Grau</author>
public sealed class DetectionResultRowIndicatorColumn : DetectionResultColumn
{
/// <summary>
/// Gets or sets a value indicating whether this instance is the left indicator
/// </summary>
/// <value><c>true</c> if this instance is left; otherwise, <c>false</c>.</value>
public bool IsLeft { get; set; }
/// <summary>
/// Initializes a new instance of the <see cref="ZXing.PDF417.Internal.DetectionResultRowIndicatorColumn"/> class.
/// </summary>
/// <param name="box">Box.</param>
/// <param name="isLeft">If set to <c>true</c> is left.</param>
public DetectionResultRowIndicatorColumn(BoundingBox box, bool isLeft)
: base(box)
{
this.IsLeft = isLeft;
}
/// <summary>
/// Sets the Row Numbers as Indicator Columns
/// </summary>
public void setRowNumbers()
{
foreach (var cw in Codewords)
{
if (cw != null)
{
cw.setRowNumberAsRowIndicatorColumn();
}
}
}
/// <summary>
/// TODO implement properly
/// TODO maybe we should add missing codewords to store the correct row number to make
/// finding row numbers for other columns easier
/// use row height count to make detection of invalid row numbers more reliable
/// </summary>
/// <returns>The indicator column row numbers.</returns>
/// <param name="metadata">Metadata.</param>
public int adjustCompleteIndicatorColumnRowNumbers(BarcodeMetadata metadata)
{
var codewords = Codewords;
setRowNumbers(); // Assign this as an indicator column
removeIncorrectCodewords(codewords, metadata);
ResultPoint top = IsLeft ? Box.TopLeft : Box.TopRight;
ResultPoint bottom = IsLeft ? Box.BottomLeft : Box.BottomRight;
int firstRow = imageRowToCodewordIndex((int) top.Y);
int lastRow = imageRowToCodewordIndex((int) bottom.Y);
// We need to be careful using the average row height.
// Barcode could be skewed so that we have smaller and taller rows
float averageRowHeight = (lastRow - firstRow)/(float) metadata.RowCount;
// initialize loop
int barcodeRow = -1;
int maxRowHeight = 1;
int currentRowHeight = 0;
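// Walk the indicator column top to bottom, tracking the current barcode row and how many codewords share it;
// codewords whose row number decreases, exceeds the row count, or jumps implausibly far are treated as
// misreads and cleared below.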
for (int codewordRow = firstRow; codewordRow < lastRow; codewordRow++)
{
var codeword = codewords[codewordRow];
if (codeword == null)
{
continue;
}
// float expectedRowNumber = (codewordsRow - firstRow) / averageRowHeight;
// if (Math.abs(codeword.getRowNumber() - expectedRowNumber) > 2) {
// SimpleLog.log(LEVEL.WARNING,
// "Removing codeword, rowNumberSkew too high, codeword[" + codewordsRow + "]: Expected Row: " +
// expectedRowNumber + ", RealRow: " + codeword.getRowNumber() + ", value: " + codeword.getValue());
// codewords[codewordsRow] = null;
// }
int rowDifference = codeword.RowNumber - barcodeRow;
// TODO improve handling with case where first row indicator doesn't start with 0
if (rowDifference == 0)
{
currentRowHeight++;
}
else if (rowDifference == 1)
{
maxRowHeight = Math.Max(maxRowHeight, currentRowHeight);
currentRowHeight = 1;
barcodeRow = codeword.RowNumber;
}
else if (rowDifference < 0 ||
codeword.RowNumber >= metadata.RowCount ||
rowDifference > codewordRow)
{
codewords[codewordRow] = null;
}
else
{
int checkedRows;
if (maxRowHeight > 2)
{
checkedRows = (maxRowHeight - 2)*rowDifference;
}
else
{
checkedRows = rowDifference;
}
bool closePreviousCodewordFound = checkedRows > codewordRow;
for (int i = 1; i <= checkedRows && !closePreviousCodewordFound; i++)
{
// there must be (height * rowDifference) number of codewords missing. For now we assume height = 1.
// This should hopefully get rid of most problems already.
closePreviousCodewordFound = codewords[codewordRow - i] != null;
}
if (closePreviousCodewordFound)
{
codewords[codewordRow] = null;
}
else
{
barcodeRow = codeword.RowNumber;
currentRowHeight = 1;
}
}
}
return (int) (averageRowHeight + 0.5);
}
/// <summary>
/// Gets the row heights.
/// </summary>
/// <returns>The row heights.</returns>
public int[] getRowHeights()
{
BarcodeMetadata barcodeMetadata = getBarcodeMetadata();
if (barcodeMetadata == null)
{
return null;
}
adjustIncompleteIndicatorColumnRowNumbers(barcodeMetadata);
int[] result = new int[barcodeMetadata.RowCount];
foreach (var codeword in Codewords)
{
if (codeword != null)
{
int rowNumber = codeword.RowNumber;
if (rowNumber >= result.Length)
{
return null;
}
result[rowNumber]++;
} // else throw exception? (or return null)
}
return result;
}
/// <summary>
/// Adjusts the incomplete indicator column row numbers.
/// </summary>
/// <param name="metadata">Metadata.</param>
public int adjustIncompleteIndicatorColumnRowNumbers(BarcodeMetadata metadata)
{
// TODO maybe we should add missing codewords to store the correct row number to make
// finding row numbers for other columns easier
// use row height count to make detection of invalid row numbers more reliable
ResultPoint top = IsLeft ? Box.TopLeft : Box.TopRight;
ResultPoint bottom = IsLeft ? Box.BottomLeft : Box.BottomRight;
int firstRow = imageRowToCodewordIndex((int) top.Y);
int lastRow = imageRowToCodewordIndex((int) bottom.Y);
// We need to be careful using the average row height.
// Barcode could be skewed so that we have smaller and taller rows
float averageRowHeight = (lastRow - firstRow)/(float) metadata.RowCount;
var codewords = Codewords;
// initialize loop
int barcodeRow = -1;
int maxRowHeight = 1;
int currentRowHeight = 0;
for (int codewordRow = firstRow; codewordRow < lastRow; codewordRow++)
{
var codeword = codewords[codewordRow];
if (codeword == null)
{
continue;
}
codeword.setRowNumberAsRowIndicatorColumn();
int rowDifference = codeword.RowNumber - barcodeRow;
// TODO improve handling with case where first row indicator doesn't start with 0
if (rowDifference == 0)
{
currentRowHeight++;
}
else if (rowDifference == 1)
{
maxRowHeight = Math.Max(maxRowHeight, currentRowHeight);
currentRowHeight = 1;
barcodeRow = codeword.RowNumber;
}
else if (codeword.RowNumber > metadata.RowCount)
{
Codewords[codewordRow] = null;
}
else
{
barcodeRow = codeword.RowNumber;
currentRowHeight = 1;
}
}
return (int) (averageRowHeight + 0.5);
}
/// <summary>
/// Gets the barcode metadata.
/// </summary>
/// <returns>The barcode metadata.</returns>
public BarcodeMetadata getBarcodeMetadata()
{
var codewords = Codewords;
BarcodeValue barcodeColumnCount = new BarcodeValue();
BarcodeValue barcodeRowCountUpperPart = new BarcodeValue();
BarcodeValue barcodeRowCountLowerPart = new BarcodeValue();
BarcodeValue barcodeECLevel = new BarcodeValue();
foreach (Codeword codeword in codewords)
{
if (codeword == null)
{
continue;
}
codeword.setRowNumberAsRowIndicatorColumn();
int rowIndicatorValue = codeword.Value%30;
int codewordRowNumber = codeword.RowNumber;
if (!IsLeft)
{
codewordRowNumber += 2;
}
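// Row indicator values carry three kinds of metadata depending on the row number modulo 3:
// part of the row count (case 0), the error correction level plus the rest of the row count (case 1),
// and the column count (case 2).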
switch (codewordRowNumber%3)
{
case 0:
barcodeRowCountUpperPart.setValue(rowIndicatorValue*3 + 1);
break;
case 1:
barcodeECLevel.setValue(rowIndicatorValue/3);
barcodeRowCountLowerPart.setValue(rowIndicatorValue%3);
break;
case 2:
barcodeColumnCount.setValue(rowIndicatorValue + 1);
break;
}
}
// Maybe we should check if we have ambiguous values?
var barcodeColumnCountValues = barcodeColumnCount.getValue();
var barcodeRowCountUpperPartValues = barcodeRowCountUpperPart.getValue();
var barcodeRowCountLowerPartValues = barcodeRowCountLowerPart.getValue();
var barcodeECLevelValues = barcodeECLevel.getValue();
if ((barcodeColumnCountValues.Length == 0) ||
(barcodeRowCountUpperPartValues.Length == 0) ||
(barcodeRowCountLowerPartValues.Length == 0) ||
(barcodeECLevelValues.Length == 0) ||
barcodeColumnCountValues[0] < 1 ||
barcodeRowCountUpperPartValues[0] + barcodeRowCountLowerPartValues[0] < PDF417Common.MIN_ROWS_IN_BARCODE ||
barcodeRowCountUpperPartValues[0] + barcodeRowCountLowerPartValues[0] > PDF417Common.MAX_ROWS_IN_BARCODE)
{
return null;
}
var barcodeMetadata = new BarcodeMetadata(barcodeColumnCountValues[0],
barcodeRowCountUpperPartValues[0],
barcodeRowCountLowerPartValues[0],
barcodeECLevelValues[0]);
removeIncorrectCodewords(codewords, barcodeMetadata);
return barcodeMetadata;
}
/// <summary>
/// Prune the codewords which do not match the metadata
/// TODO Maybe we should keep the incorrect codewords for the start and end positions?
/// </summary>
/// <param name="codewords">Codewords.</param>
/// <param name="metadata">Metadata.</param>
private void removeIncorrectCodewords(Codeword[] codewords, BarcodeMetadata metadata)
{
for (int row = 0; row < codewords.Length; row++)
{
var codeword = codewords[row];
if (codeword == null)
continue;
int indicatorValue = codeword.Value%30;
int rowNumber = codeword.RowNumber;
// Row does not exist in the metadata
if (rowNumber >= metadata.RowCount) // different to java rowNumber > metadata.RowCount
{
codewords[row] = null; // remove this.
continue;
}
if (!IsLeft)
{
rowNumber += 2;
}
switch (rowNumber%3)
{
default:
case 0:
if (indicatorValue*3 + 1 != metadata.RowCountUpper)
{
codewords[row] = null;
}
break;
case 1:
if (indicatorValue%3 != metadata.RowCountLower ||
indicatorValue/3 != metadata.ErrorCorrectionLevel)
{
codewords[row] = null;
}
break;
case 2:
if (indicatorValue + 1 != metadata.ColumnCount)
{
codewords[row] = null;
}
break;
}
}
}
/// <summary>
/// Returns a <see cref="System.String"/> that represents the current <see cref="ZXing.PDF417.Internal.DetectionResultRowIndicatorColumn"/>.
/// </summary>
/// <returns>A <see cref="System.String"/> that represents the current <see cref="ZXing.PDF417.Internal.DetectionResultRowIndicatorColumn"/>.</returns>
public override string ToString()
{
return "Is Left: " + IsLeft + " \n" + base.ToString();
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
//------------------------------------------------------------------------------
using System.Collections;
using System.Collections.Generic;
using System.Data.Common;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Linq;
namespace System.Data.SqlClient
{
public sealed class SqlConnectionStringBuilder : DbConnectionStringBuilder
{
private enum Keywords
{ // specific ordering for ConnectionString output construction
// NamedConnection,
DataSource,
FailoverPartner,
AttachDBFilename,
InitialCatalog,
IntegratedSecurity,
PersistSecurityInfo,
UserID,
Password,
Pooling,
MinPoolSize,
MaxPoolSize,
MultipleActiveResultSets,
Replication,
ConnectTimeout,
Encrypt,
TrustServerCertificate,
LoadBalanceTimeout,
PacketSize,
TypeSystemVersion,
ApplicationName,
CurrentLanguage,
WorkstationID,
UserInstance,
ApplicationIntent,
MultiSubnetFailover,
ConnectRetryCount,
ConnectRetryInterval,
// keep the count value last
KeywordsCount
}
internal const int KeywordsCount = (int)Keywords.KeywordsCount;
internal const int DeprecatedKeywordsCount = 6;
private static readonly string[] s_validKeywords;
private static readonly Dictionary<string, Keywords> s_keywords;
private ApplicationIntent _applicationIntent = DbConnectionStringDefaults.ApplicationIntent;
private string _applicationName = DbConnectionStringDefaults.ApplicationName;
private string _attachDBFilename = DbConnectionStringDefaults.AttachDBFilename;
private string _currentLanguage = DbConnectionStringDefaults.CurrentLanguage;
private string _dataSource = DbConnectionStringDefaults.DataSource;
private string _failoverPartner = DbConnectionStringDefaults.FailoverPartner;
private string _initialCatalog = DbConnectionStringDefaults.InitialCatalog;
// private string _namedConnection = DbConnectionStringDefaults.NamedConnection;
private string _password = DbConnectionStringDefaults.Password;
private string _typeSystemVersion = DbConnectionStringDefaults.TypeSystemVersion;
private string _userID = DbConnectionStringDefaults.UserID;
private string _workstationID = DbConnectionStringDefaults.WorkstationID;
private int _connectTimeout = DbConnectionStringDefaults.ConnectTimeout;
private int _loadBalanceTimeout = DbConnectionStringDefaults.LoadBalanceTimeout;
private int _maxPoolSize = DbConnectionStringDefaults.MaxPoolSize;
private int _minPoolSize = DbConnectionStringDefaults.MinPoolSize;
private int _packetSize = DbConnectionStringDefaults.PacketSize;
private int _connectRetryCount = DbConnectionStringDefaults.ConnectRetryCount;
private int _connectRetryInterval = DbConnectionStringDefaults.ConnectRetryInterval;
private bool _encrypt = DbConnectionStringDefaults.Encrypt;
private bool _trustServerCertificate = DbConnectionStringDefaults.TrustServerCertificate;
private bool _integratedSecurity = DbConnectionStringDefaults.IntegratedSecurity;
private bool _multipleActiveResultSets = DbConnectionStringDefaults.MultipleActiveResultSets;
private bool _multiSubnetFailover = DbConnectionStringDefaults.MultiSubnetFailover;
private bool _persistSecurityInfo = DbConnectionStringDefaults.PersistSecurityInfo;
private bool _pooling = DbConnectionStringDefaults.Pooling;
private bool _replication = DbConnectionStringDefaults.Replication;
private bool _userInstance = DbConnectionStringDefaults.UserInstance;
static SqlConnectionStringBuilder()
{
string[] validKeywords = new string[KeywordsCount];
validKeywords[(int)Keywords.ApplicationIntent] = DbConnectionStringKeywords.ApplicationIntent;
validKeywords[(int)Keywords.ApplicationName] = DbConnectionStringKeywords.ApplicationName;
validKeywords[(int)Keywords.AttachDBFilename] = DbConnectionStringKeywords.AttachDBFilename;
validKeywords[(int)Keywords.ConnectTimeout] = DbConnectionStringKeywords.ConnectTimeout;
validKeywords[(int)Keywords.CurrentLanguage] = DbConnectionStringKeywords.CurrentLanguage;
validKeywords[(int)Keywords.DataSource] = DbConnectionStringKeywords.DataSource;
validKeywords[(int)Keywords.Encrypt] = DbConnectionStringKeywords.Encrypt;
validKeywords[(int)Keywords.FailoverPartner] = DbConnectionStringKeywords.FailoverPartner;
validKeywords[(int)Keywords.InitialCatalog] = DbConnectionStringKeywords.InitialCatalog;
validKeywords[(int)Keywords.IntegratedSecurity] = DbConnectionStringKeywords.IntegratedSecurity;
validKeywords[(int)Keywords.LoadBalanceTimeout] = DbConnectionStringKeywords.LoadBalanceTimeout;
validKeywords[(int)Keywords.MaxPoolSize] = DbConnectionStringKeywords.MaxPoolSize;
validKeywords[(int)Keywords.MinPoolSize] = DbConnectionStringKeywords.MinPoolSize;
validKeywords[(int)Keywords.MultipleActiveResultSets] = DbConnectionStringKeywords.MultipleActiveResultSets;
validKeywords[(int)Keywords.MultiSubnetFailover] = DbConnectionStringKeywords.MultiSubnetFailover;
// validKeywords[(int)Keywords.NamedConnection] = DbConnectionStringKeywords.NamedConnection;
validKeywords[(int)Keywords.PacketSize] = DbConnectionStringKeywords.PacketSize;
validKeywords[(int)Keywords.Password] = DbConnectionStringKeywords.Password;
validKeywords[(int)Keywords.PersistSecurityInfo] = DbConnectionStringKeywords.PersistSecurityInfo;
validKeywords[(int)Keywords.Pooling] = DbConnectionStringKeywords.Pooling;
validKeywords[(int)Keywords.Replication] = DbConnectionStringKeywords.Replication;
validKeywords[(int)Keywords.TrustServerCertificate] = DbConnectionStringKeywords.TrustServerCertificate;
validKeywords[(int)Keywords.TypeSystemVersion] = DbConnectionStringKeywords.TypeSystemVersion;
validKeywords[(int)Keywords.UserID] = DbConnectionStringKeywords.UserID;
validKeywords[(int)Keywords.UserInstance] = DbConnectionStringKeywords.UserInstance;
validKeywords[(int)Keywords.WorkstationID] = DbConnectionStringKeywords.WorkstationID;
validKeywords[(int)Keywords.ConnectRetryCount] = DbConnectionStringKeywords.ConnectRetryCount;
validKeywords[(int)Keywords.ConnectRetryInterval] = DbConnectionStringKeywords.ConnectRetryInterval;
s_validKeywords = validKeywords;
Dictionary<string, Keywords> hash = new Dictionary<string, Keywords>(KeywordsCount + SqlConnectionString.SynonymCount, StringComparer.OrdinalIgnoreCase);
hash.Add(DbConnectionStringKeywords.ApplicationIntent, Keywords.ApplicationIntent);
hash.Add(DbConnectionStringKeywords.ApplicationName, Keywords.ApplicationName);
hash.Add(DbConnectionStringKeywords.AttachDBFilename, Keywords.AttachDBFilename);
hash.Add(DbConnectionStringKeywords.ConnectTimeout, Keywords.ConnectTimeout);
hash.Add(DbConnectionStringKeywords.CurrentLanguage, Keywords.CurrentLanguage);
hash.Add(DbConnectionStringKeywords.DataSource, Keywords.DataSource);
hash.Add(DbConnectionStringKeywords.Encrypt, Keywords.Encrypt);
hash.Add(DbConnectionStringKeywords.FailoverPartner, Keywords.FailoverPartner);
hash.Add(DbConnectionStringKeywords.InitialCatalog, Keywords.InitialCatalog);
hash.Add(DbConnectionStringKeywords.IntegratedSecurity, Keywords.IntegratedSecurity);
hash.Add(DbConnectionStringKeywords.LoadBalanceTimeout, Keywords.LoadBalanceTimeout);
hash.Add(DbConnectionStringKeywords.MultipleActiveResultSets, Keywords.MultipleActiveResultSets);
hash.Add(DbConnectionStringKeywords.MaxPoolSize, Keywords.MaxPoolSize);
hash.Add(DbConnectionStringKeywords.MinPoolSize, Keywords.MinPoolSize);
hash.Add(DbConnectionStringKeywords.MultiSubnetFailover, Keywords.MultiSubnetFailover);
// hash.Add(DbConnectionStringKeywords.NamedConnection, Keywords.NamedConnection);
hash.Add(DbConnectionStringKeywords.PacketSize, Keywords.PacketSize);
hash.Add(DbConnectionStringKeywords.Password, Keywords.Password);
hash.Add(DbConnectionStringKeywords.PersistSecurityInfo, Keywords.PersistSecurityInfo);
hash.Add(DbConnectionStringKeywords.Pooling, Keywords.Pooling);
hash.Add(DbConnectionStringKeywords.Replication, Keywords.Replication);
hash.Add(DbConnectionStringKeywords.TrustServerCertificate, Keywords.TrustServerCertificate);
hash.Add(DbConnectionStringKeywords.TypeSystemVersion, Keywords.TypeSystemVersion);
hash.Add(DbConnectionStringKeywords.UserID, Keywords.UserID);
hash.Add(DbConnectionStringKeywords.UserInstance, Keywords.UserInstance);
hash.Add(DbConnectionStringKeywords.WorkstationID, Keywords.WorkstationID);
hash.Add(DbConnectionStringKeywords.ConnectRetryCount, Keywords.ConnectRetryCount);
hash.Add(DbConnectionStringKeywords.ConnectRetryInterval, Keywords.ConnectRetryInterval);
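// Synonyms: alternative connection string spellings (e.g. "server", "uid", "pwd") map to the same canonical
// keywords, and lookups are case-insensitive via StringComparer.OrdinalIgnoreCase.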
hash.Add(DbConnectionStringSynonyms.APP, Keywords.ApplicationName);
hash.Add(DbConnectionStringSynonyms.EXTENDEDPROPERTIES, Keywords.AttachDBFilename);
hash.Add(DbConnectionStringSynonyms.INITIALFILENAME, Keywords.AttachDBFilename);
hash.Add(DbConnectionStringSynonyms.CONNECTIONTIMEOUT, Keywords.ConnectTimeout);
hash.Add(DbConnectionStringSynonyms.TIMEOUT, Keywords.ConnectTimeout);
hash.Add(DbConnectionStringSynonyms.LANGUAGE, Keywords.CurrentLanguage);
hash.Add(DbConnectionStringSynonyms.ADDR, Keywords.DataSource);
hash.Add(DbConnectionStringSynonyms.ADDRESS, Keywords.DataSource);
hash.Add(DbConnectionStringSynonyms.NETWORKADDRESS, Keywords.DataSource);
hash.Add(DbConnectionStringSynonyms.SERVER, Keywords.DataSource);
hash.Add(DbConnectionStringSynonyms.DATABASE, Keywords.InitialCatalog);
hash.Add(DbConnectionStringSynonyms.TRUSTEDCONNECTION, Keywords.IntegratedSecurity);
hash.Add(DbConnectionStringSynonyms.ConnectionLifetime, Keywords.LoadBalanceTimeout);
hash.Add(DbConnectionStringSynonyms.Pwd, Keywords.Password);
hash.Add(DbConnectionStringSynonyms.PERSISTSECURITYINFO, Keywords.PersistSecurityInfo);
hash.Add(DbConnectionStringSynonyms.UID, Keywords.UserID);
hash.Add(DbConnectionStringSynonyms.User, Keywords.UserID);
hash.Add(DbConnectionStringSynonyms.WSID, Keywords.WorkstationID);
Debug.Assert((KeywordsCount + SqlConnectionString.SynonymCount) == hash.Count, "initial expected size is incorrect");
s_keywords = hash;
}
public SqlConnectionStringBuilder() : this((string)null)
{
}
public SqlConnectionStringBuilder(string connectionString) : base()
{
if (!ADP.IsEmpty(connectionString))
{
ConnectionString = connectionString;
}
}
public override object this[string keyword]
{
get
{
Keywords index = GetIndex(keyword);
return GetAt(index);
}
set
{
if (null != value)
{
Keywords index = GetIndex(keyword);
switch (index)
{
case Keywords.ApplicationIntent: this.ApplicationIntent = ConvertToApplicationIntent(keyword, value); break;
case Keywords.ApplicationName: ApplicationName = ConvertToString(value); break;
case Keywords.AttachDBFilename: AttachDBFilename = ConvertToString(value); break;
case Keywords.CurrentLanguage: CurrentLanguage = ConvertToString(value); break;
case Keywords.DataSource: DataSource = ConvertToString(value); break;
case Keywords.FailoverPartner: FailoverPartner = ConvertToString(value); break;
case Keywords.InitialCatalog: InitialCatalog = ConvertToString(value); break;
// case Keywords.NamedConnection: NamedConnection = ConvertToString(value); break;
case Keywords.Password: Password = ConvertToString(value); break;
case Keywords.UserID: UserID = ConvertToString(value); break;
case Keywords.TypeSystemVersion: TypeSystemVersion = ConvertToString(value); break;
case Keywords.WorkstationID: WorkstationID = ConvertToString(value); break;
case Keywords.ConnectTimeout: ConnectTimeout = ConvertToInt32(value); break;
case Keywords.LoadBalanceTimeout: LoadBalanceTimeout = ConvertToInt32(value); break;
case Keywords.MaxPoolSize: MaxPoolSize = ConvertToInt32(value); break;
case Keywords.MinPoolSize: MinPoolSize = ConvertToInt32(value); break;
case Keywords.PacketSize: PacketSize = ConvertToInt32(value); break;
case Keywords.IntegratedSecurity: IntegratedSecurity = ConvertToIntegratedSecurity(value); break;
case Keywords.Encrypt: Encrypt = ConvertToBoolean(value); break;
case Keywords.TrustServerCertificate: TrustServerCertificate = ConvertToBoolean(value); break;
case Keywords.MultipleActiveResultSets: MultipleActiveResultSets = ConvertToBoolean(value); break;
case Keywords.MultiSubnetFailover: MultiSubnetFailover = ConvertToBoolean(value); break;
case Keywords.PersistSecurityInfo: PersistSecurityInfo = ConvertToBoolean(value); break;
case Keywords.Pooling: Pooling = ConvertToBoolean(value); break;
case Keywords.Replication: Replication = ConvertToBoolean(value); break;
case Keywords.UserInstance: UserInstance = ConvertToBoolean(value); break;
case Keywords.ConnectRetryCount: ConnectRetryCount = ConvertToInt32(value); break;
case Keywords.ConnectRetryInterval: ConnectRetryInterval = ConvertToInt32(value); break;
default:
Debug.Assert(false, "unexpected keyword");
throw UnsupportedKeyword(keyword);
}
}
else
{
Remove(keyword);
}
}
}
public ApplicationIntent ApplicationIntent
{
get { return _applicationIntent; }
set
{
if (!DbConnectionStringBuilderUtil.IsValidApplicationIntentValue(value))
{
throw ADP.InvalidEnumerationValue(typeof(ApplicationIntent), (int)value);
}
SetApplicationIntentValue(value);
_applicationIntent = value;
}
}
public string ApplicationName
{
get { return _applicationName; }
set
{
SetValue(DbConnectionStringKeywords.ApplicationName, value);
_applicationName = value;
}
}
public string AttachDBFilename
{
get { return _attachDBFilename; }
set
{
SetValue(DbConnectionStringKeywords.AttachDBFilename, value);
_attachDBFilename = value;
}
}
public int ConnectTimeout
{
get { return _connectTimeout; }
set
{
if (value < 0)
{
throw ADP.InvalidConnectionOptionValue(DbConnectionStringKeywords.ConnectTimeout);
}
SetValue(DbConnectionStringKeywords.ConnectTimeout, value);
_connectTimeout = value;
}
}
public string CurrentLanguage
{
get { return _currentLanguage; }
set
{
SetValue(DbConnectionStringKeywords.CurrentLanguage, value);
_currentLanguage = value;
}
}
public string DataSource
{
get { return _dataSource; }
set
{
SetValue(DbConnectionStringKeywords.DataSource, value);
_dataSource = value;
}
}
public bool Encrypt
{
get { return _encrypt; }
set
{
SetValue(DbConnectionStringKeywords.Encrypt, value);
_encrypt = value;
}
}
public bool TrustServerCertificate
{
get { return _trustServerCertificate; }
set
{
SetValue(DbConnectionStringKeywords.TrustServerCertificate, value);
_trustServerCertificate = value;
}
}
public string FailoverPartner
{
get { return _failoverPartner; }
set
{
SetValue(DbConnectionStringKeywords.FailoverPartner, value);
_failoverPartner = value;
}
}
public string InitialCatalog
{
get { return _initialCatalog; }
set
{
SetValue(DbConnectionStringKeywords.InitialCatalog, value);
_initialCatalog = value;
}
}
public bool IntegratedSecurity
{
get { return _integratedSecurity; }
set
{
SetValue(DbConnectionStringKeywords.IntegratedSecurity, value);
_integratedSecurity = value;
}
}
public int LoadBalanceTimeout
{
get { return _loadBalanceTimeout; }
set
{
if (value < 0)
{
throw ADP.InvalidConnectionOptionValue(DbConnectionStringKeywords.LoadBalanceTimeout);
}
SetValue(DbConnectionStringKeywords.LoadBalanceTimeout, value);
_loadBalanceTimeout = value;
}
}
public int MaxPoolSize
{
get { return _maxPoolSize; }
set
{
if (value < 1)
{
throw ADP.InvalidConnectionOptionValue(DbConnectionStringKeywords.MaxPoolSize);
}
SetValue(DbConnectionStringKeywords.MaxPoolSize, value);
_maxPoolSize = value;
}
}
public int ConnectRetryCount
{
get { return _connectRetryCount; }
set
{
if ((value < 0) || (value > 255))
{
throw ADP.InvalidConnectionOptionValue(DbConnectionStringKeywords.ConnectRetryCount);
}
SetValue(DbConnectionStringKeywords.ConnectRetryCount, value);
_connectRetryCount = value;
}
}
public int ConnectRetryInterval
{
get { return _connectRetryInterval; }
set
{
if ((value < 1) || (value > 60))
{
throw ADP.InvalidConnectionOptionValue(DbConnectionStringKeywords.ConnectRetryInterval);
}
SetValue(DbConnectionStringKeywords.ConnectRetryInterval, value);
_connectRetryInterval = value;
}
}
public int MinPoolSize
{
get { return _minPoolSize; }
set
{
if (value < 0)
{
throw ADP.InvalidConnectionOptionValue(DbConnectionStringKeywords.MinPoolSize);
}
SetValue(DbConnectionStringKeywords.MinPoolSize, value);
_minPoolSize = value;
}
}
public bool MultipleActiveResultSets
{
get { return _multipleActiveResultSets; }
set
{
SetValue(DbConnectionStringKeywords.MultipleActiveResultSets, value);
_multipleActiveResultSets = value;
}
}
[SuppressMessage("Microsoft.Naming", "CA1704:IdentifiersShouldBeSpelledCorrectly", Justification = "Reviewed and Approved by UE")]
public bool MultiSubnetFailover
{
get { return _multiSubnetFailover; }
set
{
SetValue(DbConnectionStringKeywords.MultiSubnetFailover, value);
_multiSubnetFailover = value;
}
}
/*
[DisplayName(DbConnectionStringKeywords.NamedConnection)]
[ResCategoryAttribute(Res.DataCategory_NamedConnectionString)]
[ResDescriptionAttribute(Res.DbConnectionString_NamedConnection)]
[RefreshPropertiesAttribute(RefreshProperties.All)]
[TypeConverter(typeof(NamedConnectionStringConverter))]
public string NamedConnection {
get { return _namedConnection; }
set {
SetValue(DbConnectionStringKeywords.NamedConnection, value);
_namedConnection = value;
}
}
*/
public int PacketSize
{
get { return _packetSize; }
set
{
if ((value < TdsEnums.MIN_PACKET_SIZE) || (TdsEnums.MAX_PACKET_SIZE < value))
{
throw SQL.InvalidPacketSizeValue();
}
SetValue(DbConnectionStringKeywords.PacketSize, value);
_packetSize = value;
}
}
public string Password
{
get { return _password; }
set
{
SetValue(DbConnectionStringKeywords.Password, value);
_password = value;
}
}
public bool PersistSecurityInfo
{
get { return _persistSecurityInfo; }
set
{
SetValue(DbConnectionStringKeywords.PersistSecurityInfo, value);
_persistSecurityInfo = value;
}
}
public bool Pooling
{
get { return _pooling; }
set
{
SetValue(DbConnectionStringKeywords.Pooling, value);
_pooling = value;
}
}
public bool Replication
{
get { return _replication; }
set
{
SetValue(DbConnectionStringKeywords.Replication, value);
_replication = value;
}
}
public string TypeSystemVersion
{
get { return _typeSystemVersion; }
set
{
SetValue(DbConnectionStringKeywords.TypeSystemVersion, value);
_typeSystemVersion = value;
}
}
public string UserID
{
get { return _userID; }
set
{
SetValue(DbConnectionStringKeywords.UserID, value);
_userID = value;
}
}
public bool UserInstance
{
get { return _userInstance; }
set
{
SetValue(DbConnectionStringKeywords.UserInstance, value);
_userInstance = value;
}
}
public string WorkstationID
{
get { return _workstationID; }
set
{
SetValue(DbConnectionStringKeywords.WorkstationID, value);
_workstationID = value;
}
}
public override ICollection Keys
{
get
{
return new System.Collections.ObjectModel.ReadOnlyCollection<string>(s_validKeywords);
}
}
public override ICollection Values
{
get
{
// Written this way so that if the ordering of Keywords and _validKeywords changes,
// this is one less place to maintain.
object[] values = new object[s_validKeywords.Length];
for (int i = 0; i < values.Length; ++i)
{
values[i] = GetAt((Keywords)i);
}
return new System.Collections.ObjectModel.ReadOnlyCollection<object>(values);
}
}
public override void Clear()
{
base.Clear();
for (int i = 0; i < s_validKeywords.Length; ++i)
{
Reset((Keywords)i);
}
}
public override bool ContainsKey(string keyword)
{
ADP.CheckArgumentNull(keyword, "keyword");
return s_keywords.ContainsKey(keyword);
}
private static bool ConvertToBoolean(object value)
{
return DbConnectionStringBuilderUtil.ConvertToBoolean(value);
}
private static int ConvertToInt32(object value)
{
return DbConnectionStringBuilderUtil.ConvertToInt32(value);
}
private static bool ConvertToIntegratedSecurity(object value)
{
return DbConnectionStringBuilderUtil.ConvertToIntegratedSecurity(value);
}
private static string ConvertToString(object value)
{
return DbConnectionStringBuilderUtil.ConvertToString(value);
}
private static ApplicationIntent ConvertToApplicationIntent(string keyword, object value)
{
return DbConnectionStringBuilderUtil.ConvertToApplicationIntent(keyword, value);
}
private object GetAt(Keywords index)
{
switch (index)
{
case Keywords.ApplicationIntent: return this.ApplicationIntent;
case Keywords.ApplicationName: return ApplicationName;
case Keywords.AttachDBFilename: return AttachDBFilename;
case Keywords.ConnectTimeout: return ConnectTimeout;
case Keywords.CurrentLanguage: return CurrentLanguage;
case Keywords.DataSource: return DataSource;
case Keywords.Encrypt: return Encrypt;
case Keywords.FailoverPartner: return FailoverPartner;
case Keywords.InitialCatalog: return InitialCatalog;
case Keywords.IntegratedSecurity: return IntegratedSecurity;
case Keywords.LoadBalanceTimeout: return LoadBalanceTimeout;
case Keywords.MultipleActiveResultSets: return MultipleActiveResultSets;
case Keywords.MaxPoolSize: return MaxPoolSize;
case Keywords.MinPoolSize: return MinPoolSize;
case Keywords.MultiSubnetFailover: return MultiSubnetFailover;
// case Keywords.NamedConnection: return NamedConnection;
case Keywords.PacketSize: return PacketSize;
case Keywords.Password: return Password;
case Keywords.PersistSecurityInfo: return PersistSecurityInfo;
case Keywords.Pooling: return Pooling;
case Keywords.Replication: return Replication;
case Keywords.TrustServerCertificate: return TrustServerCertificate;
case Keywords.TypeSystemVersion: return TypeSystemVersion;
case Keywords.UserID: return UserID;
case Keywords.UserInstance: return UserInstance;
case Keywords.WorkstationID: return WorkstationID;
case Keywords.ConnectRetryCount: return ConnectRetryCount;
case Keywords.ConnectRetryInterval: return ConnectRetryInterval;
default:
Debug.Assert(false, "unexpected keyword");
throw UnsupportedKeyword(s_validKeywords[(int)index]);
}
}
private Keywords GetIndex(string keyword)
{
ADP.CheckArgumentNull(keyword, "keyword");
Keywords index;
if (s_keywords.TryGetValue(keyword, out index))
{
return index;
}
throw UnsupportedKeyword(keyword);
}
public override bool Remove(string keyword)
{
ADP.CheckArgumentNull(keyword, "keyword");
Keywords index;
if (s_keywords.TryGetValue(keyword, out index))
{
if (base.Remove(s_validKeywords[(int)index]))
{
Reset(index);
return true;
}
}
return false;
}
private void Reset(Keywords index)
{
switch (index)
{
case Keywords.ApplicationIntent:
_applicationIntent = DbConnectionStringDefaults.ApplicationIntent;
break;
case Keywords.ApplicationName:
_applicationName = DbConnectionStringDefaults.ApplicationName;
break;
case Keywords.AttachDBFilename:
_attachDBFilename = DbConnectionStringDefaults.AttachDBFilename;
break;
case Keywords.ConnectTimeout:
_connectTimeout = DbConnectionStringDefaults.ConnectTimeout;
break;
case Keywords.CurrentLanguage:
_currentLanguage = DbConnectionStringDefaults.CurrentLanguage;
break;
case Keywords.DataSource:
_dataSource = DbConnectionStringDefaults.DataSource;
break;
case Keywords.Encrypt:
_encrypt = DbConnectionStringDefaults.Encrypt;
break;
case Keywords.FailoverPartner:
_failoverPartner = DbConnectionStringDefaults.FailoverPartner;
break;
case Keywords.InitialCatalog:
_initialCatalog = DbConnectionStringDefaults.InitialCatalog;
break;
case Keywords.IntegratedSecurity:
_integratedSecurity = DbConnectionStringDefaults.IntegratedSecurity;
break;
case Keywords.LoadBalanceTimeout:
_loadBalanceTimeout = DbConnectionStringDefaults.LoadBalanceTimeout;
break;
case Keywords.MultipleActiveResultSets:
_multipleActiveResultSets = DbConnectionStringDefaults.MultipleActiveResultSets;
break;
case Keywords.MaxPoolSize:
_maxPoolSize = DbConnectionStringDefaults.MaxPoolSize;
break;
case Keywords.MinPoolSize:
_minPoolSize = DbConnectionStringDefaults.MinPoolSize;
break;
case Keywords.MultiSubnetFailover:
_multiSubnetFailover = DbConnectionStringDefaults.MultiSubnetFailover;
break;
// case Keywords.NamedConnection:
// _namedConnection = DbConnectionStringDefaults.NamedConnection;
// break;
case Keywords.PacketSize:
_packetSize = DbConnectionStringDefaults.PacketSize;
break;
case Keywords.Password:
_password = DbConnectionStringDefaults.Password;
break;
case Keywords.PersistSecurityInfo:
_persistSecurityInfo = DbConnectionStringDefaults.PersistSecurityInfo;
break;
case Keywords.Pooling:
_pooling = DbConnectionStringDefaults.Pooling;
break;
case Keywords.ConnectRetryCount:
_connectRetryCount = DbConnectionStringDefaults.ConnectRetryCount;
break;
case Keywords.ConnectRetryInterval:
_connectRetryInterval = DbConnectionStringDefaults.ConnectRetryInterval;
break;
case Keywords.Replication:
_replication = DbConnectionStringDefaults.Replication;
break;
case Keywords.TrustServerCertificate:
_trustServerCertificate = DbConnectionStringDefaults.TrustServerCertificate;
break;
case Keywords.TypeSystemVersion:
_typeSystemVersion = DbConnectionStringDefaults.TypeSystemVersion;
break;
case Keywords.UserID:
_userID = DbConnectionStringDefaults.UserID;
break;
case Keywords.UserInstance:
_userInstance = DbConnectionStringDefaults.UserInstance;
break;
case Keywords.WorkstationID:
_workstationID = DbConnectionStringDefaults.WorkstationID;
break;
default:
Debug.Assert(false, "unexpected keyword");
throw UnsupportedKeyword(s_validKeywords[(int)index]);
}
}
private void SetValue(string keyword, bool value)
{
base[keyword] = value.ToString();
}
private void SetValue(string keyword, int value)
{
base[keyword] = value.ToString((System.IFormatProvider)null);
}
private void SetValue(string keyword, string value)
{
ADP.CheckArgumentNull(value, keyword);
base[keyword] = value;
}
private void SetApplicationIntentValue(ApplicationIntent value)
{
Debug.Assert(DbConnectionStringBuilderUtil.IsValidApplicationIntentValue(value), "invalid value");
base[DbConnectionStringKeywords.ApplicationIntent] = DbConnectionStringBuilderUtil.ApplicationIntentToString(value);
}
public override bool ShouldSerialize(string keyword)
{
ADP.CheckArgumentNull(keyword, "keyword");
Keywords index;
return s_keywords.TryGetValue(keyword, out index) && base.ShouldSerialize(s_validKeywords[(int)index]);
}
public override bool TryGetValue(string keyword, out object value)
{
Keywords index;
if (s_keywords.TryGetValue(keyword, out index))
{
value = GetAt(index);
return true;
}
value = null;
return false;
}
private static readonly string[] s_notSupportedKeywords = new string[] {
DbConnectionStringKeywords.AsynchronousProcessing,
DbConnectionStringKeywords.ConnectionReset,
DbConnectionStringKeywords.ContextConnection,
DbConnectionStringKeywords.Enlist,
DbConnectionStringKeywords.TransactionBinding,
DbConnectionStringSynonyms.Async
};
private static readonly string[] s_notSupportedNetworkLibraryKeywords = new string[] {
DbConnectionStringKeywords.NetworkLibrary,
DbConnectionStringSynonyms.NET,
DbConnectionStringSynonyms.NETWORK
};
private Exception UnsupportedKeyword(string keyword)
{
if (s_notSupportedKeywords.Contains(keyword, StringComparer.OrdinalIgnoreCase))
{
return SQL.UnsupportedKeyword(keyword);
}
else if (s_notSupportedNetworkLibraryKeywords.Contains(keyword, StringComparer.OrdinalIgnoreCase))
{
return SQL.NetworkLibraryKeywordNotSupported();
}
else
{
return ADP.KeywordNotSupported(keyword);
}
}
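// Illustrative usage sketch (not part of the original source): assuming the surrounding type is the
// SQL connection string builder shown in this file, the strongly typed setters validate their input and
// keep the underlying connection string in sync via SetValue:
//
//     var builder = new SqlConnectionStringBuilder();
//     builder.DataSource = "localhost";          // hypothetical server name
//     builder.InitialCatalog = "Northwind";      // hypothetical database name
//     builder.IntegratedSecurity = true;
//     builder.ConnectRetryCount = 2;             // the setter above rejects values outside 0-255
//     string connectionString = builder.ConnectionString;  // inherited from DbConnectionStringBuilder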
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;
namespace AzureLens.Areas.HelpPage
{
/// <summary>
/// This class will generate the samples for the help page.
/// </summary>
public class HelpPageSampleGenerator
{
/// <summary>
/// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
/// </summary>
public HelpPageSampleGenerator()
{
ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
ActionSamples = new Dictionary<HelpPageSampleKey, object>();
SampleObjects = new Dictionary<Type, object>();
SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
{
DefaultSampleObjectFactory,
};
}
/// <summary>
/// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
/// </summary>
public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }
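// Illustrative sketch (not part of the original source): when an action takes or returns an
// HttpRequestMessage/HttpResponseMessage, the actual CLR content type can be registered here so samples
// can still be generated. "generator" is a HelpPageSampleGenerator instance, and "Values", "Get" and
// MyDto are hypothetical names; the key shape mirrors the lookups performed in ResolveType below.
//
//     generator.ActualHttpMessageTypes.Add(
//         new HelpPageSampleKey(SampleDirection.Response, "Values", "Get", new[] { "*" }),
//         typeof(MyDto));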
/// <summary>
/// Gets the objects that are used directly as samples for certain actions.
/// </summary>
public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }
/// <summary>
/// Gets the objects that are serialized as samples by the supported formatters.
/// </summary>
public IDictionary<Type, object> SampleObjects { get; internal set; }
/// <summary>
/// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
        /// stopping when the factory successfully returns a non-<see langword="null"/> object.
/// </summary>
/// <remarks>
/// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
/// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
/// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
[SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
Justification = "This is an appropriate nesting of generic types")]
public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }
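// Illustrative sketch (not part of the original source): registering an override factory ahead of the
// default ObjectGenerator-based one, as described in the remarks above. Returning null falls through to
// the next factory in the list.
//
//     var generator = new HelpPageSampleGenerator();
//     generator.SampleObjectFactories.Insert(0, (gen, type) =>
//         type == typeof(string) ? "sample string" : null);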
/// <summary>
/// Gets the request body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
{
return GetSample(api, SampleDirection.Request);
}
/// <summary>
/// Gets the response body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
{
return GetSample(api, SampleDirection.Response);
}
/// <summary>
/// Gets the request or response body samples.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The samples keyed by media type.</returns>
public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
{
if (api == null)
{
throw new ArgumentNullException("api");
}
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
var samples = new Dictionary<MediaTypeHeaderValue, object>();
// Use the samples provided directly for actions
var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
foreach (var actionSample in actionSamples)
{
samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
}
            // Generate samples from the formatters only if the action doesn't return an HttpResponseMessage.
            // When it does, we cannot rely on formatters: we don't know what's in the HttpResponseMessage, and it might not even use formatters.
if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
{
object sampleObject = GetSampleObject(type);
foreach (var formatter in formatters)
{
foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
{
if (!samples.ContainsKey(mediaType))
{
object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);
                            // If no sample was found, try to generate one using the formatter and the sample object
if (sample == null && sampleObject != null)
{
sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
}
samples.Add(mediaType, WrapSampleIfString(sample));
}
}
}
}
return samples;
}
/// <summary>
/// Search for samples that are provided directly through <see cref="ActionSamples"/>.
/// </summary>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="type">The CLR type.</param>
/// <param name="formatter">The formatter.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The sample that matches the parameters.</returns>
public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
{
object sample;
// First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
// If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
// If still not found, try to get the sample provided for the specified mediaType and type.
// Finally, try to get the sample provided for the specified mediaType.
if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
{
return sample;
}
return null;
}
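// Illustrative sketch (not part of the original source): registering a direct sample that the precedence
// rules above would return before any formatter-generated sample. "generator" is a HelpPageSampleGenerator
// instance and "Values"/"Get" are hypothetical controller/action names; the key shape matches the most
// specific lookup performed in this method.
//
//     generator.ActionSamples.Add(
//         new HelpPageSampleKey(new MediaTypeHeaderValue("application/json"), SampleDirection.Response,
//                               "Values", "Get", new[] { "*" }),
//         "{ \"id\": 1, \"name\": \"example\" }");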
/// <summary>
/// Gets the sample object that will be serialized by the formatters.
/// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
/// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
/// factories in <see cref="SampleObjectFactories"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>The sample object.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
public virtual object GetSampleObject(Type type)
{
object sampleObject;
if (!SampleObjects.TryGetValue(type, out sampleObject))
{
// No specific object available, try our factories.
foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
{
if (factory == null)
{
continue;
}
try
{
sampleObject = factory(this, type);
if (sampleObject != null)
{
break;
}
}
catch
{
// Ignore any problems encountered in the factory; go on to the next one (if any).
}
}
}
return sampleObject;
}
/// <summary>
/// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The type.</returns>
public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
{
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
}
/// <summary>
/// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
/// <param name="formatters">The formatters.</param>
[SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
{
if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
{
throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
}
if (api == null)
{
throw new ArgumentNullException("api");
}
Type type;
if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
{
// Re-compute the supported formatters based on type
Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
{
if (IsFormatSupported(sampleDirection, formatter, type))
{
newFormatters.Add(formatter);
}
}
formatters = newFormatters;
}
else
{
switch (sampleDirection)
{
case SampleDirection.Request:
ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
formatters = api.SupportedRequestBodyFormatters;
break;
case SampleDirection.Response:
default:
type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
formatters = api.SupportedResponseFormatters;
break;
}
}
return type;
}
/// <summary>
/// Writes the sample object using formatter.
/// </summary>
/// <param name="formatter">The formatter.</param>
/// <param name="value">The value.</param>
/// <param name="type">The type.</param>
/// <param name="mediaType">Type of the media.</param>
        /// <returns>The serialized sample, or an <see cref="InvalidSample"/> describing why a sample could not be produced.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
{
if (formatter == null)
{
throw new ArgumentNullException("formatter");
}
if (mediaType == null)
{
throw new ArgumentNullException("mediaType");
}
object sample = String.Empty;
MemoryStream ms = null;
HttpContent content = null;
try
{
if (formatter.CanWriteType(type))
{
ms = new MemoryStream();
content = new ObjectContent(type, value, formatter, mediaType);
formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
ms.Position = 0;
StreamReader reader = new StreamReader(ms);
string serializedSampleString = reader.ReadToEnd();
if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
{
serializedSampleString = TryFormatXml(serializedSampleString);
}
else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
{
serializedSampleString = TryFormatJson(serializedSampleString);
}
sample = new TextSample(serializedSampleString);
}
else
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
mediaType,
formatter.GetType().Name,
type.Name));
}
}
catch (Exception e)
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
formatter.GetType().Name,
mediaType.MediaType,
UnwrapException(e).Message));
}
finally
{
if (ms != null)
{
ms.Dispose();
}
if (content != null)
{
content.Dispose();
}
}
return sample;
}
internal static Exception UnwrapException(Exception exception)
{
AggregateException aggregateException = exception as AggregateException;
if (aggregateException != null)
{
return aggregateException.Flatten().InnerException;
}
return exception;
}
// Default factory for sample objects
private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
{
// Try to create a default sample object
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type);
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatJson(string str)
{
try
{
object parsedJson = JsonConvert.DeserializeObject(str);
return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
}
catch
{
// can't parse JSON, return the original string
return str;
}
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatXml(string str)
{
try
{
XDocument xml = XDocument.Parse(str);
return xml.ToString();
}
catch
{
// can't parse XML, return the original string
return str;
}
}
private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
{
switch (sampleDirection)
{
case SampleDirection.Request:
return formatter.CanReadType(type);
case SampleDirection.Response:
return formatter.CanWriteType(type);
}
return false;
}
private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
{
HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
foreach (var sample in ActionSamples)
{
HelpPageSampleKey sampleKey = sample.Key;
if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
(sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
sampleDirection == sampleKey.SampleDirection)
{
yield return sample;
}
}
}
private static object WrapSampleIfString(object sample)
{
string stringSample = sample as string;
if (stringSample != null)
{
return new TextSample(stringSample);
}
return sample;
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.IO;
using System.Net.Test.Common;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Xunit;
using Xunit.Abstractions;
namespace System.Net.Http.Functional.Tests
{
using Configuration = System.Net.Test.Common.Configuration;
public abstract class ResponseStreamTest : HttpClientTestBase
{
private readonly ITestOutputHelper _output;
public ResponseStreamTest(ITestOutputHelper output)
{
_output = output;
}
[OuterLoop("Uses external server")]
[Theory]
[InlineData(0)]
[InlineData(1)]
[InlineData(2)]
[InlineData(3)]
[InlineData(4)]
[InlineData(5)]
public async Task GetStreamAsync_ReadToEnd_Success(int readMode)
{
using (HttpClient client = CreateHttpClient())
{
string customHeaderValue = Guid.NewGuid().ToString("N");
client.DefaultRequestHeaders.Add("X-ResponseStreamTest", customHeaderValue);
using (Stream stream = await client.GetStreamAsync(Configuration.Http.RemoteEchoServer))
{
var ms = new MemoryStream();
int bytesRead;
var buffer = new byte[10];
string responseBody;
// Read all of the response content in various ways
switch (readMode)
{
case 0:
// StreamReader.ReadToEnd
responseBody = new StreamReader(stream).ReadToEnd();
break;
case 1:
// StreamReader.ReadToEndAsync
responseBody = await new StreamReader(stream).ReadToEndAsync();
break;
case 2:
// Individual calls to Read(Array)
while ((bytesRead = stream.Read(buffer, 0, buffer.Length)) != 0)
{
ms.Write(buffer, 0, bytesRead);
}
responseBody = Encoding.UTF8.GetString(ms.ToArray());
break;
case 3:
// Individual calls to ReadAsync(Array)
while ((bytesRead = await stream.ReadAsync(buffer, 0, buffer.Length)) != 0)
{
ms.Write(buffer, 0, bytesRead);
}
responseBody = Encoding.UTF8.GetString(ms.ToArray());
break;
case 4:
// Individual calls to Read(Span)
while ((bytesRead = stream.Read(new Span<byte>(buffer))) != 0)
{
ms.Write(buffer, 0, bytesRead);
}
responseBody = Encoding.UTF8.GetString(ms.ToArray());
break;
case 5:
// CopyToAsync
await stream.CopyToAsync(ms);
responseBody = Encoding.UTF8.GetString(ms.ToArray());
break;
default:
throw new Exception($"Unexpected test mode {readMode}");
}
// Calling GetStreamAsync() means we don't have access to the HttpResponseMessage.
// So, we can't use the MD5 hash validation to verify receipt of the response body.
                    // For this test, a simpler check suffices: verify that the custom request header value is echoed back in the response body.
_output.WriteLine(responseBody);
Assert.Contains(customHeaderValue, responseBody);
}
}
}
[OuterLoop("Uses external server")]
[Fact]
public async Task GetAsync_UseResponseHeadersReadAndCallLoadIntoBuffer_Success()
{
using (HttpClient client = CreateHttpClient())
using (HttpResponseMessage response = await client.GetAsync(Configuration.Http.RemoteEchoServer, HttpCompletionOption.ResponseHeadersRead))
{
await response.Content.LoadIntoBufferAsync();
string responseBody = await response.Content.ReadAsStringAsync();
_output.WriteLine(responseBody);
TestHelper.VerifyResponseBody(
responseBody,
response.Content.Headers.ContentMD5,
false,
null);
}
}
[OuterLoop("Uses external server")]
[Fact]
public async Task GetAsync_UseResponseHeadersReadAndCopyToMemoryStream_Success()
{
using (HttpClient client = CreateHttpClient())
using (HttpResponseMessage response = await client.GetAsync(Configuration.Http.RemoteEchoServer, HttpCompletionOption.ResponseHeadersRead))
{
var memoryStream = new MemoryStream();
await response.Content.CopyToAsync(memoryStream);
memoryStream.Position = 0;
using (var reader = new StreamReader(memoryStream))
{
string responseBody = reader.ReadToEnd();
_output.WriteLine(responseBody);
TestHelper.VerifyResponseBody(
responseBody,
response.Content.Headers.ContentMD5,
false,
null);
}
}
}
[OuterLoop("Uses external server")]
[Fact]
public async Task GetStreamAsync_ReadZeroBytes_Success()
{
using (HttpClient client = CreateHttpClient())
using (Stream stream = await client.GetStreamAsync(Configuration.Http.RemoteEchoServer))
{
Assert.Equal(0, stream.Read(new byte[1], 0, 0));
Assert.Equal(0, stream.Read(new Span<byte>(new byte[1], 0, 0)));
Assert.Equal(0, await stream.ReadAsync(new byte[1], 0, 0));
}
}
[OuterLoop("Uses external server")]
[Fact]
public async Task ReadAsStreamAsync_Cancel_TaskIsCanceled()
{
var cts = new CancellationTokenSource();
using (HttpClient client = CreateHttpClient())
using (HttpResponseMessage response =
await client.GetAsync(Configuration.Http.RemoteEchoServer, HttpCompletionOption.ResponseHeadersRead))
using (Stream stream = await response.Content.ReadAsStreamAsync())
{
var buffer = new byte[2048];
Task task = stream.ReadAsync(buffer, 0, buffer.Length, cts.Token);
cts.Cancel();
// Verify that the task completed.
Assert.True(((IAsyncResult)task).AsyncWaitHandle.WaitOne(new TimeSpan(0, 5, 0)));
Assert.True(task.IsCompleted, "Task was not yet completed");
// Verify that the task completed successfully or is canceled.
if (IsWinHttpHandler)
{
// With WinHttpHandler, we may fault because canceling the task destroys the request handle
// which may randomly cause an ObjectDisposedException (or other exception).
Assert.True(
task.Status == TaskStatus.RanToCompletion ||
task.Status == TaskStatus.Canceled ||
task.Status == TaskStatus.Faulted);
}
else
{
if (task.IsFaulted)
{
// Propagate exception for debugging
task.GetAwaiter().GetResult();
}
Assert.True(
task.Status == TaskStatus.RanToCompletion ||
task.Status == TaskStatus.Canceled);
}
}
}
[SkipOnTargetFramework(TargetFrameworkMonikers.Uap, "WinRT based Http stack ignores these errors")]
[Theory]
[InlineData(TransferType.ContentLength, TransferError.ContentLengthTooLarge)]
[InlineData(TransferType.Chunked, TransferError.MissingChunkTerminator)]
[InlineData(TransferType.Chunked, TransferError.ChunkSizeTooLarge)]
public async Task ReadAsStreamAsync_InvalidServerResponse_ThrowsIOException(
TransferType transferType,
TransferError transferError)
{
await StartTransferTypeAndErrorServer(transferType, transferError, async uri =>
{
await Assert.ThrowsAsync<IOException>(() => ReadAsStreamHelper(uri));
});
}
[Theory]
[InlineData(TransferType.None, TransferError.None)]
[InlineData(TransferType.ContentLength, TransferError.None)]
[InlineData(TransferType.Chunked, TransferError.None)]
public async Task ReadAsStreamAsync_ValidServerResponse_Success(
TransferType transferType,
TransferError transferError)
{
await StartTransferTypeAndErrorServer(transferType, transferError, async uri =>
{
await ReadAsStreamHelper(uri);
});
}
public enum TransferType
{
None = 0,
ContentLength,
Chunked
}
public enum TransferError
{
None = 0,
ContentLengthTooLarge,
ChunkSizeTooLarge,
MissingChunkTerminator
}
public static Task StartTransferTypeAndErrorServer(
TransferType transferType,
TransferError transferError,
Func<Uri, Task> clientFunc)
{
return LoopbackServer.CreateClientAndServerAsync(
clientFunc,
server => server.AcceptConnectionAsync(async connection =>
{
// Read past request headers.
await connection.ReadRequestHeaderAsync();
// Determine response transfer headers.
string transferHeader = null;
string content = "This is some response content.";
if (transferType == TransferType.ContentLength)
{
transferHeader = transferError == TransferError.ContentLengthTooLarge ?
$"Content-Length: {content.Length + 42}\r\n" :
$"Content-Length: {content.Length}\r\n";
}
else if (transferType == TransferType.Chunked)
{
transferHeader = "Transfer-Encoding: chunked\r\n";
}
// Write response header
TextWriter writer = connection.Writer;
await writer.WriteAsync("HTTP/1.1 200 OK\r\n").ConfigureAwait(false);
await writer.WriteAsync($"Date: {DateTimeOffset.UtcNow:R}\r\n").ConfigureAwait(false);
await writer.WriteAsync("Content-Type: text/plain\r\n").ConfigureAwait(false);
if (!string.IsNullOrEmpty(transferHeader))
{
await writer.WriteAsync(transferHeader).ConfigureAwait(false);
}
await writer.WriteAsync("\r\n").ConfigureAwait(false);
// Write response body
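                    // For reference (illustrative, derived from the constants above): with the 30-character content
                    // string, a well-formed chunked body is "1e\r\nThis is some response content.\r\n0\r\n\r\n";
                    // the error cases either inflate the declared chunk size by 42 or omit the terminating "0\r\n\r\n".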
if (transferType == TransferType.Chunked)
{
string chunkSizeInHex = string.Format(
"{0:x}\r\n",
content.Length + (transferError == TransferError.ChunkSizeTooLarge ? 42 : 0));
await writer.WriteAsync(chunkSizeInHex).ConfigureAwait(false);
await writer.WriteAsync($"{content}\r\n").ConfigureAwait(false);
if (transferError != TransferError.MissingChunkTerminator)
{
await writer.WriteAsync("0\r\n\r\n").ConfigureAwait(false);
}
}
else
{
await writer.WriteAsync($"{content}").ConfigureAwait(false);
}
}));
}
private async Task ReadAsStreamHelper(Uri serverUri)
{
using (HttpClient client = CreateHttpClient())
{
using (var response = await client.GetAsync(
serverUri,
HttpCompletionOption.ResponseHeadersRead))
using (var stream = await response.Content.ReadAsStreamAsync())
{
var buffer = new byte[1];
while (await stream.ReadAsync(buffer, 0, 1) > 0) ;
}
}
}
}
}
| |
using System.Collections.Generic;
namespace ProtoCore.AST.ImperativeAST
{
public abstract class ImperativeNode : Node
{
public ImperativeNode()
{
}
public ImperativeNode(ImperativeNode rhs) : base(rhs)
{
}
public override bool Compare(Node other)
{
return base.Compare(other);
}
}
public class LanguageBlockNode : ImperativeNode
{
public LanguageBlockNode()
{
codeblock = new ProtoCore.LanguageCodeBlock();
Attributes = new List<ImperativeNode>();
}
public LanguageBlockNode(LanguageBlockNode rhs) : base(rhs)
{
codeblock = new ProtoCore.LanguageCodeBlock(rhs.codeblock);
Attributes = new List<ImperativeNode>();
foreach (ImperativeNode aNode in rhs.Attributes)
{
ImperativeNode newNode = ProtoCore.Utils.NodeUtils.Clone(aNode);
Attributes.Add(newNode);
}
}
public List<ImperativeNode> Attributes { get; set; }
public ProtoCore.LanguageCodeBlock codeblock { get; set; }
public Node CodeBlockNode { get; set; }
}
public class ArrayNameNode : ImperativeNode
{
public ArrayNode ArrayDimensions { get; set; }
public ArrayNameNode()
{
ArrayDimensions = null;
}
public ArrayNameNode(ArrayNameNode rhs) : base(rhs)
{
ArrayDimensions = null;
if (null != rhs.ArrayDimensions)
{
ArrayDimensions = new ArrayNode(rhs.ArrayDimensions);
}
}
}
public class GroupExpressionNode : ArrayNameNode
{
public ImperativeNode Expression { get; set; }
}
public class IdentifierNode : ArrayNameNode
{
public IdentifierNode()
{
ArrayDimensions = null;
datatype = new ProtoCore.Type
{
UID = (int)PrimitiveType.kInvalidType,
rank = 0,
IsIndexable = false,
Name = null
};
}
public IdentifierNode(IdentifierNode rhs) : base(rhs)
{
datatype = new ProtoCore.Type
{
UID = rhs.datatype.UID,
rank = rhs.datatype.rank,
IsIndexable = rhs.datatype.IsIndexable,
Name = rhs.datatype.Name
};
Value = rhs.Value;
}
public ProtoCore.Type datatype { get; set; }
public string Value { get; set; }
public string ArrayName { get; set; }
}
public class TypedIdentifierNode: IdentifierNode
{
}
public class IntNode : ImperativeNode
{
public string value { get; set; }
public IntNode()
{
value = string.Empty;
}
public IntNode(IntNode rhs)
: base(rhs)
{
value = rhs.value;
}
}
public class DoubleNode : ImperativeNode
{
public string value { get; set; }
public DoubleNode()
{
value = string.Empty;
}
public DoubleNode(DoubleNode rhs)
: base(rhs)
{
value = rhs.value;
}
}
public class BooleanNode : ImperativeNode
{
public string value { get; set; }
public BooleanNode()
{
value = string.Empty;
}
public BooleanNode(BooleanNode rhs)
: base(rhs)
{
value = rhs.value;
}
}
public class CharNode : ImperativeNode
{
public string value { get; set; }
public CharNode()
{
value = string.Empty;
}
        public CharNode(CharNode rhs) : base(rhs)
{
value = rhs.value;
}
}
public class StringNode : ImperativeNode
{
public string value { get; set; }
public StringNode()
{
value = string.Empty;
}
public StringNode(StringNode rhs)
: base(rhs)
{
value = rhs.value;
}
}
public class NullNode : ImperativeNode
{
}
public class ArrayNode : ImperativeNode
{
public ArrayNode()
{
Expr = null;
Type = null;
}
public ArrayNode(ArrayNode rhs)
: base(rhs)
{
Expr = null;
Type = null;
if (null != rhs)
{
if (null != rhs.Expr)
{
Expr = ProtoCore.Utils.NodeUtils.Clone(rhs.Expr);
}
if (null != rhs.Type)
{
Type = ProtoCore.Utils.NodeUtils.Clone(rhs.Type);
}
}
}
public ImperativeNode Expr { get; set; }
public ImperativeNode Type { get; set; }
}
public class FunctionCallNode : ArrayNameNode
{
public ImperativeNode Function
{
get;
set;
}
public List<ImperativeNode> FormalArguments
{
get;
set;
}
public FunctionCallNode()
{
FormalArguments = new List<ImperativeNode>();
}
public FunctionCallNode(FunctionCallNode rhs) : base(rhs)
{
Function = ProtoCore.Utils.NodeUtils.Clone(rhs.Function);
FormalArguments = new List<ImperativeNode>();
foreach (ImperativeNode argNode in rhs.FormalArguments)
{
ImperativeNode tempNode = ProtoCore.Utils.NodeUtils.Clone(argNode);
FormalArguments.Add(tempNode);
}
}
}
public class VarDeclNode : ImperativeNode
{
public VarDeclNode()
{
memregion = ProtoCore.DSASM.MemoryRegion.kInvalidRegion;
}
public ProtoCore.DSASM.MemoryRegion memregion { get; set; }
public ProtoCore.Type ArgumentType { get; set; }
public ImperativeNode NameNode { get; set; }
}
public class ReturnNode : ImperativeNode
{
public ImperativeNode ReturnExpr { get; set; }
}
public class ArgumentSignatureNode : ImperativeNode
{
public ArgumentSignatureNode()
{
Arguments = new List<VarDeclNode>();
}
public List<VarDeclNode> Arguments { get; set; }
public void AddArgument(VarDeclNode arg)
{
Arguments.Add(arg);
}
}
public class ExprListNode : ArrayNameNode
{
public ExprListNode()
{
list = new List<ImperativeNode>();
}
public ExprListNode(ExprListNode rhs)
: base(rhs)
{
list = new List<ImperativeNode>();
foreach (ImperativeNode argNode in rhs.list)
{
ImperativeNode tempNode = ProtoCore.Utils.NodeUtils.Clone(argNode);
list.Add(tempNode);
}
}
public List<ImperativeNode> list { get; set; }
}
public class CodeBlockNode : ImperativeNode
{
public CodeBlockNode()
{
Body = new List<ImperativeNode>();
}
public CodeBlockNode(CodeBlockNode rhs) : base(rhs)
{
Body = new List<ImperativeNode>();
foreach (ImperativeNode aNode in rhs.Body)
{
ImperativeNode newNode = ProtoCore.Utils.NodeUtils.Clone(aNode);
Body.Add(newNode);
}
}
public List<ImperativeNode> Body { get; set; }
}
public class ConstructorDefinitionNode : ImperativeNode
{
public int localVars { get; set; }
public ArgumentSignatureNode Signature { get; set; }
public CodeBlockNode FunctionBody { get; set; }
}
public class FunctionDefinitionNode : ImperativeNode
{
public int localVars { get; set; }
public List<ImperativeNode> Attributes { get; set; }
public CodeBlockNode FunctionBody { get; set; }
public ProtoCore.Type ReturnType { get; set; }
public ArgumentSignatureNode Signature { get; set; }
}
public class InlineConditionalNode : ImperativeNode
{
public ImperativeNode ConditionExpression { get; set; }
public ImperativeNode TrueExpression { get; set; }
public ImperativeNode FalseExpression { get; set; }
}
public class BinaryExpressionNode : ImperativeNode
{
public ImperativeNode LeftNode { get; set; }
public ProtoCore.DSASM.Operator Optr { get; set; }
public ImperativeNode RightNode { get; set; }
public BinaryExpressionNode()
{
}
public BinaryExpressionNode(BinaryExpressionNode rhs) : base(rhs)
{
Optr = rhs.Optr;
LeftNode = rhs.LeftNode == null ? null : ProtoCore.Utils.NodeUtils.Clone(rhs.LeftNode);
RightNode = rhs.RightNode == null ? null : ProtoCore.Utils.NodeUtils.Clone(rhs.RightNode);
}
}
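    // Illustrative sketch (not part of the original source): hand-building the AST for an imperative
    // statement such as "a = b + 1". The ProtoCore.DSASM.Operator member names (assign, add) are assumed
    // here and may differ from the actual enum.
    //
    //     var assignment = new BinaryExpressionNode
    //     {
    //         Optr = ProtoCore.DSASM.Operator.assign,
    //         LeftNode = new IdentifierNode { Value = "a" },
    //         RightNode = new BinaryExpressionNode
    //         {
    //             Optr = ProtoCore.DSASM.Operator.add,
    //             LeftNode = new IdentifierNode { Value = "b" },
    //             RightNode = new IntNode { value = "1" }
    //         }
    //     };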
public class ElseIfBlock : ImperativeNode
{
public ElseIfBlock()
{
Body = new List<ImperativeNode>();
ElseIfBodyPosition = new IfStmtPositionNode();
}
public ElseIfBlock(ElseIfBlock rhs) : base(rhs)
{
Expr = ProtoCore.Utils.NodeUtils.Clone(rhs.Expr);
ElseIfBodyPosition = ProtoCore.Utils.NodeUtils.Clone(rhs.ElseIfBodyPosition);
Body = new List<ImperativeNode>();
foreach (ImperativeNode iNode in rhs.Body)
{
ImperativeNode newNode = ProtoCore.Utils.NodeUtils.Clone(iNode);
Body.Add(newNode);
}
}
public ImperativeNode Expr { get; set; }
public List<ImperativeNode> Body { get; set; }
public ImperativeNode ElseIfBodyPosition { get; set; }
}
public class IfStmtPositionNode: ImperativeNode
{
public IfStmtPositionNode()
{
}
public IfStmtPositionNode(IfStmtPositionNode rhs):base(rhs)
{
}
}
public class IfStmtNode : ImperativeNode
{
public IfStmtNode()
{
ElseIfList = new List<ElseIfBlock>();
IfBody = new List<ImperativeNode>();
IfBodyPosition = new IfStmtPositionNode();
ElseBody = new List<ImperativeNode>();
ElseBodyPosition = new IfStmtPositionNode();
}
public IfStmtNode(IfStmtNode rhs) : base(rhs)
{
            // Clone the if-condition expression.
IfExprNode = ProtoCore.Utils.NodeUtils.Clone(rhs.IfExprNode);
            // Clone the statements in the if-body.
IfBody = new List<ImperativeNode>();
foreach (ImperativeNode stmt in rhs.IfBody)
{
ImperativeNode body = ProtoCore.Utils.NodeUtils.Clone(stmt as ImperativeNode);
IfBody.Add(body);
}
            // Clone the if-body position information.
IfBodyPosition = ProtoCore.Utils.NodeUtils.Clone(rhs.IfBodyPosition);
            // Clone each else-if block.
ElseIfList = new List<ElseIfBlock>();
foreach (ElseIfBlock elseBlock in rhs.ElseIfList)
{
ImperativeNode elseNode = ProtoCore.Utils.NodeUtils.Clone(elseBlock as ImperativeNode);
ElseIfList.Add(elseNode as ElseIfBlock);
}
            // Clone the statements in the else-body.
ElseBody = new List<ImperativeNode>();
foreach (ImperativeNode stmt in rhs.ElseBody)
{
ImperativeNode tmpNode = ProtoCore.Utils.NodeUtils.Clone(stmt);
ElseBody.Add(tmpNode);
}
            // Clone the else-body position information.
ElseBodyPosition = ProtoCore.Utils.NodeUtils.Clone(rhs.ElseBodyPosition);
}
public ImperativeNode IfExprNode { get; set; }
public List<ImperativeNode> IfBody { get; set; }
public ImperativeNode IfBodyPosition { get; set; }
public List<ElseIfBlock> ElseIfList { get; set; }
public List<ImperativeNode> ElseBody { get; set; }
public ImperativeNode ElseBodyPosition { get; set; }
}
public class WhileStmtNode : ImperativeNode
{
public WhileStmtNode()
{
Body = new List<ImperativeNode>();
}
public WhileStmtNode(WhileStmtNode rhs) : base(rhs)
{
Expr = ProtoCore.Utils.NodeUtils.Clone(rhs.Expr);
Body = new List<ImperativeNode>();
foreach (ImperativeNode iNode in rhs.Body)
{
ImperativeNode newNode = ProtoCore.Utils.NodeUtils.Clone(iNode);
Body.Add(newNode);
}
}
public ImperativeNode Expr { get; set; }
public List<ImperativeNode> Body { get; set; }
}
public class UnaryExpressionNode : ImperativeNode
{
public ProtoCore.DSASM.UnaryOperator Operator { get; set; }
public ImperativeNode Expression { get; set; }
public UnaryExpressionNode()
{
}
public UnaryExpressionNode(UnaryExpressionNode rhs) : base(rhs)
{
Operator = rhs.Operator;
Expression = ProtoCore.Utils.NodeUtils.Clone(rhs.Expression);
}
}
public class RangeExprNode : ArrayNameNode
{
public ImperativeNode FromNode { get; set; }
public ImperativeNode ToNode { get; set; }
public ImperativeNode StepNode { get; set; }
public ProtoCore.DSASM.RangeStepOperator stepoperator { get; set; }
public RangeExprNode()
{
}
public RangeExprNode(RangeExprNode rhs) : base(rhs)
{
FromNode = ProtoCore.Utils.NodeUtils.Clone(rhs.FromNode);
ToNode = ProtoCore.Utils.NodeUtils.Clone(rhs.ToNode);
if (null != rhs.StepNode)
{
StepNode = ProtoCore.Utils.NodeUtils.Clone(rhs.StepNode);
}
stepoperator = rhs.stepoperator;
}
}
public class ForLoopNode : ImperativeNode
{
public ForLoopNode()
{
body = new List<ImperativeNode>();
}
public ForLoopNode(ForLoopNode rhs) : base(rhs)
{
body = new List<ImperativeNode>();
foreach (ImperativeNode iNode in rhs.body)
{
ImperativeNode newNode = ProtoCore.Utils.NodeUtils.Clone(iNode);
body.Add(newNode);
}
loopVar = ProtoCore.Utils.NodeUtils.Clone(rhs.loopVar);
expression = ProtoCore.Utils.NodeUtils.Clone(rhs.expression);
KwForLine = rhs.KwForLine;
KwForCol = rhs.KwForCol;
KwInLine = rhs.KwInLine;
KwInCol = rhs.KwInCol;
}
public int KwForLine { get; set; }
public int KwForCol { get; set; }
public int KwInLine { get; set; }
public int KwInCol { get; set; }
public ImperativeNode loopVar { get; set; }
public ImperativeNode expression { get; set; }
public List<ImperativeNode> body { get; set; }
}
public class IdentifierListNode : ImperativeNode
{
public ImperativeNode LeftNode { get; set; }
public ProtoCore.DSASM.Operator Optr { get; set; }
public ImperativeNode RightNode { get; set; }
public IdentifierListNode()
{
}
public IdentifierListNode(IdentifierListNode rhs) : base(rhs)
{
Optr = rhs.Optr;
LeftNode = ProtoCore.Utils.NodeUtils.Clone(rhs.LeftNode);
RightNode = ProtoCore.Utils.NodeUtils.Clone(rhs.RightNode);
}
}
public class PostFixNode : ImperativeNode
{
public ImperativeNode Identifier { get; set; }
public ProtoCore.DSASM.UnaryOperator Operator { get; set; }
}
public class BreakNode: ImperativeNode
{
}
public class ContinueNode: ImperativeNode
{
}
public class DefaultArgNode : ImperativeNode
    {
        // Not supposed to be used in the parser.
}
public class ThrowNode : ImperativeNode
{
public ImperativeNode expression { get; set; }
}
public class TryBlockNode : ImperativeNode
{
public List<ImperativeNode> body { get; set; }
}
public class CatchFilterNode : ImperativeNode
{
public IdentifierNode var { get; set; }
public ProtoCore.Type type { get; set; }
}
public class CatchBlockNode : ImperativeNode
{
public CatchFilterNode catchFilter { get; set; }
public List<ImperativeNode> body { get; set; }
}
public class ExceptionHandlingNode : ImperativeNode
{
public TryBlockNode tryBlock { get; set; }
public List<CatchBlockNode> catchBlocks { get; set; }
public ExceptionHandlingNode()
{
catchBlocks = new List<CatchBlockNode>();
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Collections.Generic;
using System.Runtime.InteropServices;
using System.Threading;
using System.Threading.Tasks;
using Xunit;
namespace System.IO.Pipes.Tests
{
/// <summary>
/// Tests that cover Read and ReadAsync behaviors that are shared between
/// AnonymousPipes and NamedPipes
/// </summary>
public abstract partial class PipeTest_Read : PipeTestBase
{
[Fact]
public void ReadWithNullBuffer_Throws_ArgumentNullException()
{
using (ServerClientPair pair = CreateServerClientPair())
{
PipeStream pipe = pair.readablePipe;
Assert.True(pipe.IsConnected);
Assert.True(pipe.CanRead);
// Null is an invalid Buffer
AssertExtensions.Throws<ArgumentNullException>("buffer", () => pipe.Read(null, 0, 1));
AssertExtensions.Throws<ArgumentNullException>("buffer", () => { pipe.ReadAsync(null, 0, 1); });
// Buffer validity is checked before Offset
AssertExtensions.Throws<ArgumentNullException>("buffer", () => pipe.Read(null, -1, 1));
AssertExtensions.Throws<ArgumentNullException>("buffer", () => { pipe.ReadAsync(null, -1, 1); });
// Buffer validity is checked before Count
AssertExtensions.Throws<ArgumentNullException>("buffer", () => pipe.Read(null, -1, -1));
AssertExtensions.Throws<ArgumentNullException>("buffer", () => { pipe.ReadAsync(null, -1, -1); });
}
}
[Fact]
public void ReadWithNegativeOffset_Throws_ArgumentOutOfRangeException()
{
using (ServerClientPair pair = CreateServerClientPair())
{
PipeStream pipe = pair.readablePipe;
Assert.True(pipe.IsConnected);
Assert.True(pipe.CanRead);
// Offset must be nonnegative
AssertExtensions.Throws<ArgumentOutOfRangeException>("offset", () => pipe.Read(new byte[6], -1, 1));
AssertExtensions.Throws<ArgumentOutOfRangeException>("offset", () => { pipe.ReadAsync(new byte[4], -1, 1); });
}
}
[Fact]
public void ReadWithNegativeCount_Throws_ArgumentOutOfRangeException()
{
using (ServerClientPair pair = CreateServerClientPair())
{
PipeStream pipe = pair.readablePipe;
Assert.True(pipe.IsConnected);
Assert.True(pipe.CanRead);
// Count must be nonnegative
AssertExtensions.Throws<ArgumentOutOfRangeException>("count", () => pipe.Read(new byte[3], 0, -1));
                AssertExtensions.Throws<ArgumentOutOfRangeException>("count", () => { pipe.ReadAsync(new byte[7], 0, -1); });
}
}
[Fact]
public void ReadWithOutOfBoundsArray_Throws_ArgumentException()
{
using (ServerClientPair pair = CreateServerClientPair())
{
PipeStream pipe = pair.readablePipe;
Assert.True(pipe.IsConnected);
Assert.True(pipe.CanRead);
// offset out of bounds
Assert.Throws<ArgumentException>(null, () => pipe.Read(new byte[1], 1, 1));
// offset out of bounds for 0 count read
Assert.Throws<ArgumentException>(null, () => pipe.Read(new byte[1], 2, 0));
// offset out of bounds even for 0 length buffer
Assert.Throws<ArgumentException>(null, () => pipe.Read(new byte[0], 1, 0));
// combination offset and count out of bounds
Assert.Throws<ArgumentException>(null, () => pipe.Read(new byte[2], 1, 2));
// edges
Assert.Throws<ArgumentException>(null, () => pipe.Read(new byte[0], int.MaxValue, 0));
Assert.Throws<ArgumentException>(null, () => pipe.Read(new byte[0], int.MaxValue, int.MaxValue));
Assert.Throws<ArgumentException>(() => pipe.Read(new byte[5], 3, 4));
// offset out of bounds
Assert.Throws<ArgumentException>(null, () => { pipe.ReadAsync(new byte[1], 1, 1); });
// offset out of bounds for 0 count read
Assert.Throws<ArgumentException>(null, () => { pipe.ReadAsync(new byte[1], 2, 0); });
// offset out of bounds even for 0 length buffer
Assert.Throws<ArgumentException>(null, () => { pipe.ReadAsync(new byte[0], 1, 0); });
// combination offset and count out of bounds
Assert.Throws<ArgumentException>(null, () => { pipe.ReadAsync(new byte[2], 1, 2); });
// edges
Assert.Throws<ArgumentException>(null, () => { pipe.ReadAsync(new byte[0], int.MaxValue, 0); });
Assert.Throws<ArgumentException>(null, () => { pipe.ReadAsync(new byte[0], int.MaxValue, int.MaxValue); });
Assert.Throws<ArgumentException>(() => { pipe.ReadAsync(new byte[5], 3, 4); });
}
}
[Fact]
public virtual void WriteToReadOnlyPipe_Throws_NotSupportedException()
{
using (ServerClientPair pair = CreateServerClientPair())
{
PipeStream pipe = pair.readablePipe;
Assert.True(pipe.IsConnected);
Assert.False(pipe.CanWrite);
Assert.False(pipe.CanSeek);
Assert.Throws<NotSupportedException>(() => pipe.Write(new byte[5], 0, 5));
Assert.Throws<NotSupportedException>(() => pipe.WriteByte(123));
Assert.Throws<NotSupportedException>(() => pipe.Flush());
Assert.Throws<NotSupportedException>(() => pipe.OutBufferSize);
Assert.Throws<NotSupportedException>(() => pipe.WaitForPipeDrain());
Assert.Throws<NotSupportedException>(() => { pipe.WriteAsync(new byte[5], 0, 5); });
}
}
[Fact]
public async Task ReadWithZeroLengthBuffer_Nop()
{
using (ServerClientPair pair = CreateServerClientPair())
{
PipeStream pipe = pair.readablePipe;
var buffer = new byte[] { };
Assert.Equal(0, pipe.Read(buffer, 0, 0));
Task<int> read = pipe.ReadAsync(buffer, 0, 0);
Assert.Equal(0, await read);
}
}
[Fact]
public void ReadPipeUnsupportedMembers_Throws_NotSupportedException()
{
using (ServerClientPair pair = CreateServerClientPair())
{
PipeStream pipe = pair.readablePipe;
Assert.True(pipe.IsConnected);
Assert.Throws<NotSupportedException>(() => pipe.Length);
Assert.Throws<NotSupportedException>(() => pipe.SetLength(10L));
Assert.Throws<NotSupportedException>(() => pipe.Position);
Assert.Throws<NotSupportedException>(() => pipe.Position = 10L);
Assert.Throws<NotSupportedException>(() => pipe.Seek(10L, System.IO.SeekOrigin.Begin));
}
}
[Fact]
public void ReadOnDisposedReadablePipe_Throws_ObjectDisposedException()
{
using (ServerClientPair pair = CreateServerClientPair())
{
PipeStream pipe = pair.readablePipe;
pipe.Dispose();
byte[] buffer = new byte[] { 0, 0, 0, 0 };
Assert.Throws<ObjectDisposedException>(() => pipe.Flush());
Assert.Throws<ObjectDisposedException>(() => pipe.Read(buffer, 0, buffer.Length));
Assert.Throws<ObjectDisposedException>(() => pipe.ReadByte());
Assert.Throws<ObjectDisposedException>(() => { pipe.ReadAsync(buffer, 0, buffer.Length); });
Assert.Throws<ObjectDisposedException>(() => pipe.IsMessageComplete);
Assert.Throws<ObjectDisposedException>(() => pipe.ReadMode);
}
}
[Fact]
public void CopyToAsync_InvalidArgs_Throws()
{
using (ServerClientPair pair = CreateServerClientPair())
{
AssertExtensions.Throws<ArgumentNullException>("destination", () => { pair.readablePipe.CopyToAsync(null); });
AssertExtensions.Throws<ArgumentOutOfRangeException>("bufferSize", () => { pair.readablePipe.CopyToAsync(new MemoryStream(), 0); });
Assert.Throws<NotSupportedException>(() => { pair.readablePipe.CopyToAsync(new MemoryStream(new byte[1], writable: false)); });
if (!pair.writeablePipe.CanRead)
{
Assert.Throws<NotSupportedException>(() => { pair.writeablePipe.CopyToAsync(new MemoryStream()); });
}
}
}
[Fact]
[SkipOnTargetFramework(TargetFrameworkMonikers.NetFramework, "There is a bug in netfx around async read on a broken PipeStream. See #2601 and #2899. This bug is fixed in netcore.")]
public virtual async Task ReadFromPipeWithClosedPartner_ReadNoBytes()
{
using (ServerClientPair pair = CreateServerClientPair())
{
pair.writeablePipe.Dispose();
byte[] buffer = new byte[] { 0, 0, 0, 0 };
// The pipe won't be marked as Broken until the first read, so prime it
// to test both the case where it's not yet marked as "Broken" and then
// where it is.
Assert.Equal(0, pair.readablePipe.Read(buffer, 0, buffer.Length));
Assert.Equal(0, pair.readablePipe.Read(buffer, 0, buffer.Length));
Assert.Equal(-1, pair.readablePipe.ReadByte());
Assert.Equal(0, await pair.readablePipe.ReadAsync(buffer, 0, buffer.Length));
}
}
[Fact]
public async Task ValidWriteAsync_ValidReadAsync()
{
using (ServerClientPair pair = CreateServerClientPair())
{
Assert.True(pair.writeablePipe.IsConnected);
Assert.True(pair.readablePipe.IsConnected);
byte[] sent = new byte[] { 123, 0, 5 };
byte[] received = new byte[] { 0, 0, 0 };
Task write = pair.writeablePipe.WriteAsync(sent, 0, sent.Length);
Assert.Equal(sent.Length, await pair.readablePipe.ReadAsync(received, 0, sent.Length));
Assert.Equal(sent, received);
await write;
}
}
[Fact]
public void ValidWrite_ValidRead()
{
using (ServerClientPair pair = CreateServerClientPair())
{
Assert.True(pair.writeablePipe.IsConnected);
Assert.True(pair.readablePipe.IsConnected);
byte[] sent = new byte[] { 123, 0, 5 };
byte[] received = new byte[] { 0, 0, 0 };
Task.Run(() => { pair.writeablePipe.Write(sent, 0, sent.Length); });
Assert.Equal(sent.Length, pair.readablePipe.Read(received, 0, sent.Length));
Assert.Equal(sent, received);
if (RuntimeInformation.IsOSPlatform(OSPlatform.Windows)) // WaitForPipeDrain isn't supported on Unix
pair.writeablePipe.WaitForPipeDrain();
}
}
[Fact]
public void ValidWriteByte_ValidReadByte()
{
using (ServerClientPair pair = CreateServerClientPair())
{
Assert.True(pair.writeablePipe.IsConnected);
Assert.True(pair.readablePipe.IsConnected);
Task.Run(() => pair.writeablePipe.WriteByte(123));
Assert.Equal(123, pair.readablePipe.ReadByte());
}
}
[Theory]
[OuterLoop]
[MemberData(nameof(AsyncReadWriteChain_MemberData))]
public async Task AsyncReadWriteChain_ReadWrite(int iterations, int writeBufferSize, int readBufferSize, bool cancelableToken)
{
var writeBuffer = new byte[writeBufferSize];
var readBuffer = new byte[readBufferSize];
var rand = new Random();
var cancellationToken = cancelableToken ? new CancellationTokenSource().Token : CancellationToken.None;
using (ServerClientPair pair = CreateServerClientPair())
{
// Repeatedly and asynchronously write to the writable pipe and read from the readable pipe,
// verifying that the correct data made it through.
for (int iter = 0; iter < iterations; iter++)
{
rand.NextBytes(writeBuffer);
Task writerTask = pair.writeablePipe.WriteAsync(writeBuffer, 0, writeBuffer.Length, cancellationToken);
int totalRead = 0;
while (totalRead < writeBuffer.Length)
{
int numRead = await pair.readablePipe.ReadAsync(readBuffer, 0, readBuffer.Length, cancellationToken);
Assert.True(numRead > 0);
Assert.Equal<byte>(
new ArraySegment<byte>(writeBuffer, totalRead, numRead),
new ArraySegment<byte>(readBuffer, 0, numRead));
totalRead += numRead;
}
Assert.Equal(writeBuffer.Length, totalRead);
await writerTask;
}
}
}
[Theory]
[OuterLoop]
[MemberData(nameof(AsyncReadWriteChain_MemberData))]
public async Task AsyncReadWriteChain_CopyToAsync(int iterations, int writeBufferSize, int readBufferSize, bool cancelableToken)
{
var writeBuffer = new byte[writeBufferSize * iterations];
new Random().NextBytes(writeBuffer);
var cancellationToken = cancelableToken ? new CancellationTokenSource().Token : CancellationToken.None;
using (ServerClientPair pair = CreateServerClientPair())
{
var readData = new MemoryStream();
Task copyTask = pair.readablePipe.CopyToAsync(readData, readBufferSize, cancellationToken);
for (int iter = 0; iter < iterations; iter++)
{
await pair.writeablePipe.WriteAsync(writeBuffer, iter * writeBufferSize, writeBufferSize, cancellationToken);
}
pair.writeablePipe.Dispose();
await copyTask;
Assert.Equal(writeBuffer.Length, readData.Length);
Assert.Equal(writeBuffer, readData.ToArray());
}
}
public static IEnumerable<object[]> AsyncReadWriteChain_MemberData()
{
foreach (bool cancelableToken in new[] { true, false })
{
yield return new object[] { 5000, 1, 1, cancelableToken }; // very small buffers
yield return new object[] { 500, 21, 18, cancelableToken }; // lots of iterations, with read buffer smaller than write buffer
yield return new object[] { 500, 18, 21, cancelableToken }; // lots of iterations, with write buffer smaller than read buffer
yield return new object[] { 5, 128000, 64000, cancelableToken }; // very large buffers
}
}
[Fact]
public async Task ValidWriteAsync_ValidReadAsync_APM()
{
using (ServerClientPair pair = CreateServerClientPair())
{
Assert.True(pair.writeablePipe.IsConnected);
Assert.True(pair.readablePipe.IsConnected);
byte[] sent = new byte[] { 123, 0, 5 };
byte[] received = new byte[] { 0, 0, 0 };
Task write = Task.Factory.FromAsync<byte[], int, int>(pair.writeablePipe.BeginWrite, pair.writeablePipe.EndWrite, sent, 0, sent.Length, null);
Task<int> read = Task.Factory.FromAsync<byte[], int, int, int>(pair.readablePipe.BeginRead, pair.readablePipe.EndRead, received, 0, received.Length, null);
Assert.Equal(sent.Length, await read);
Assert.Equal(sent, received);
await write;
}
}
[Theory]
[OuterLoop]
[MemberData(nameof(AsyncReadWriteChain_MemberData))]
public async Task AsyncReadWriteChain_ReadWrite_APM(int iterations, int writeBufferSize, int readBufferSize, bool cancelableToken)
{
var writeBuffer = new byte[writeBufferSize];
var readBuffer = new byte[readBufferSize];
var rand = new Random();
var cancellationToken = cancelableToken ? new CancellationTokenSource().Token : CancellationToken.None;
using (ServerClientPair pair = CreateServerClientPair())
{
// Repeatedly and asynchronously write to the writable pipe and read from the readable pipe,
// verifying that the correct data made it through.
for (int iter = 0; iter < iterations; iter++)
{
rand.NextBytes(writeBuffer);
Task write = Task.Factory.FromAsync<byte[], int, int>(pair.writeablePipe.BeginWrite, pair.writeablePipe.EndWrite, writeBuffer, 0, writeBuffer.Length, null);
int totalRead = 0;
while (totalRead < writeBuffer.Length)
{
Task<int> read = Task.Factory.FromAsync<byte[], int, int, int>(pair.readablePipe.BeginRead, pair.readablePipe.EndRead, readBuffer, 0, readBuffer.Length, null);
int numRead = await read;
Assert.True(numRead > 0);
Assert.Equal<byte>(
new ArraySegment<byte>(writeBuffer, totalRead, numRead),
new ArraySegment<byte>(readBuffer, 0, numRead));
totalRead += numRead;
}
Assert.Equal(writeBuffer.Length, totalRead);
await write;
}
}
}
}
}
| |
//
// BansheeDbConnection.cs
//
// Author:
// Aaron Bockover <[email protected]>
//
// Copyright (C) 2007-2008 Novell, Inc.
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
using System;
using System.Linq;
using System.Collections.Generic;
using System.IO;
using System.Threading;
using Hyena;
using Hyena.Data;
using Hyena.Jobs;
using Hyena.Data.Sqlite;
using Banshee.Base;
using Banshee.ServiceStack;
using Banshee.Configuration;
namespace Banshee.Database
{
public sealed class BansheeDbConnection : HyenaSqliteConnection, IInitializeService, IRequiredService
{
private BansheeDbFormatMigrator migrator;
private DatabaseConfigurationClient configuration;
private bool validate_schema = false;
public DatabaseConfigurationClient Configuration {
get { return configuration; }
}
public BansheeDbConnection () : this (DatabaseFile)
{
validate_schema = ApplicationContext.CommandLine.Contains ("validate-db-schema");
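// Presumably enabled by launching Banshee with --validate-db-schema; when set,
// Initialize () runs ValidateSchema () after the migration step below.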
}
internal BansheeDbConnection (string db_path) : base (db_path)
{
// Each cache page is about 1.5K, so 32768 pages = 49152K = 48M
int cache_size = (TableExists ("CoreTracks") && Query<long> ("SELECT COUNT(*) FROM CoreTracks") > 10000) ? 32768 : 16384;
Execute ("PRAGMA cache_size = ?", cache_size);
Execute ("PRAGMA synchronous = OFF");
Execute ("PRAGMA temp_store = MEMORY");
Execute ("PRAGMA count_changes = OFF");
// TODO: left off because smart playlists used to rely on case-insensitive LIKE, but
// they shouldn't anymore now that we have custom functions for sorting/searching.
// See BGO #603665 for discussion about turning this back on.
//Execute ("PRAGMA case_sensitive_like=ON");
Log.DebugFormat ("Opened SQLite (version {1}) connection to {0}", db_path, ServerVersion);
migrator = new BansheeDbFormatMigrator (this);
configuration = new DatabaseConfigurationClient (this);
if (ApplicationContext.CommandLine.Contains ("debug-sql")) {
Hyena.Data.Sqlite.HyenaSqliteCommand.LogAll = true;
WarnIfCalledFromThread = ThreadAssist.MainThread;
}
}
internal IEnumerable<string> SortedTableColumns (string table)
{
return GetSchema (table).Keys.OrderBy (n => n);
}
void IInitializeService.Initialize ()
{
lock (this) {
migrator.Migrate ();
migrator = null;
try {
OptimizeDatabase ();
} catch (Exception e) {
Log.Exception ("Error determining if ANALYZE is necessary", e);
}
// Update cached sorting keys
BeginTransaction ();
try {
SortKeyUpdater.Update ();
CommitTransaction ();
} catch {
RollbackTransaction ();
}
}
if (Banshee.Metrics.BansheeMetrics.EnableCollection.Get ()) {
Banshee.Metrics.BansheeMetrics.Start ();
}
if (validate_schema) {
ValidateSchema ();
}
}
private void OptimizeDatabase ()
{
bool needs_analyze = false;
long analyze_threshold = configuration.Get<long> ("Database", "AnalyzeThreshold", 100);
string [] tables_with_indexes = {"CoreTracks", "CoreArtists", "CoreAlbums",
"CorePlaylistEntries", "PodcastItems", "PodcastEnclosures",
"PodcastSyndications", "CoverArtDownloads"};
if (TableExists ("sqlite_stat1")) {
foreach (string table_name in tables_with_indexes) {
if (TableExists (table_name)) {
long count = Query<long> (String.Format ("SELECT COUNT(*) FROM {0}", table_name));
string stat = Query<string> ("SELECT stat FROM sqlite_stat1 WHERE tbl = ? LIMIT 1", table_name);
// stat contains space-separated integers;
// the first one is the number of records in the table
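// e.g. a stat value of "250000 10 1" indicates roughly 250000 rows were present when ANALYZE last ran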
long items_indexed = stat != null ? long.Parse (stat.Split (' ')[0]) : 0;
if (Math.Abs (count - items_indexed) > analyze_threshold) {
needs_analyze = true;
break;
}
}
}
} else {
needs_analyze = true;
}
if (needs_analyze) {
Log.DebugFormat ("Running ANALYZE against database to improve performance");
Execute ("ANALYZE");
}
}
public BansheeDbFormatMigrator Migrator {
get { lock (this) { return migrator; } }
}
public bool ValidateSchema ()
{
bool is_valid = true;
var new_db_path = Paths.GetTempFileName (Paths.TempDir);
var new_db = new BansheeDbConnection (new_db_path);
((IInitializeService)new_db).Initialize ();
Hyena.Log.DebugFormat ("Validating db schema for {0}", DbPath);
var tables = new_db.QueryEnumerable<string> (
"select name from sqlite_master where type='table' order by name"
);
foreach (var table in tables) {
if (!TableExists (table)) {
Log.ErrorFormat ("Table {0} does not exist!", table);
is_valid = false;
} else {
var a = new_db.SortedTableColumns (table);
var b = SortedTableColumns (table);
a.Except (b).ForEach (c => { is_valid = false; Hyena.Log.ErrorFormat ("Table {0} should contain column {1}", table, c); });
b.Except (a).ForEach (c => Hyena.Log.DebugFormat ("Table {0} has extra (probably obsolete) column {1}", table, c));
}
}
using (var reader = new_db.Query (
"select name,sql from sqlite_master where type='index' AND name NOT LIKE 'sqlite_autoindex%' order by name")) {
while (reader.Read ()) {
string name = (string)reader[0];
string sql = (string)reader[1];
if (!IndexExists (name)) {
Log.ErrorFormat ("Index {0} does not exist!", name);
is_valid = false;
} else {
string our_sql = Query<string> ("select sql from sqlite_master where type='index' and name=?", name);
if (our_sql != sql) {
Log.ErrorFormat ("Index definition of {0} differs, should be `{1}` but is `{2}`", name, sql, our_sql);
is_valid = false;
}
}
}
}
Hyena.Log.DebugFormat ("Done validating db schema for {0}", DbPath);
System.IO.File.Delete (new_db_path);
return is_valid;
}
public static string DatabaseFile {
get {
if (ApplicationContext.CommandLine.Contains ("db")) {
return ApplicationContext.CommandLine["db"];
}
string proper_dbfile = Path.Combine (Paths.ApplicationData, "banshee.db");
if (File.Exists (proper_dbfile)) {
return proper_dbfile;
}
string dbfile = Path.Combine (Path.Combine (Environment.GetFolderPath (
Environment.SpecialFolder.ApplicationData),
"banshee"),
"banshee.db");
if (!File.Exists (dbfile)) {
string tdbfile = Path.Combine (Path.Combine (Path.Combine (Environment.GetFolderPath (
Environment.SpecialFolder.Personal),
".gnome2"),
"banshee"),
"banshee.db");
dbfile = tdbfile;
}
if (File.Exists (dbfile)) {
Log.InformationFormat ("Copying your old Banshee Database to {0}", proper_dbfile);
File.Copy (dbfile, proper_dbfile);
}
return proper_dbfile;
}
}
string IService.ServiceName {
get { return "DbConnection"; }
}
}
}
| |
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: cast_channel.proto
#pragma warning disable 1591, 0612, 3021
#region Designer generated code
using pb = global::Google.Protobuf;
using pbc = global::Google.Protobuf.Collections;
using pbr = global::Google.Protobuf.Reflection;
using scg = global::System.Collections.Generic;
namespace Extensions.Api.CastChannel {
/// <summary>Holder for reflection information generated from cast_channel.proto</summary>
public static partial class CastChannelReflection {
#region Descriptor
/// <summary>File descriptor for cast_channel.proto</summary>
public static pbr::FileDescriptor Descriptor {
get { return descriptor; }
}
private static pbr::FileDescriptor descriptor;
static CastChannelReflection() {
byte[] descriptorData = global::System.Convert.FromBase64String(
string.Concat(
"ChJjYXN0X2NoYW5uZWwucHJvdG8SG2V4dGVuc2lvbnMuYXBpLmNhc3RfY2hh",
"bm5lbCLjAgoLQ2FzdE1lc3NhZ2USUgoQcHJvdG9jb2xfdmVyc2lvbhgBIAEo",
"DjI4LmV4dGVuc2lvbnMuYXBpLmNhc3RfY2hhbm5lbC5DYXN0TWVzc2FnZS5Q",
"cm90b2NvbFZlcnNpb24SEQoJc291cmNlX2lkGAIgASgJEhYKDmRlc3RpbmF0",
"aW9uX2lkGAMgASgJEhEKCW5hbWVzcGFjZRgEIAEoCRJKCgxwYXlsb2FkX3R5",
"cGUYBSABKA4yNC5leHRlbnNpb25zLmFwaS5jYXN0X2NoYW5uZWwuQ2FzdE1l",
"c3NhZ2UuUGF5bG9hZFR5cGUSFAoMcGF5bG9hZF91dGY4GAYgASgJEhYKDnBh",
"eWxvYWRfYmluYXJ5GAcgASgMIiEKD1Byb3RvY29sVmVyc2lvbhIOCgpDQVNU",
"VjJfMV8wEAAiJQoLUGF5bG9hZFR5cGUSCgoGU1RSSU5HEAASCgoGQklOQVJZ",
"EAEitwEKDUF1dGhDaGFsbGVuZ2USTAoTc2lnbmF0dXJlX2FsZ29yaXRobRgB",
"IAEoDjIvLmV4dGVuc2lvbnMuYXBpLmNhc3RfY2hhbm5lbC5TaWduYXR1cmVB",
"bGdvcml0aG0SFAoMc2VuZGVyX25vbmNlGAIgASgMEkIKDmhhc2hfYWxnb3Jp",
"dGhtGAMgASgOMiouZXh0ZW5zaW9ucy5hcGkuY2FzdF9jaGFubmVsLkhhc2hB",
"bGdvcml0aG0imQIKDEF1dGhSZXNwb25zZRIRCglzaWduYXR1cmUYASABKAwS",
"HwoXY2xpZW50X2F1dGhfY2VydGlmaWNhdGUYAiABKAwSIAoYaW50ZXJtZWRp",
"YXRlX2NlcnRpZmljYXRlGAMgAygMEkwKE3NpZ25hdHVyZV9hbGdvcml0aG0Y",
"BCABKA4yLy5leHRlbnNpb25zLmFwaS5jYXN0X2NoYW5uZWwuU2lnbmF0dXJl",
"QWxnb3JpdGhtEhQKDHNlbmRlcl9ub25jZRgFIAEoDBJCCg5oYXNoX2FsZ29y",
"aXRobRgGIAEoDjIqLmV4dGVuc2lvbnMuYXBpLmNhc3RfY2hhbm5lbC5IYXNo",
"QWxnb3JpdGhtEgsKA2NybBgHIAEoDCKjAQoJQXV0aEVycm9yEkQKCmVycm9y",
"X3R5cGUYASABKA4yMC5leHRlbnNpb25zLmFwaS5jYXN0X2NoYW5uZWwuQXV0",
"aEVycm9yLkVycm9yVHlwZSJQCglFcnJvclR5cGUSEgoOSU5URVJOQUxfRVJS",
"T1IQABIKCgZOT19UTFMQARIjCh9TSUdOQVRVUkVfQUxHT1JJVEhNX1VOQVZB",
"SUxBQkxFEAIixgEKEURldmljZUF1dGhNZXNzYWdlEj0KCWNoYWxsZW5nZRgB",
"IAEoCzIqLmV4dGVuc2lvbnMuYXBpLmNhc3RfY2hhbm5lbC5BdXRoQ2hhbGxl",
"bmdlEjsKCHJlc3BvbnNlGAIgASgLMikuZXh0ZW5zaW9ucy5hcGkuY2FzdF9j",
"aGFubmVsLkF1dGhSZXNwb25zZRI1CgVlcnJvchgDIAEoCzImLmV4dGVuc2lv",
"bnMuYXBpLmNhc3RfY2hhbm5lbC5BdXRoRXJyb3IqSgoSU2lnbmF0dXJlQWxn",
"b3JpdGhtEg8KC1VOU1BFQ0lGSUVEEAASEwoPUlNBU1NBX1BLQ1MxdjE1EAES",
"DgoKUlNBU1NBX1BTUxACKiUKDUhhc2hBbGdvcml0aG0SCAoEU0hBMRAAEgoK",
"BlNIQTI1NhABYgZwcm90bzM="));
descriptor = pbr::FileDescriptor.FromGeneratedCode(descriptorData,
new pbr::FileDescriptor[] { },
new pbr::GeneratedClrTypeInfo(new[] {typeof(global::Extensions.Api.CastChannel.SignatureAlgorithm), typeof(global::Extensions.Api.CastChannel.HashAlgorithm), }, new pbr::GeneratedClrTypeInfo[] {
new pbr::GeneratedClrTypeInfo(typeof(global::Extensions.Api.CastChannel.CastMessage), global::Extensions.Api.CastChannel.CastMessage.Parser, new[]{ "ProtocolVersion", "SourceId", "DestinationId", "Namespace", "PayloadType", "PayloadUtf8", "PayloadBinary" }, null, new[]{ typeof(global::Extensions.Api.CastChannel.CastMessage.Types.ProtocolVersion), typeof(global::Extensions.Api.CastChannel.CastMessage.Types.PayloadType) }, null),
new pbr::GeneratedClrTypeInfo(typeof(global::Extensions.Api.CastChannel.AuthChallenge), global::Extensions.Api.CastChannel.AuthChallenge.Parser, new[]{ "SignatureAlgorithm", "SenderNonce", "HashAlgorithm" }, null, null, null),
new pbr::GeneratedClrTypeInfo(typeof(global::Extensions.Api.CastChannel.AuthResponse), global::Extensions.Api.CastChannel.AuthResponse.Parser, new[]{ "Signature", "ClientAuthCertificate", "IntermediateCertificate", "SignatureAlgorithm", "SenderNonce", "HashAlgorithm", "Crl" }, null, null, null),
new pbr::GeneratedClrTypeInfo(typeof(global::Extensions.Api.CastChannel.AuthError), global::Extensions.Api.CastChannel.AuthError.Parser, new[]{ "ErrorType" }, null, new[]{ typeof(global::Extensions.Api.CastChannel.AuthError.Types.ErrorType) }, null),
new pbr::GeneratedClrTypeInfo(typeof(global::Extensions.Api.CastChannel.DeviceAuthMessage), global::Extensions.Api.CastChannel.DeviceAuthMessage.Parser, new[]{ "Challenge", "Response", "Error" }, null, null, null)
}));
}
#endregion
}
#region Enums
public enum SignatureAlgorithm {
[pbr::OriginalName("UNSPECIFIED")] Unspecified = 0,
[pbr::OriginalName("RSASSA_PKCS1v15")] RsassaPkcs1V15 = 1,
[pbr::OriginalName("RSASSA_PSS")] RsassaPss = 2,
}
public enum HashAlgorithm {
[pbr::OriginalName("SHA1")] Sha1 = 0,
[pbr::OriginalName("SHA256")] Sha256 = 1,
}
#endregion
#region Messages
public sealed partial class CastMessage : pb::IMessage<CastMessage> {
private static readonly pb::MessageParser<CastMessage> _parser = new pb::MessageParser<CastMessage>(() => new CastMessage());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<CastMessage> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Extensions.Api.CastChannel.CastChannelReflection.Descriptor.MessageTypes[0]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public CastMessage() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public CastMessage(CastMessage other) : this() {
protocolVersion_ = other.protocolVersion_;
sourceId_ = other.sourceId_;
destinationId_ = other.destinationId_;
namespace_ = other.namespace_;
payloadType_ = other.payloadType_;
payloadUtf8_ = other.payloadUtf8_;
payloadBinary_ = other.payloadBinary_;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public CastMessage Clone() {
return new CastMessage(this);
}
/// <summary>Field number for the "protocol_version" field.</summary>
public const int ProtocolVersionFieldNumber = 1;
private global::Extensions.Api.CastChannel.CastMessage.Types.ProtocolVersion protocolVersion_ = 0;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Extensions.Api.CastChannel.CastMessage.Types.ProtocolVersion ProtocolVersion {
get { return protocolVersion_; }
set {
protocolVersion_ = value;
}
}
/// <summary>Field number for the "source_id" field.</summary>
public const int SourceIdFieldNumber = 2;
private string sourceId_ = "";
/// <summary>
/// source and destination ids identify the origin and destination of the
/// message. They are used to route messages between endpoints that share a
/// device-to-device channel.
///
/// For messages between applications:
/// - The sender application id is a unique identifier generated on behalf of
/// the sender application.
/// - The receiver id is always the session id for the application.
///
/// For messages to or from the sender or receiver platform, the special ids
/// 'sender-0' and 'receiver-0' can be used.
///
/// For messages intended for all endpoints using a given channel, the
/// wildcard destination_id '*' can be used.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string SourceId {
get { return sourceId_; }
set {
sourceId_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "destination_id" field.</summary>
public const int DestinationIdFieldNumber = 3;
private string destinationId_ = "";
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string DestinationId {
get { return destinationId_; }
set {
destinationId_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "namespace" field.</summary>
public const int NamespaceFieldNumber = 4;
private string namespace_ = "";
/// <summary>
/// This is the core multiplexing key. All messages are sent on a namespace
/// and endpoints sharing a channel listen on one or more namespaces. The
/// namespace defines the protocol and semantics of the message.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string Namespace {
get { return namespace_; }
set {
namespace_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "payload_type" field.</summary>
public const int PayloadTypeFieldNumber = 5;
private global::Extensions.Api.CastChannel.CastMessage.Types.PayloadType payloadType_ = 0;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Extensions.Api.CastChannel.CastMessage.Types.PayloadType PayloadType {
get { return payloadType_; }
set {
payloadType_ = value;
}
}
/// <summary>Field number for the "payload_utf8" field.</summary>
public const int PayloadUtf8FieldNumber = 6;
private string payloadUtf8_ = "";
/// <summary>
/// Depending on payload_type, exactly one of the following optional fields
/// will always be set.
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public string PayloadUtf8 {
get { return payloadUtf8_; }
set {
payloadUtf8_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "payload_binary" field.</summary>
public const int PayloadBinaryFieldNumber = 7;
private pb::ByteString payloadBinary_ = pb::ByteString.Empty;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pb::ByteString PayloadBinary {
get { return payloadBinary_; }
set {
payloadBinary_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as CastMessage);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(CastMessage other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (ProtocolVersion != other.ProtocolVersion) return false;
if (SourceId != other.SourceId) return false;
if (DestinationId != other.DestinationId) return false;
if (Namespace != other.Namespace) return false;
if (PayloadType != other.PayloadType) return false;
if (PayloadUtf8 != other.PayloadUtf8) return false;
if (PayloadBinary != other.PayloadBinary) return false;
return true;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (ProtocolVersion != 0) hash ^= ProtocolVersion.GetHashCode();
if (SourceId.Length != 0) hash ^= SourceId.GetHashCode();
if (DestinationId.Length != 0) hash ^= DestinationId.GetHashCode();
if (Namespace.Length != 0) hash ^= Namespace.GetHashCode();
if (PayloadType != 0) hash ^= PayloadType.GetHashCode();
if (PayloadUtf8.Length != 0) hash ^= PayloadUtf8.GetHashCode();
if (PayloadBinary.Length != 0) hash ^= PayloadBinary.GetHashCode();
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
//if (ProtocolVersion != 0) {
output.WriteRawTag(8);
output.WriteEnum((int) ProtocolVersion);
//}
if (SourceId.Length != 0) {
output.WriteRawTag(18);
output.WriteString(SourceId);
}
if (DestinationId.Length != 0) {
output.WriteRawTag(26);
output.WriteString(DestinationId);
}
if (Namespace.Length != 0) {
output.WriteRawTag(34);
output.WriteString(Namespace);
}
//if (PayloadType != 0) {
output.WriteRawTag(40);
output.WriteEnum((int) PayloadType);
//}
if (PayloadUtf8.Length != 0) {
output.WriteRawTag(50);
output.WriteString(PayloadUtf8);
}
if (PayloadBinary.Length != 0) {
output.WriteRawTag(58);
output.WriteBytes(PayloadBinary);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
//if (ProtocolVersion != 0) {
size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) ProtocolVersion);
//}
if (SourceId.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(SourceId);
}
if (DestinationId.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(DestinationId);
}
if (Namespace.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(Namespace);
}
//if (PayloadType != 0) {
size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) PayloadType);
//}
if (PayloadUtf8.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeStringSize(PayloadUtf8);
}
if (PayloadBinary.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeBytesSize(PayloadBinary);
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(CastMessage other) {
if (other == null) {
return;
}
if (other.ProtocolVersion != 0) {
ProtocolVersion = other.ProtocolVersion;
}
if (other.SourceId.Length != 0) {
SourceId = other.SourceId;
}
if (other.DestinationId.Length != 0) {
DestinationId = other.DestinationId;
}
if (other.Namespace.Length != 0) {
Namespace = other.Namespace;
}
if (other.PayloadType != 0) {
PayloadType = other.PayloadType;
}
if (other.PayloadUtf8.Length != 0) {
PayloadUtf8 = other.PayloadUtf8;
}
if (other.PayloadBinary.Length != 0) {
PayloadBinary = other.PayloadBinary;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 8: {
protocolVersion_ = (global::Extensions.Api.CastChannel.CastMessage.Types.ProtocolVersion) input.ReadEnum();
break;
}
case 18: {
SourceId = input.ReadString();
break;
}
case 26: {
DestinationId = input.ReadString();
break;
}
case 34: {
Namespace = input.ReadString();
break;
}
case 40: {
payloadType_ = (global::Extensions.Api.CastChannel.CastMessage.Types.PayloadType) input.ReadEnum();
break;
}
case 50: {
PayloadUtf8 = input.ReadString();
break;
}
case 58: {
PayloadBinary = input.ReadBytes();
break;
}
}
}
}
#region Nested types
/// <summary>Container for nested types declared in the CastMessage message type.</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static partial class Types {
/// <summary>
/// Always pass a version of the protocol for future compatibility
/// requirements.
/// </summary>
public enum ProtocolVersion {
[pbr::OriginalName("CASTV2_1_0")] Castv210 = 0,
}
/// <summary>
/// Encoding and payload info follows.
/// Indicates what type of data this message carries.
/// </summary>
public enum PayloadType {
[pbr::OriginalName("STRING")] String = 0,
[pbr::OriginalName("BINARY")] Binary = 1,
}
}
#endregion
}
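  /// <summary>
  /// Illustrative sketch only (not emitted by protoc): shows how the routing fields
  /// documented on CastMessage are typically populated and how a message round-trips
  /// through the Google.Protobuf APIs (MessageExtensions.ToByteArray and
  /// MessageParser&lt;T&gt;.ParseFrom). The namespace string below is a hypothetical example.
  /// </summary>
  public static class CastMessageUsageSketch {
    public static CastMessage BuildAndRoundTrip() {
      var message = new CastMessage {
        ProtocolVersion = CastMessage.Types.ProtocolVersion.Castv210,
        SourceId = "sender-0",                      // special id for the sender platform
        DestinationId = "receiver-0",               // special id for the receiver platform
        Namespace = "urn:x-cast:com.example.demo",  // hypothetical namespace
        PayloadType = CastMessage.Types.PayloadType.String,
        PayloadUtf8 = "{\"type\":\"PING\"}"
      };
      // Serialize to the wire format and parse it back.
      byte[] wire = pb::MessageExtensions.ToByteArray(message);
      return CastMessage.Parser.ParseFrom(wire);
    }
  }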
/// <summary>
/// Messages for authentication protocol between a sender and a receiver.
/// </summary>
public sealed partial class AuthChallenge : pb::IMessage<AuthChallenge> {
private static readonly pb::MessageParser<AuthChallenge> _parser = new pb::MessageParser<AuthChallenge>(() => new AuthChallenge());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<AuthChallenge> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Extensions.Api.CastChannel.CastChannelReflection.Descriptor.MessageTypes[1]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public AuthChallenge() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public AuthChallenge(AuthChallenge other) : this() {
signatureAlgorithm_ = other.signatureAlgorithm_;
senderNonce_ = other.senderNonce_;
hashAlgorithm_ = other.hashAlgorithm_;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public AuthChallenge Clone() {
return new AuthChallenge(this);
}
/// <summary>Field number for the "signature_algorithm" field.</summary>
public const int SignatureAlgorithmFieldNumber = 1;
private global::Extensions.Api.CastChannel.SignatureAlgorithm signatureAlgorithm_ = SignatureAlgorithm.RsassaPkcs1V15;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Extensions.Api.CastChannel.SignatureAlgorithm SignatureAlgorithm {
get { return signatureAlgorithm_; }
set {
signatureAlgorithm_ = value;
}
}
/// <summary>Field number for the "sender_nonce" field.</summary>
public const int SenderNonceFieldNumber = 2;
private pb::ByteString senderNonce_ = pb::ByteString.Empty;
/// <summary>
///[default = RSASSA_PKCS1v15];
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pb::ByteString SenderNonce {
get { return senderNonce_; }
set {
senderNonce_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "hash_algorithm" field.</summary>
public const int HashAlgorithmFieldNumber = 3;
private global::Extensions.Api.CastChannel.HashAlgorithm hashAlgorithm_ = 0;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Extensions.Api.CastChannel.HashAlgorithm HashAlgorithm {
get { return hashAlgorithm_; }
set {
hashAlgorithm_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as AuthChallenge);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(AuthChallenge other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (SignatureAlgorithm != other.SignatureAlgorithm) return false;
if (SenderNonce != other.SenderNonce) return false;
if (HashAlgorithm != other.HashAlgorithm) return false;
return true;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (SignatureAlgorithm != 0) hash ^= SignatureAlgorithm.GetHashCode();
if (SenderNonce.Length != 0) hash ^= SenderNonce.GetHashCode();
if (HashAlgorithm != 0) hash ^= HashAlgorithm.GetHashCode();
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
//if (SignatureAlgorithm != 0) {
output.WriteRawTag(8);
output.WriteEnum((int) SignatureAlgorithm);
//}
if (SenderNonce.Length != 0) {
output.WriteRawTag(18);
output.WriteBytes(SenderNonce);
}
//if (HashAlgorithm != 0) {
output.WriteRawTag(24);
output.WriteEnum((int) HashAlgorithm);
//}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
//if (SignatureAlgorithm != 0) {
size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) SignatureAlgorithm);
//}
if (SenderNonce.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeBytesSize(SenderNonce);
}
//if (HashAlgorithm != 0) {
size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) HashAlgorithm);
//}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(AuthChallenge other) {
if (other == null) {
return;
}
if (other.SignatureAlgorithm != 0) {
SignatureAlgorithm = other.SignatureAlgorithm;
}
if (other.SenderNonce.Length != 0) {
SenderNonce = other.SenderNonce;
}
if (other.HashAlgorithm != 0) {
HashAlgorithm = other.HashAlgorithm;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 8: {
signatureAlgorithm_ = (global::Extensions.Api.CastChannel.SignatureAlgorithm) input.ReadEnum();
break;
}
case 18: {
SenderNonce = input.ReadBytes();
break;
}
case 24: {
hashAlgorithm_ = (global::Extensions.Api.CastChannel.HashAlgorithm) input.ReadEnum();
break;
}
}
}
}
}
public sealed partial class AuthResponse : pb::IMessage<AuthResponse> {
private static readonly pb::MessageParser<AuthResponse> _parser = new pb::MessageParser<AuthResponse>(() => new AuthResponse());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<AuthResponse> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Extensions.Api.CastChannel.CastChannelReflection.Descriptor.MessageTypes[2]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public AuthResponse() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public AuthResponse(AuthResponse other) : this() {
signature_ = other.signature_;
clientAuthCertificate_ = other.clientAuthCertificate_;
intermediateCertificate_ = other.intermediateCertificate_.Clone();
signatureAlgorithm_ = other.signatureAlgorithm_;
senderNonce_ = other.senderNonce_;
hashAlgorithm_ = other.hashAlgorithm_;
crl_ = other.crl_;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public AuthResponse Clone() {
return new AuthResponse(this);
}
/// <summary>Field number for the "signature" field.</summary>
public const int SignatureFieldNumber = 1;
private pb::ByteString signature_ = pb::ByteString.Empty;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pb::ByteString Signature {
get { return signature_; }
set {
signature_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "client_auth_certificate" field.</summary>
public const int ClientAuthCertificateFieldNumber = 2;
private pb::ByteString clientAuthCertificate_ = pb::ByteString.Empty;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pb::ByteString ClientAuthCertificate {
get { return clientAuthCertificate_; }
set {
clientAuthCertificate_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "intermediate_certificate" field.</summary>
public const int IntermediateCertificateFieldNumber = 3;
private static readonly pb::FieldCodec<pb::ByteString> _repeated_intermediateCertificate_codec
= pb::FieldCodec.ForBytes(26);
private readonly pbc::RepeatedField<pb::ByteString> intermediateCertificate_ = new pbc::RepeatedField<pb::ByteString>();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pbc::RepeatedField<pb::ByteString> IntermediateCertificate {
get { return intermediateCertificate_; }
}
/// <summary>Field number for the "signature_algorithm" field.</summary>
public const int SignatureAlgorithmFieldNumber = 4;
private global::Extensions.Api.CastChannel.SignatureAlgorithm signatureAlgorithm_ = SignatureAlgorithm.RsassaPkcs1V15;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Extensions.Api.CastChannel.SignatureAlgorithm SignatureAlgorithm {
get { return signatureAlgorithm_; }
set {
signatureAlgorithm_ = value;
}
}
/// <summary>Field number for the "sender_nonce" field.</summary>
public const int SenderNonceFieldNumber = 5;
private pb::ByteString senderNonce_ = pb::ByteString.Empty;
/// <summary>
///[default = RSASSA_PKCS1v15];
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pb::ByteString SenderNonce {
get { return senderNonce_; }
set {
senderNonce_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
/// <summary>Field number for the "hash_algorithm" field.</summary>
public const int HashAlgorithmFieldNumber = 6;
private global::Extensions.Api.CastChannel.HashAlgorithm hashAlgorithm_ = 0;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Extensions.Api.CastChannel.HashAlgorithm HashAlgorithm {
get { return hashAlgorithm_; }
set {
hashAlgorithm_ = value;
}
}
/// <summary>Field number for the "crl" field.</summary>
public const int CrlFieldNumber = 7;
private pb::ByteString crl_ = pb::ByteString.Empty;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public pb::ByteString Crl {
get { return crl_; }
set {
crl_ = pb::ProtoPreconditions.CheckNotNull(value, "value");
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as AuthResponse);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(AuthResponse other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (Signature != other.Signature) return false;
if (ClientAuthCertificate != other.ClientAuthCertificate) return false;
if(!intermediateCertificate_.Equals(other.intermediateCertificate_)) return false;
if (SignatureAlgorithm != other.SignatureAlgorithm) return false;
if (SenderNonce != other.SenderNonce) return false;
if (HashAlgorithm != other.HashAlgorithm) return false;
if (Crl != other.Crl) return false;
return true;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (Signature.Length != 0) hash ^= Signature.GetHashCode();
if (ClientAuthCertificate.Length != 0) hash ^= ClientAuthCertificate.GetHashCode();
hash ^= intermediateCertificate_.GetHashCode();
if (SignatureAlgorithm != 0) hash ^= SignatureAlgorithm.GetHashCode();
if (SenderNonce.Length != 0) hash ^= SenderNonce.GetHashCode();
if (HashAlgorithm != 0) hash ^= HashAlgorithm.GetHashCode();
if (Crl.Length != 0) hash ^= Crl.GetHashCode();
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (Signature.Length != 0) {
output.WriteRawTag(10);
output.WriteBytes(Signature);
}
if (ClientAuthCertificate.Length != 0) {
output.WriteRawTag(18);
output.WriteBytes(ClientAuthCertificate);
}
intermediateCertificate_.WriteTo(output, _repeated_intermediateCertificate_codec);
//if (SignatureAlgorithm != 0) {
output.WriteRawTag(32);
output.WriteEnum((int) SignatureAlgorithm);
//}
if (SenderNonce.Length != 0) {
output.WriteRawTag(42);
output.WriteBytes(SenderNonce);
}
//if (HashAlgorithm != 0) {
output.WriteRawTag(48);
output.WriteEnum((int) HashAlgorithm);
//}
if (Crl.Length != 0) {
output.WriteRawTag(58);
output.WriteBytes(Crl);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (Signature.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeBytesSize(Signature);
}
if (ClientAuthCertificate.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeBytesSize(ClientAuthCertificate);
}
size += intermediateCertificate_.CalculateSize(_repeated_intermediateCertificate_codec);
//if (SignatureAlgorithm != 0) {
size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) SignatureAlgorithm);
//}
if (SenderNonce.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeBytesSize(SenderNonce);
}
//if (HashAlgorithm != 0) {
size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) HashAlgorithm);
//}
if (Crl.Length != 0) {
size += 1 + pb::CodedOutputStream.ComputeBytesSize(Crl);
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(AuthResponse other) {
if (other == null) {
return;
}
if (other.Signature.Length != 0) {
Signature = other.Signature;
}
if (other.ClientAuthCertificate.Length != 0) {
ClientAuthCertificate = other.ClientAuthCertificate;
}
intermediateCertificate_.Add(other.intermediateCertificate_);
if (other.SignatureAlgorithm != 0) {
SignatureAlgorithm = other.SignatureAlgorithm;
}
if (other.SenderNonce.Length != 0) {
SenderNonce = other.SenderNonce;
}
if (other.HashAlgorithm != 0) {
HashAlgorithm = other.HashAlgorithm;
}
if (other.Crl.Length != 0) {
Crl = other.Crl;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 10: {
Signature = input.ReadBytes();
break;
}
case 18: {
ClientAuthCertificate = input.ReadBytes();
break;
}
case 26: {
intermediateCertificate_.AddEntriesFrom(input, _repeated_intermediateCertificate_codec);
break;
}
case 32: {
signatureAlgorithm_ = (global::Extensions.Api.CastChannel.SignatureAlgorithm) input.ReadEnum();
break;
}
case 42: {
SenderNonce = input.ReadBytes();
break;
}
case 48: {
hashAlgorithm_ = (global::Extensions.Api.CastChannel.HashAlgorithm) input.ReadEnum();
break;
}
case 58: {
Crl = input.ReadBytes();
break;
}
}
}
}
}
public sealed partial class AuthError : pb::IMessage<AuthError> {
private static readonly pb::MessageParser<AuthError> _parser = new pb::MessageParser<AuthError>(() => new AuthError());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<AuthError> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Extensions.Api.CastChannel.CastChannelReflection.Descriptor.MessageTypes[3]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public AuthError() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public AuthError(AuthError other) : this() {
errorType_ = other.errorType_;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public AuthError Clone() {
return new AuthError(this);
}
/// <summary>Field number for the "error_type" field.</summary>
public const int ErrorTypeFieldNumber = 1;
private global::Extensions.Api.CastChannel.AuthError.Types.ErrorType errorType_ = 0;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Extensions.Api.CastChannel.AuthError.Types.ErrorType ErrorType {
get { return errorType_; }
set {
errorType_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as AuthError);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(AuthError other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (ErrorType != other.ErrorType) return false;
return true;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (ErrorType != 0) hash ^= ErrorType.GetHashCode();
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
//if (ErrorType != 0) {
output.WriteRawTag(8);
output.WriteEnum((int) ErrorType);
//}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
//if (ErrorType != 0) {
size += 1 + pb::CodedOutputStream.ComputeEnumSize((int) ErrorType);
//}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(AuthError other) {
if (other == null) {
return;
}
if (other.ErrorType != 0) {
ErrorType = other.ErrorType;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 8: {
errorType_ = (global::Extensions.Api.CastChannel.AuthError.Types.ErrorType) input.ReadEnum();
break;
}
}
}
}
#region Nested types
/// <summary>Container for nested types declared in the AuthError message type.</summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static partial class Types {
public enum ErrorType {
[pbr::OriginalName("INTERNAL_ERROR")] InternalError = 0,
/// <summary>
/// The underlying connection is not TLS
/// </summary>
[pbr::OriginalName("NO_TLS")] NoTls = 1,
[pbr::OriginalName("SIGNATURE_ALGORITHM_UNAVAILABLE")] SignatureAlgorithmUnavailable = 2,
}
}
#endregion
}
public sealed partial class DeviceAuthMessage : pb::IMessage<DeviceAuthMessage> {
private static readonly pb::MessageParser<DeviceAuthMessage> _parser = new pb::MessageParser<DeviceAuthMessage>(() => new DeviceAuthMessage());
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pb::MessageParser<DeviceAuthMessage> Parser { get { return _parser; } }
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public static pbr::MessageDescriptor Descriptor {
get { return global::Extensions.Api.CastChannel.CastChannelReflection.Descriptor.MessageTypes[4]; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
pbr::MessageDescriptor pb::IMessage.Descriptor {
get { return Descriptor; }
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public DeviceAuthMessage() {
OnConstruction();
}
partial void OnConstruction();
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public DeviceAuthMessage(DeviceAuthMessage other) : this() {
Challenge = other.challenge_ != null ? other.Challenge.Clone() : null;
Response = other.response_ != null ? other.Response.Clone() : null;
Error = other.error_ != null ? other.Error.Clone() : null;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public DeviceAuthMessage Clone() {
return new DeviceAuthMessage(this);
}
/// <summary>Field number for the "challenge" field.</summary>
public const int ChallengeFieldNumber = 1;
private global::Extensions.Api.CastChannel.AuthChallenge challenge_;
/// <summary>
/// Request fields
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Extensions.Api.CastChannel.AuthChallenge Challenge {
get { return challenge_; }
set {
challenge_ = value;
}
}
/// <summary>Field number for the "response" field.</summary>
public const int ResponseFieldNumber = 2;
private global::Extensions.Api.CastChannel.AuthResponse response_;
/// <summary>
/// Response fields
/// </summary>
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Extensions.Api.CastChannel.AuthResponse Response {
get { return response_; }
set {
response_ = value;
}
}
/// <summary>Field number for the "error" field.</summary>
public const int ErrorFieldNumber = 3;
private global::Extensions.Api.CastChannel.AuthError error_;
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public global::Extensions.Api.CastChannel.AuthError Error {
get { return error_; }
set {
error_ = value;
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override bool Equals(object other) {
return Equals(other as DeviceAuthMessage);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public bool Equals(DeviceAuthMessage other) {
if (ReferenceEquals(other, null)) {
return false;
}
if (ReferenceEquals(other, this)) {
return true;
}
if (!object.Equals(Challenge, other.Challenge)) return false;
if (!object.Equals(Response, other.Response)) return false;
if (!object.Equals(Error, other.Error)) return false;
return true;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override int GetHashCode() {
int hash = 1;
if (challenge_ != null) hash ^= Challenge.GetHashCode();
if (response_ != null) hash ^= Response.GetHashCode();
if (error_ != null) hash ^= Error.GetHashCode();
return hash;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public override string ToString() {
return pb::JsonFormatter.ToDiagnosticString(this);
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void WriteTo(pb::CodedOutputStream output) {
if (challenge_ != null) {
output.WriteRawTag(10);
output.WriteMessage(Challenge);
}
if (response_ != null) {
output.WriteRawTag(18);
output.WriteMessage(Response);
}
if (error_ != null) {
output.WriteRawTag(26);
output.WriteMessage(Error);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public int CalculateSize() {
int size = 0;
if (challenge_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(Challenge);
}
if (response_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(Response);
}
if (error_ != null) {
size += 1 + pb::CodedOutputStream.ComputeMessageSize(Error);
}
return size;
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(DeviceAuthMessage other) {
if (other == null) {
return;
}
if (other.challenge_ != null) {
if (challenge_ == null) {
challenge_ = new global::Extensions.Api.CastChannel.AuthChallenge();
}
Challenge.MergeFrom(other.Challenge);
}
if (other.response_ != null) {
if (response_ == null) {
response_ = new global::Extensions.Api.CastChannel.AuthResponse();
}
Response.MergeFrom(other.Response);
}
if (other.error_ != null) {
if (error_ == null) {
error_ = new global::Extensions.Api.CastChannel.AuthError();
}
Error.MergeFrom(other.Error);
}
}
[global::System.Diagnostics.DebuggerNonUserCodeAttribute]
public void MergeFrom(pb::CodedInputStream input) {
uint tag;
while ((tag = input.ReadTag()) != 0) {
switch(tag) {
default:
input.SkipLastField();
break;
case 10: {
if (challenge_ == null) {
challenge_ = new global::Extensions.Api.CastChannel.AuthChallenge();
}
input.ReadMessage(challenge_);
break;
}
case 18: {
if (response_ == null) {
response_ = new global::Extensions.Api.CastChannel.AuthResponse();
}
input.ReadMessage(response_);
break;
}
case 26: {
if (error_ == null) {
error_ = new global::Extensions.Api.CastChannel.AuthError();
}
input.ReadMessage(error_);
break;
}
}
}
}
}
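  /// <summary>
  /// Illustrative sketch only (not emitted by protoc): composes the request side of the
  /// device authentication handshake, i.e. a DeviceAuthMessage carrying an AuthChallenge.
  /// The algorithm choices are example values, not required defaults.
  /// </summary>
  public static class DeviceAuthMessageUsageSketch {
    public static DeviceAuthMessage BuildChallenge(pb::ByteString senderNonce) {
      // Per the field comments above, Challenge is the request side; a receiver
      // replies with either the Response or the Error field populated.
      return new DeviceAuthMessage {
        Challenge = new AuthChallenge {
          SignatureAlgorithm = SignatureAlgorithm.RsassaPkcs1V15,
          HashAlgorithm = HashAlgorithm.Sha256,
          SenderNonce = senderNonce
        }
      };
    }
  }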
#endregion
}
#endregion Designer generated code
| |
// --------------------------------------------------------------------------------------------------------------------
// <copyright file="PhotonHandler.cs" company="Exit Games GmbH">
// Part of: Photon Unity Networking
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
#if UNITY_5 && (!UNITY_5_0 && !UNITY_5_1 && !UNITY_5_2 && !UNITY_5_3) || UNITY_6
#define UNITY_MIN_5_4
#endif
using System;
using System.Collections;
using System.Diagnostics;
using ExitGames.Client.Photon;
using UnityEngine;
using Debug = UnityEngine.Debug;
using Hashtable = ExitGames.Client.Photon.Hashtable;
using SupportClassPun = ExitGames.Client.Photon.SupportClass;
#if UNITY_5_5_OR_NEWER
using UnityEngine.Profiling;
#endif
/// <summary>
/// Internal MonoBehaviour that allows Photon to run an Update loop.
/// </summary>
internal class PhotonHandler : MonoBehaviour
{
public static PhotonHandler SP;
public int updateInterval; // time [ms] between consecutive SendOutgoingCommands calls
public int updateIntervalOnSerialize; // time [ms] between consecutive RunViewUpdate calls (sending syncs, etc)
private int nextSendTickCount = 0;
private int nextSendTickCountOnSerialize = 0;
private static bool sendThreadShouldRun;
private static Stopwatch timerToStopConnectionInBackground;
protected internal static bool AppQuits;
protected internal static Type PingImplementation = null;
protected void Awake()
{
if (SP != null && SP != this && SP.gameObject != null)
{
GameObject.DestroyImmediate(SP.gameObject);
}
SP = this;
DontDestroyOnLoad(this.gameObject);
this.updateInterval = 1000 / PhotonNetwork.sendRate;
this.updateIntervalOnSerialize = 1000 / PhotonNetwork.sendRateOnSerialize;
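// Example: a sendRate of 20 gives a 50 ms updateInterval, and a sendRateOnSerialize of 10 gives a 100 ms updateIntervalOnSerialize.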
PhotonHandler.StartFallbackSendAckThread();
}
#if UNITY_MIN_5_4
protected void Start()
{
UnityEngine.SceneManagement.SceneManager.sceneLoaded += (scene, loadingMode) =>
{
PhotonNetwork.networkingPeer.NewSceneLoaded();
PhotonNetwork.networkingPeer.SetLevelInPropsIfSynced(SceneManagerHelper.ActiveSceneName);
};
}
#else
/// <summary>Called by Unity after a new level was loaded.</summary>
protected void OnLevelWasLoaded(int level)
{
PhotonNetwork.networkingPeer.NewSceneLoaded();
PhotonNetwork.networkingPeer.SetLevelInPropsIfSynced(SceneManagerHelper.ActiveSceneName);
}
#endif
/// <summary>Called by Unity when the application is closed. Disconnects.</summary>
protected void OnApplicationQuit()
{
PhotonHandler.AppQuits = true;
PhotonHandler.StopFallbackSendAckThread();
PhotonNetwork.Disconnect();
}
/// <summary>
/// Called by Unity when the application gets paused (e.g. on Android when in background).
/// </summary>
/// <remarks>
/// Sets a disconnect timer when PhotonNetwork.BackgroundTimeout > 0.1f. See PhotonNetwork.BackgroundTimeout.
///
/// Some versions of Unity report incorrect pause values on Android (and possibly on other platforms).
/// </remarks>
/// <param name="pause">If the app pauses.</param>
protected void OnApplicationPause(bool pause)
{
if (PhotonNetwork.BackgroundTimeout > 0.1f)
{
if (timerToStopConnectionInBackground == null)
{
timerToStopConnectionInBackground = new Stopwatch();
}
timerToStopConnectionInBackground.Reset();
if (pause)
{
timerToStopConnectionInBackground.Start();
}
else
{
timerToStopConnectionInBackground.Stop();
}
}
}
/// <summary>Called by Unity when the play mode ends. Used for cleanup.</summary>
protected void OnDestroy()
{
//Debug.Log("OnDestroy on PhotonHandler.");
PhotonHandler.StopFallbackSendAckThread();
//PhotonNetwork.Disconnect();
}
protected void Update()
{
if (PhotonNetwork.networkingPeer == null)
{
Debug.LogError("NetworkPeer broke!");
return;
}
if (PhotonNetwork.connectionStateDetailed == ClientState.PeerCreated || PhotonNetwork.connectionStateDetailed == ClientState.Disconnected || PhotonNetwork.offlineMode)
{
return;
}
// The messageQueue might be paused. In that case a thread will send acknowledgements only; nothing else to do here.
if (!PhotonNetwork.isMessageQueueRunning)
{
return;
}
bool doDispatch = true;
while (PhotonNetwork.isMessageQueueRunning && doDispatch)
{
// DispatchIncomingCommands() returns true if it found any command to dispatch (event, result or state change)
Profiler.BeginSample("DispatchIncomingCommands");
doDispatch = PhotonNetwork.networkingPeer.DispatchIncomingCommands();
Profiler.EndSample();
}
int currentMsSinceStart = (int)(Time.realtimeSinceStartup * 1000); // avoiding Environment.TickCount, which could be negative on long-running platforms
if (PhotonNetwork.isMessageQueueRunning && currentMsSinceStart > this.nextSendTickCountOnSerialize)
{
PhotonNetwork.networkingPeer.RunViewUpdate();
this.nextSendTickCountOnSerialize = currentMsSinceStart + this.updateIntervalOnSerialize;
this.nextSendTickCount = 0; // immediately send when synchronization code was running
}
currentMsSinceStart = (int)(Time.realtimeSinceStartup * 1000);
if (currentMsSinceStart > this.nextSendTickCount)
{
bool doSend = true;
while (PhotonNetwork.isMessageQueueRunning && doSend)
{
// Send all outgoing commands
Profiler.BeginSample("SendOutgoingCommands");
doSend = PhotonNetwork.networkingPeer.SendOutgoingCommands();
Profiler.EndSample();
}
this.nextSendTickCount = currentMsSinceStart + this.updateInterval;
}
}
protected void OnJoinedRoom()
{
PhotonNetwork.networkingPeer.LoadLevelIfSynced();
}
protected void OnCreatedRoom()
{
PhotonNetwork.networkingPeer.SetLevelInPropsIfSynced(SceneManagerHelper.ActiveSceneName);
}
public static void StartFallbackSendAckThread()
{
#if !UNITY_WEBGL
if (sendThreadShouldRun)
{
return;
}
sendThreadShouldRun = true;
SupportClassPun.CallInBackground(FallbackSendAckThread); // thread will call this every 100ms until method returns false
#endif
}
public static void StopFallbackSendAckThread()
{
#if !UNITY_WEBGL
sendThreadShouldRun = false;
#endif
}
/// <summary>A thread which runs independent from the Update() calls. Keeps connections online while loading or in background. See PhotonNetwork.BackgroundTimeout.</summary>
public static bool FallbackSendAckThread()
{
if (sendThreadShouldRun && !PhotonNetwork.offlineMode && PhotonNetwork.networkingPeer != null)
{
// check if the client should disconnect after some seconds in background
if (timerToStopConnectionInBackground != null && PhotonNetwork.BackgroundTimeout > 0.1f)
{
if (timerToStopConnectionInBackground.ElapsedMilliseconds > PhotonNetwork.BackgroundTimeout * 1000)
{
if (PhotonNetwork.connected)
{
PhotonNetwork.Disconnect();
}
timerToStopConnectionInBackground.Stop();
timerToStopConnectionInBackground.Reset();
return sendThreadShouldRun;
}
}
if (PhotonNetwork.networkingPeer.ConnectionTime - PhotonNetwork.networkingPeer.LastSendOutgoingTime > 200)
{
PhotonNetwork.networkingPeer.SendAcksOnly();
}
}
return sendThreadShouldRun;
}
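// Usage sketch (not part of PUN itself; the SceneManager call is only an example of blocking work):
// a common pattern is to pause the message queue during a blocking scene load while this
// fallback thread keeps acknowledgements flowing:
//   PhotonNetwork.isMessageQueueRunning = false;   // stop dispatching events during the load
//   PhotonHandler.StartFallbackSendAckThread();    // keep the connection alive (acks only)
//   SceneManager.LoadScene("Game");                // blocking work (illustrative)
//   PhotonNetwork.isMessageQueueRunning = true;    // resume once the new scene is ready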
#region Photon Cloud Ping Evaluation
private const string PlayerPrefsKey = "PUNCloudBestRegion";
internal static CloudRegionCode BestRegionCodeCurrently = CloudRegionCode.none; // default to none
internal static CloudRegionCode BestRegionCodeInPreferences
{
get
{
string prefsRegionCode = PlayerPrefs.GetString(PlayerPrefsKey, "");
if (!string.IsNullOrEmpty(prefsRegionCode))
{
CloudRegionCode loadedRegion = Region.Parse(prefsRegionCode);
return loadedRegion;
}
return CloudRegionCode.none;
}
set
{
if (value == CloudRegionCode.none)
{
PlayerPrefs.DeleteKey(PlayerPrefsKey);
}
else
{
PlayerPrefs.SetString(PlayerPrefsKey, value.ToString());
}
}
}
internal protected static void PingAvailableRegionsAndConnectToBest()
{
SP.StartCoroutine(SP.PingAvailableRegionsCoroutine(true));
}
internal IEnumerator PingAvailableRegionsCoroutine(bool connectToBest)
{
BestRegionCodeCurrently = CloudRegionCode.none;
while (PhotonNetwork.networkingPeer.AvailableRegions == null)
{
if (PhotonNetwork.connectionStateDetailed != ClientState.ConnectingToNameServer && PhotonNetwork.connectionStateDetailed != ClientState.ConnectedToNameServer)
{
Debug.LogError("Call ConnectToNameServer to ping available regions.");
yield break; // break if we don't connect to the nameserver at all
}
Debug.Log("Waiting for AvailableRegions. State: " + PhotonNetwork.connectionStateDetailed + " Server: " + PhotonNetwork.Server + " PhotonNetwork.networkingPeer.AvailableRegions " + (PhotonNetwork.networkingPeer.AvailableRegions != null));
yield return new WaitForSeconds(0.25f); // wait until pinging finished (offline mode won't ping)
}
if (PhotonNetwork.networkingPeer.AvailableRegions == null || PhotonNetwork.networkingPeer.AvailableRegions.Count == 0)
{
Debug.LogError("No regions available. Are you sure your appid is valid and setup?");
yield break; // break if we don't get regions at all
}
PhotonPingManager pingManager = new PhotonPingManager();
foreach (Region region in PhotonNetwork.networkingPeer.AvailableRegions)
{
SP.StartCoroutine(pingManager.PingSocket(region));
}
while (!pingManager.Done)
{
yield return new WaitForSeconds(0.1f); // wait until pinging finished (offline mode won't ping)
}
Region best = pingManager.BestRegion;
PhotonHandler.BestRegionCodeCurrently = best.Code;
PhotonHandler.BestRegionCodeInPreferences = best.Code;
Debug.Log("Found best region: " + best.Code + " ping: " + best.Ping + ". Calling ConnectToRegionMaster() is: " + connectToBest);
if (connectToBest)
{
PhotonNetwork.networkingPeer.ConnectToRegionMaster(best.Code);
}
}
#endregion
}
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
namespace Bond.Protocols
{
using System;
using System.Collections.Generic;
using System.IO;
using System.Runtime.CompilerServices;
using System.Xml;
[Reader(typeof(SimpleXmlReader))]
public struct SimpleXmlWriter : IProtocolWriter, ITextProtocolWriter
{
public struct Settings
{
public static readonly Settings Default = new Settings();
public bool UseNamespaces { set; get; }
}
readonly XmlWriter writer;
readonly Stack<string> ns;
public SimpleXmlWriter(XmlWriter writer)
: this(writer, Settings.Default)
{}
public SimpleXmlWriter(Stream stream)
: this(stream, Settings.Default)
{}
public SimpleXmlWriter(Stream stream, Settings settings)
: this(XmlWriter.Create(stream, new XmlWriterSettings { OmitXmlDeclaration = true, Indent = true }), settings)
{}
public SimpleXmlWriter(XmlWriter writer, Settings settings)
{
this.writer = writer;
ns = settings.UseNamespaces ? new Stack<string>() : null;
}
public void Flush()
{
writer.Flush();
}
#region IProtocolWriter
public void WriteVersion()
{
throw new NotImplementedException();
}
#region Struct
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteStructBegin(Metadata metadata)
{
writer.WriteStartElement(metadata.GetXmlName());
PushNamespace(metadata);
}
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteBaseBegin(Metadata metadata)
{
PushNamespace(metadata);
}
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteStructEnd()
{
PopNamespace();
writer.WriteEndElement();
}
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteBaseEnd()
{
PopNamespace();
}
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteFieldBegin(BondDataType dataType, ushort id, Metadata metadata)
{
writer.WriteStartElement(Prefix, metadata.name, null);
}
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteFieldEnd()
{
writer.WriteEndElement();
}
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteFieldOmitted(BondDataType dataType, ushort id, Metadata metadata)
{ }
#endregion
#region Containers
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteContainerBegin(int count, BondDataType elementType)
{ }
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteContainerBegin(int count, BondDataType keyType, BondDataType valueType)
{ }
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteContainerEnd()
{ }
#region ITextProtocolWriter
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteItemBegin()
{
writer.WriteStartElement("Item");
}
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteItemEnd()
{
writer.WriteEndElement();
}
#endregion
#endregion
#region Scalars
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteInt8(sbyte value)
{
writer.WriteValue(value);
}
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteInt16(short value)
{
writer.WriteValue(value);
}
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteInt32(int value)
{
writer.WriteValue(value);
}
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteInt64(long value)
{
writer.WriteValue(value);
}
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteUInt8(byte value)
{
writer.WriteValue(value);
}
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteUInt16(ushort value)
{
writer.WriteValue(value);
}
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteUInt32(uint value)
{
writer.WriteValue(value);
}
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteUInt64(ulong value)
{
writer.WriteValue(value.ToString());
}
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteFloat(float value)
{
writer.WriteValue(value);
}
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteDouble(double value)
{
writer.WriteValue(value);
}
public void WriteBytes(ArraySegment<byte> data)
{
// TODO: for now handle blob as array of bytes; consider CDATA
var end = data.Offset + data.Count;
for (var i = data.Offset; i != end; ++i)
{
WriteItemBegin();
WriteInt8((sbyte)data.Array[i]);
WriteItemEnd();
}
}
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteBool(bool value)
{
writer.WriteValue(value);
}
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteString(string value)
{
writer.WriteValue(value);
}
#if NET45
[MethodImpl(MethodImplOptions.AggressiveInlining)]
#endif
public void WriteWString(string value)
{
writer.WriteValue(value);
}
#endregion
#endregion
void PushNamespace(Metadata metadata)
{
if (ns == null) return;
var prefixLength = ns.Count + 1;
var prefix = prefixLength > metadata.name.Length ?
metadata.name.PadRight(prefixLength, '_') :
metadata.name.Substring(0, prefixLength);
prefix = prefix.EncodeXmlName();
ns.Push(prefix);
writer.WriteAttributeString("xmlns", prefix, null, metadata.GetXmlNamespace());
}
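// Worked example (illustrative): serializing a struct named "Example" whose base is "Base"
// with UseNamespaces enabled:
//   WriteStructBegin("Example")  -> ns.Count = 0, prefixLength = 1, prefix "E"
//   WriteBaseBegin("Base")       -> ns.Count = 1, prefixLength = 2, prefix "Ba"
// A name shorter than the required length, e.g. "X" at depth 3, is padded to "X__"
// before being registered as the xmlns prefix for that level.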
void PopNamespace()
{
if (ns != null) ns.Pop();
}
string Prefix { get { return ns != null ? ns.Peek() : string.Empty; } }
}
}
| |
using UnityEngine;
using UnityEditor;
using System;
using System.IO;
using System.Text.RegularExpressions;
using System.Collections.Generic;
namespace PixelCrushers.DialogueSystem
{
/// <summary>
/// This is an example converter that demonstrates how to make a subclass of
/// AbstractConverterWindow to make your own converter for the Dialogue System.
/// It converts CSV (comma-separated value) files in a specific format into a
/// dialogue database.
///
/// In the CSV file, each section is optional. The sections are:
///
/// - Database
/// - Actors
/// - Items (also used for quests)
/// - Locations
/// - Variables
/// - Conversations (high level information about each conversation)
/// - DialogueEntries (individual dialogue entries in the conversations)
/// - OutgoingLinks (links between dialogue entries)
///
/// The Database section must contain:
/// <pre>
/// Database
/// Name,Version,Author,Description,Emphasis1,Emphasis2,Emphasis3,Emphasis4
/// (name),(version),(author),(description),(emphasis setting 1 in format #rrggbbaa biu),(emphasis2),(emphasis3),(emphasis4)
/// Global User Script
/// (luacode)
/// </pre>
///
/// The Actors section must contain:
/// <pre>
/// Actors
/// ID,Portrait,AltPortraits,Name,Pictures,Description,IsPlayer
/// Number,Special,Special,Text,Files,Text,Boolean
/// (id),(texturename),[(texturenames)],(name),[(picturenames)],(description),(isplayer)
/// ...
/// </pre>
///
/// The Items, Locations, Variables, and Conversations section must contain:
/// <pre>
/// (Heading) -- where this is Items, Locations, Variables, or Conversations
/// ID,(field),(field),(field)...
/// Number,(fieldtype),(fieldtype),(fieldtype)...
/// (id),(fieldvalue),(fieldvalue),(fieldvalue)...
/// </pre>
/// The Variables section may have a final column InitialValueType that specifies the type (Text, Number, or Boolean).
///
/// The DialogueEntries section must contain:
/// <pre>
/// DialogueEntries
/// entrytag,ConvID,ID,Actor,Conversant,Title,MenuText,DialogueText,IsGroup,FalseConditionAction,ConditionPriority,Conditions,Script,Sequence,(field),(field)...,canvasRect
/// Text,Number,Number,Number,Number,Text,Text,Text,Boolean,Special,Special,Text,Text,Text,(fieldtype),(fieldtype),...,Text
/// (entrytag),(ConvID),(ID),(ActorID),(ConversantID),(Title),(MenuText),(DialogueText),(IsGroup),(FalseConditionAction),(ConditionPriority),(Conditions),(Script),(Sequence),(fieldvalue),(fieldvalue)...,#;#
/// </pre>
/// However canvasRect (the last column) is optional.
///
/// The OutgoingLinks section must contain:
/// <pre>
/// OutgoingLinks
/// OriginConvID,OriginID,DestConvID,DestID,ConditionPriority
/// Number,Number,Number,Number,Special
/// #,#,#,#,(priority)
/// </pre>
///
/// Omitted values in any particular asset should be tagged with "{{omit}}".
/// </summary>
public class CSVConverterWindow : AbstractConverterWindow
{
/// <summary>
/// Gets the source file extension (CSV).
/// </summary>
/// <value>The source file extension (csv).</value>
public override string SourceFileExtension { get { return "csv"; } }
/// <summary>
/// Gets the EditorPrefs key to save the converter window's settings under.
/// </summary>
/// <value>The EditorPrefs key.</value>
public override string PrefsKey { get { return "PixelCrushers.DialogueSystem.CSVConverterSettings"; } }
/// <summary>
/// Menu item code to create a CSVConverterWindow.
/// </summary>
[MenuItem("Window/Dialogue System/Converters/CSV Converter", false, 1)]
public static void Init()
{
EditorWindow.GetWindow(typeof(CSVConverterWindow), false, "CSV Converter");
}
/// <summary>
/// A list of all asset type headings.
/// </summary>
private static List<string> AssetTypeHeadings = new List<string>()
{ "Database", "Actors", "Items", "Locations", "Variables", "Conversations", "DialogueEntries", "OutgoingLinks" };
/// <summary>
/// Special values aren't actually fields in an asset's Field array, but they're still
/// columns in the CSV that must be read and assigned to the asset's variables.
/// </summary>
private static List<string> DefaultSpecialValues = new List<string>()
{ "ID", "InitialValueType" };
/// <summary>
/// Portrait and AltPortraits are variables in the Actor class, not fields.
/// </summary>
private static List<string> ActorSpecialValues = new List<string>()
{ "ID", "Portrait", "AltPortraits" };
/// <summary>
/// The exporter manually places these columns at the front of dialogue entry rows, and the
/// importer reads them in the order that they were exported. Some of them are special
/// variables of the DialogueEntry class and not actually fields. MenuText and DialogueText
/// are fields, but the exporter puts them with this front section to make them more accessible
/// to people editing the CSV file.
/// </summary>
private static List<string> DialogueEntrySpecialValues = new List<string>()
{ "entrytag", "ConvID", "ID", "Actor", "Conversant", "Title", "MenuText", "DialogueText",
"IsGroup", "FalseConditionAction", "ConditionPriority", "Conditions", "Script" };
private static bool sortByID = true;
/// <summary>
/// Draws the destination section. You can override this if you want to draw
/// more than the default controls.
/// </summary>
protected override void DrawDestinationSection()
{
base.DrawDestinationSection();
EditorWindowTools.StartIndentedSection();
sortByID = EditorGUILayout.Toggle("Sort By ID", sortByID);
EditorWindowTools.EndIndentedSection();
}
private const int MaxIterations = 999999;
/// <summary>
/// Copies the source CSV file to a dialogue database. This method demonstrates
/// the helper methods LoadSourceFile(), IsSourceAtEnd(), PeekNextSourceLine(),
/// and GetNextSourceLine().
/// </summary>
/// <param name="database">Dialogue database to copy into.</param>
protected override void CopySourceToDialogueDatabase(DialogueDatabase database)
{
Debug.Log("Copying source to dialogue database");
var hadError = false;
var readConversations = false;
var readDialogueEntries = false;
try
{
try
{
LoadSourceFile();
int numLines = sourceLines.Count;
int safeguard = 0;
bool cancel = false;
while (!IsSourceAtEnd() && (safeguard < MaxIterations) && !cancel)
{
safeguard++;
cancel = EditorUtility.DisplayCancelableProgressBar("Converting CSV", "Please wait...", (float)sourceLines.Count / (float)numLines);
string line = GetNextSourceLine();
if (string.Equals(GetFirstField(line), "Database"))
{
ReadDatabaseProperties(database);
}
else if (string.Equals(GetFirstField(line), "Actors"))
{
ReadAssets<Actor>(database.actors, true);
}
else if (string.Equals(GetFirstField(line), "Items"))
{
ReadAssets<Item>(database.items, true);
}
else if (string.Equals(GetFirstField(line), "Locations"))
{
ReadAssets<Location>(database.locations, true);
}
else if (string.Equals(GetFirstField(line), "Variables"))
{
ReadAssets<Variable>(database.variables, true);
}
else if (string.Equals(GetFirstField(line), "Conversations"))
{
readConversations = true;
ReadAssets<Conversation>(database.conversations, true);
}
else if (string.Equals(GetFirstField(line), "DialogueEntries"))
{
ReadDialogueEntries(database, readConversations);
readDialogueEntries = readConversations;
}
else if (string.Equals(GetFirstField(line), "OutgoingLinks"))
{
ReadOutgoingLinks(database, readConversations && readDialogueEntries);
}
else {
throw new InvalidDataException("Line not recognized: " + line);
}
}
// If we skipped dialogue entries, we need to re-read them now:
if (!readDialogueEntries)
{
Debug.Log("Conversations section was after DialogueEntries section. Going back to read DialogueEntries now...");
LoadSourceFile();
safeguard = 0;
while (!IsSourceAtEnd() && (safeguard < MaxIterations) && !cancel)
{
safeguard++;
cancel = EditorUtility.DisplayCancelableProgressBar("Converting CSV", "Converting dialogue entries...", (float)sourceLines.Count / (float)numLines);
string line = GetNextSourceLine();
if (string.Equals(GetFirstField(line), "Database"))
{
ReadDatabaseProperties(database);
}
else if (string.Equals(GetFirstField(line), "Actors"))
{
ReadAssets<Actor>(database.actors, false);
}
else if (string.Equals(GetFirstField(line), "Items"))
{
ReadAssets<Item>(database.items, false);
}
else if (string.Equals(GetFirstField(line), "Locations"))
{
ReadAssets<Location>(database.locations, false);
}
else if (string.Equals(GetFirstField(line), "Variables"))
{
ReadAssets<Variable>(database.variables, false);
}
else if (string.Equals(GetFirstField(line), "Conversations"))
{
ReadAssets<Conversation>(database.conversations, false);
}
else if (string.Equals(GetFirstField(line), "DialogueEntries"))
{
ReadDialogueEntries(database, true);
}
else if (string.Equals(GetFirstField(line), "OutgoingLinks"))
{
ReadOutgoingLinks(database, true);
}
else {
throw new InvalidDataException("Line not recognized: " + line);
}
}
}
if (sortByID) SortAssetsByID(database);
}
catch (Exception e)
{
Debug.LogError(string.Format("{0}: CSV conversion failed: {1}\nLine {2}: {3}", DialogueDebug.Prefix, e.Message, currentLineNumber, currentSourceLine));
hadError = true;
}
}
finally
{
EditorUtility.ClearProgressBar();
if (hadError) Debug.LogWarning("Dialogue System: There were errors during conversion of " + prefs.sourceFilename + ".");
}
}
/// <summary>
/// Reads the database properties section.
/// </summary>
/// <param name="database">Dialogue database.</param>
private void ReadDatabaseProperties(DialogueDatabase database)
{
Debug.Log("Reading database properties");
GetNextSourceLine(); // Field headings
string[] values = GetValues(GetNextSourceLine());
if (values.Length < 8) throw new IndexOutOfRangeException("Incorrect number of values in database properties line");
database.name = values[0];
database.version = values[1];
database.author = values[2];
database.description = values[3];
database.emphasisSettings[0] = UnwrapEmField(values[4]);
database.emphasisSettings[1] = UnwrapEmField(values[5]);
database.emphasisSettings[2] = UnwrapEmField(values[6]);
database.emphasisSettings[3] = UnwrapEmField(values[7]);
GetNextSourceLine(); // Global User Script heading
database.globalUserScript = UnwrapValue(GetNextSourceLine());
}
/// <summary>
/// Reads a section of assets such as Actors, Items, etc.
/// </summary>
/// <param name="assets">List of assets to populate.</param>
/// <typeparam name="T">The type of asset.</typeparam>
private void ReadAssets<T>(List<T> assets, bool add) where T : Asset, new()
{
string typeName = typeof(T).Name;
bool isActorSection = (typeof(T) == typeof(Actor));
Debug.Log(string.Format("{0} {1} section", (add ? "Reading" : "Skipping"), typeName));
// Read field names and types:
string[] fieldNames = GetValues(GetNextSourceLine());
string[] fieldTypes = GetValues(GetNextSourceLine());
// Set up ignore list for values that aren't actual fields:
List<string> ignore = isActorSection ? ActorSpecialValues : DefaultSpecialValues;
// Keep reading until we reach another asset type heading or end of file:
int safeguard = 0;
while (!(IsSourceAtEnd() || AssetTypeHeadings.Contains(GetFirstField(PeekNextSourceLine()))))
{
safeguard++;
if (safeguard > MaxIterations) break;
string[] values = GetValues(GetNextSourceLine());
if (add)
{
// Create the asset:
T asset = new T();
asset.id = Tools.StringToInt(values[0]);
asset.fields = new List<Field>();
// Preprocess a couple extra values for actors:
if (isActorSection) FindActorPortraits(asset as Actor, values[1], values[2]);
// Read the remaining values and assign them to the asset's fields:
ReadAssetFields(fieldNames, fieldTypes, ignore, values, asset.fields);
// If the database already has an old asset with the same ID, delete it first:
assets.RemoveAll(a => a.id == asset.id);
// Finally, add the asset:
assets.Add(asset);
}
}
}
/// <summary>
/// Reads the asset fields.
/// </summary>
/// <param name="fieldNames">Field names.</param>
/// <param name="fieldTypes">Field types.</param>
/// <param name="ignore">List of field names to not add.</param>
/// <param name="values">CSV values.</param>
/// <param name="fields">Fields list of populate.</param>
private void ReadAssetFields(string[] fieldNames, string[] fieldTypes, List<string> ignore,
string[] values, List<Field> fields)
{
// Look for a special field named "InitialValueType" used in Variables section:
var isInitialValueTypeKnown = false;
var initialValueType = FieldType.Text;
for (int i = 0; i < fieldNames.Length; i++)
{
if (string.Equals(fieldNames[i], "InitialValueType"))
{
initialValueType = Field.StringToFieldType(values[i]);
isInitialValueTypeKnown = true;
break;
}
}
// Convert all fields:
for (int i = 0; i < fieldNames.Length; i++)
{
if ((ignore != null) && ignore.Contains(fieldNames[i])) continue;
if (string.Equals(values[i], "{{omit}}")) continue;
if (string.IsNullOrEmpty(fieldNames[i])) continue;
string title = fieldNames[i];
string value = values[i];
// Special handling required for Initial Value of Variables section:
FieldType type = (string.Equals(title, "Initial Value") && isInitialValueTypeKnown) ? initialValueType : GuessFieldType(value, fieldTypes[i]);
fields.Add(new Field(title, value, type));
}
}
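// Illustrative example: for a Variables row whose header contains both "Initial Value" and
// "InitialValueType", and whose InitialValueType cell reads "Boolean", the "Initial Value"
// field is stored with FieldType.Boolean even though its type-specifier row says "Text";
// every other cell falls back to GuessFieldType().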
private void FindActorPortraits(Actor actor, string portraitName, string alternatePortraitNames)
{
if (!string.IsNullOrEmpty(portraitName))
{
actor.portrait = AssetDatabase.LoadAssetAtPath(portraitName, typeof(Texture2D)) as Texture2D;
}
if (!(string.IsNullOrEmpty(alternatePortraitNames) || string.Equals(alternatePortraitNames, "[]")))
{
var inner = alternatePortraitNames.Substring(1, alternatePortraitNames.Length - 2);
var names = inner.Split(new char[] { ';' });
if (actor.alternatePortraits == null) actor.alternatePortraits = new List<Texture2D>();
foreach (var altPortraitName in names)
{
var texture = AssetDatabase.LoadAssetAtPath(altPortraitName, typeof(Texture2D)) as Texture2D;
if (texture != null)
{
actor.alternatePortraits.Add(texture);
}
}
}
}
/// <summary>
/// Reads the DialogueEntries section. DialogueEntry is not a subclass of Asset,
/// so we can't reuse the ReadAssets() code above.
/// </summary>
/// <param name="database">Dialogue database.</param>
private void ReadDialogueEntries(DialogueDatabase database, bool add)
{
Debug.Log((add ? "Reading" : "Skipping") + " DialogueEntries section");
// Read field names and types:
string[] fieldNames = GetValues(GetNextSourceLine());
string[] fieldTypes = GetValues(GetNextSourceLine());
// Keep reading until we reach another asset type heading or end of file:
int safeguard = 0;
while (!(IsSourceAtEnd() || AssetTypeHeadings.Contains(GetFirstField(PeekNextSourceLine()))))
{
safeguard++;
if (safeguard > MaxIterations) break;
string[] values = GetValues(GetNextSourceLine());
if (add)
{
// Create the dialogue entry:
DialogueEntry entry = new DialogueEntry();
entry.fields = new List<Field>();
// We can ignore values[0] (entrytag).
entry.conversationID = Tools.StringToInt(values[1]);
entry.id = Tools.StringToInt(values[2]);
entry.ActorID = Tools.StringToInt(values[3]);
entry.ConversantID = Tools.StringToInt(values[4]);
entry.Title = values[5];
entry.DefaultMenuText = values[6];
entry.DefaultDialogueText = values[7];
entry.isGroup = Tools.StringToBool(values[8]);
entry.falseConditionAction = values[9];
entry.conditionPriority = ConditionPriorityTools.StringToConditionPriority(values[10]);
entry.conditionsString = values[11];
entry.userScript = values[12];
// Read the remaining values and assign them to the asset's fields:
ReadAssetFields(fieldNames, fieldTypes, DialogueEntrySpecialValues, values, entry.fields);
// Convert canvasRect field to entry position on node editor canvas:
entry.UseCanvasRectField();
// Finally, add the asset:
var conversation = database.GetConversation(entry.conversationID);
if (conversation == null) throw new InvalidDataException(string.Format("Conversation {0} referenced in entry {1} not found", entry.conversationID, entry.id));
conversation.dialogueEntries.Add(entry);
}
}
}
/// <summary>
/// Reads the OutgoingLinks section. Again, Link is not a subclass of Asset,
/// so we can't reuse the ReadAssets() method.
/// </summary>
/// <param name="database">Dialogue database.</param>
private void ReadOutgoingLinks(DialogueDatabase database, bool add)
{
Debug.Log((add ? "Reading" : "Skipping") + " OutgoingLinks section");
GetNextSourceLine(); // Headings
GetNextSourceLine(); // Types
// Keep reading until we reach another asset type heading or end of file:
int safeguard = 0;
while (!(IsSourceAtEnd() || AssetTypeHeadings.Contains(GetFirstField(PeekNextSourceLine()))))
{
safeguard++;
if (safeguard > MaxIterations) break;
string[] values = GetValues(GetNextSourceLine());
if (add)
{
var link = new Link(Tools.StringToInt(values[0]), Tools.StringToInt(values[1]),
Tools.StringToInt(values[2]), Tools.StringToInt(values[3]));
link.priority = ConditionPriorityTools.StringToConditionPriority(values[4]);
var entry = database.GetDialogueEntry(link.originConversationID, link.originDialogueID);
if (entry == null)
{
Debug.LogWarning(string.Format("Dialogue System: CSV conversion error: dialogue entry {0}.{1} referenced in link was not found.", link.originConversationID, link.originDialogueID));
//throw new InvalidDataException(string.Format("Dialogue entry {0}.{1} referenced in link not found", link.originConversationID, link.originDialogueID));
}
else
{
entry.outgoingLinks.Add(link);
}
}
}
}
protected override void LoadSourceFile()
{
base.LoadSourceFile();
CombineMultilineSourceLines();
}
/// <summary>
/// Combines lines that are actually a multiline CSV row. This also helps prevent the
/// CSV-splitting regex from hanging due to catastrophic backtracking on unterminated quotes.
/// </summary>
private void CombineMultilineSourceLines()
{
int lineNum = 0;
int safeguard = 0;
while ((lineNum < sourceLines.Count) && (safeguard < MaxIterations))
{
safeguard++;
string line = sourceLines[lineNum];
if (line == null)
{
sourceLines.RemoveAt(lineNum);
}
else {
bool terminated = true;
char previousChar = (char)0;
for (int i = 0; i < line.Length; i++)
{
char currentChar = line[i];
bool isQuote = (currentChar == '"') && (previousChar != '\\');
if (isQuote) terminated = !terminated;
previousChar = currentChar;
}
if (terminated || (lineNum + 1) >= sourceLines.Count)
{
if (!terminated) sourceLines[lineNum] = line + '"';
lineNum++;
}
else {
sourceLines[lineNum] = line + "\\n" + sourceLines[lineNum + 1];
sourceLines.RemoveAt(lineNum + 1);
}
}
}
}
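// Illustrative example: the two physical source lines
//   5,"A note with an embedded
//   line break",42
// are joined into the single logical row
//   5,"A note with an embedded\nline break",42
// and an unterminated quote on the last line is closed, so the splitting regex in
// GetValues() cannot hang on catastrophic backtracking.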
/// <summary>
/// Returns the individual comma-separated values in a line.
/// </summary>
/// <returns>The values.</returns>
/// <param name="line">Line.</param>
private string[] GetValues(string line)
{
Regex csvSplit = new Regex("(?:^|,)(\"(?:[^\"]+|\"\")*\"|[^,]*)", RegexOptions.Compiled);
List<string> values = new List<string>();
foreach (Match match in csvSplit.Matches(line))
{
values.Add(UnwrapValue(match.Value.TrimStart(',')));
}
return values.ToArray();
}
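// Illustrative example: GetValues("1,\"Hello, world\",True") returns
// { "1", "Hello, world", "True" } -- the quoted cell keeps its embedded comma, and
// UnwrapValue() strips the surrounding quotes and un-doubles any "" pairs.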
private string GetFirstField(string line)
{
if (line.Contains(","))
{
var values = line.Split(new char[] { ',' });
return values[0];
}
else {
return line;
}
}
/// <summary>
/// Returns a "fixed" version of a comma-separated value where escaped newlines
/// have been converted back into real newlines, and optional surrounding quotes
/// have been removed.
/// </summary>
/// <returns>The value.</returns>
/// <param name="value">Value.</param>
private string UnwrapValue(string value)
{
string s = value.Replace("\\n", "\n").Replace("\\r", "\r");
if (s.StartsWith("\"") && s.EndsWith("\""))
{
s = s.Substring(1, s.Length - 2).Replace("\"\"", "\"");
}
return s;
}
/// <summary>
/// Converts an emphasis field in the format "#rrggbbaa biu" into an EmphasisSetting object.
/// </summary>
/// <returns>An EmphasisSetting object.</returns>
/// <param name="emField">Em field.</param>
private EmphasisSetting UnwrapEmField(string emField)
{
return new EmphasisSetting(emField.Substring(0, 9), emField.Substring(10, 3));
}
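// Worked example (assumed exporter format): the cell "#ffc080ff biu" splits into the color
// "#ffc080ff" (characters 0-8) and the bold/italic/underline flags "biu" (characters 10-12),
// which the EmphasisSetting constructor parses further.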
/// <summary>
/// The CSV format isn't robust enough to describe if different assets define different
/// types for the same field name. This method checks if a "Text" field has a Boolean
/// or Number value and returns that type instead of Text.
/// </summary>
/// <returns>The field type.</returns>
/// <param name="value">Value.</param>
/// <param name="typeSpecifier">Type specifier.</param>
private FieldType GuessFieldType(string value, string typeSpecifier)
{
if (string.Equals(typeSpecifier, "Text") && !string.IsNullOrEmpty(value))
{
if (IsBoolean(value))
{
return FieldType.Boolean;
}
else if (IsNumber(value))
{
return FieldType.Number;
}
}
return Field.StringToFieldType(typeSpecifier);
}
/// <summary>
/// Determines whether a string represents a Boolean value.
/// </summary>
/// <returns><c>true</c> if this is a Boolean value; otherwise, <c>false</c>.</returns>
/// <param name="value">String value.</param>
private bool IsBoolean(string value)
{
return ((string.Compare(value, "True", System.StringComparison.OrdinalIgnoreCase) == 0) ||
(string.Compare(value, "False", System.StringComparison.OrdinalIgnoreCase) == 0));
}
/// <summary>
/// Determines whether a string represents a Number value.
/// </summary>
/// <returns><c>true</c> if this is a number; otherwise, <c>false</c>.</returns>
/// <param name="value">String value.</param>
private bool IsNumber(string value)
{
float n;
return float.TryParse(value, System.Globalization.NumberStyles.Any, System.Globalization.NumberFormatInfo.InvariantInfo, out n);
}
private void SortAssetsByID(DialogueDatabase database)
{
if (database == null) return;
database.actors.Sort((x, y) => x.id.CompareTo(y.id));
database.items.Sort((x, y) => x.id.CompareTo(y.id));
database.locations.Sort((x, y) => x.id.CompareTo(y.id));
database.variables.Sort((x, y) => x.id.CompareTo(y.id));
database.conversations.Sort((x, y) => x.id.CompareTo(y.id));
foreach (var conversation in database.conversations)
{
conversation.dialogueEntries.Sort((x, y) => x.id.CompareTo(y.id));
}
}
}
}
| |
//Licensed to the Apache Software Foundation (ASF) under one
//or more contributor license agreements. See the NOTICE file
//distributed with this work for additional information
//regarding copyright ownership. The ASF licenses this file
//to you under the Apache License, Version 2.0 (the
//"License"); you may not use this file except in compliance
//with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
//Unless required by applicable law or agreed to in writing,
//software distributed under the License is distributed on an
//"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
//KIND, either express or implied. See the License for the
//specific language governing permissions and limitations
//under the License.
using System;
using System.Configuration;
using System.IO;
using System.Linq;
using System.Net;
using System.Reflection;
namespace Paragon.Plugins.MessageBus
{
public class MessageBrokerConfiguration
{
public const string AppSettingKeyBrokerExePath = "BrokerExePath";
public const string AppSettingKeyBrokerLibPath = "BrokerLibraryPath";
public const string AppSettingKeyBrokerPort = "BrokerPort";
public const string AppSettingKeyBrokerLoggingConfig = "BrokerLoggingConfigFile";
private const string DefaultBrokerExePath = "\\\\{0}\\appstore\\51\\Production";
private const string DefaultBrokerLibPath = "\\\\{0}\\appstore\\1188\\Production\\messagebus.jar";
private const string DefaultBrokerLibPath2 = "\\\\{0}\\appstore\\1188\\Test\\messagebus.jar";
private readonly ILogger _logger;
private string _domain;
private Configuration _appConfig;
private Configuration AppConfig
{
get
{
if (_appConfig == null)
{
_appConfig = ConfigurationManager.OpenExeConfiguration(Assembly.GetExecutingAssembly().Location);
}
return _appConfig;
}
}
public MessageBrokerConfiguration(ILogger logger)
{
_logger = logger;
}
public string GetBrokerExePath()
{
var brokerExePath = GetSetting(AppConfig, AppSettingKeyBrokerExePath);
if (string.IsNullOrEmpty(brokerExePath) ||
brokerExePath.Equals("detect", StringComparison.InvariantCultureIgnoreCase))
{
brokerExePath = DetectJavaPath();
}
return brokerExePath;
}
public string GetBrokerLibPath()
{
var brokerLibPath = GetSetting(AppConfig, AppSettingKeyBrokerLibPath);
if (string.IsNullOrEmpty(brokerLibPath) ||
brokerLibPath.Equals("detect", StringComparison.InvariantCultureIgnoreCase))
{
brokerLibPath = DetectBrokerLibPath();
}
else
{
brokerLibPath = Path.GetFullPath(brokerLibPath);
}
return brokerLibPath;
}
public int GetBrokerPort(int defaultPort)
{
var brokerPort = GetSetting(AppConfig, AppSettingKeyBrokerPort);
if (!string.IsNullOrEmpty(brokerPort))
{
int portNum;
if (int.TryParse(brokerPort, out portNum))
{
return portNum;
}
}
return defaultPort;
}
public string GetBrokerLoggingConfiguration()
{
var brokerLoggingConfig = GetSetting(AppConfig, AppSettingKeyBrokerLoggingConfig);
if (string.IsNullOrEmpty(brokerLoggingConfig) || brokerLoggingConfig.Equals("detect", StringComparison.InvariantCultureIgnoreCase))
{
var file = Path.GetDirectoryName(GetType().Assembly.Location) + "\\messagebroker.log4j.xml";
brokerLoggingConfig = File.Exists(file) ? file : string.Empty;
}
return brokerLoggingConfig;
}
private string DetectJavaPath()
{
try
{
var javaReleasePath = string.Format(DefaultBrokerExePath, Domain);
// The java release is 2 POD packages - 32-bit and 64-bit variants.
// Find the 32-bit version by parsing the 'release' file in the root folder of each POD.
return Directory.GetDirectories(javaReleasePath)
.Select(Get32BitJavaPath)
.FirstOrDefault(path => !string.IsNullOrEmpty(path));
}
catch (Exception exception)
{
_logger.Error("Failed to detect Java path", exception);
}
return string.Empty;
}
private string Get32BitJavaPath(string packageFolder)
{
var releasePropertiesFilePaths = Directory.GetFiles(
packageFolder, "release", SearchOption.TopDirectoryOnly);
if (releasePropertiesFilePaths.Length != 1)
{
_logger.Warn(string.Format("No release file found in Java package [{0}]", packageFolder));
return null;
}
var properties = File.ReadAllLines(releasePropertiesFilePaths[0]);
if (properties.Any(p => p.StartsWith("OS_ARCH=") && p.Contains("86")))
{
// 32-bit Java
var javaExePath = Path.Combine(packageFolder, "bin\\java.exe");
if (File.Exists(javaExePath))
{
return javaExePath;
}
_logger.Error(string.Format("java.exe not found in [{0}]", packageFolder));
}
return null;
}
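// Illustrative 'release' file that would satisfy the check above (contents are an assumption
// based on the OS_ARCH test, not taken from a real package):
//   JAVA_VERSION="1.8.0_202"
//   OS_ARCH="x86"        <- contains "86", so this POD is treated as the 32-bit Java package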
private string DetectBrokerLibPath()
{
try
{
// Use the domain of the local machine to figure out DFS share domain
var defaultBrokerLibPath = string.Format(DefaultBrokerLibPath, Domain);
if (File.Exists(defaultBrokerLibPath))
{
return defaultBrokerLibPath;
}
var defaultBrokerLibPath2 = string.Format(DefaultBrokerLibPath2, Domain);
if (File.Exists(defaultBrokerLibPath2))
{
return defaultBrokerLibPath2;
}
}
catch (Exception ex)
{
_logger.Error(string.Format("Failed to detect broker lib path: {0}", ex.Message));
}
return string.Empty;
}
private string Domain
{
get
{
if (string.IsNullOrEmpty(_domain))
{
var hostnameParts = Dns.GetHostEntry("localhost").HostName.Split('.');
_domain = hostnameParts.Length > 1 ? hostnameParts[1] : string.Empty;
}
return _domain;
}
}
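// Worked example (illustrative): if Dns.GetHostEntry("localhost").HostName returns
// "mybox.corp.example.com", the split yields { "mybox", "corp", "example", "com" } and
// _domain becomes "corp", which is then substituted into the DFS share paths above.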
private static string GetSetting(Configuration config, string key)
{
try
{
var entry = (config != null) ? (config.AppSettings.Settings[key]) : (null);
return (entry != null) ? (entry.Value) : (null);
}
catch
{
return null;
}
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Composition.Hosting;
using System.IO;
using System.Linq;
using System.Reflection;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Http;
using Microsoft.CodeAnalysis;
using Microsoft.Extensions.Logging;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using OmniSharp.Mef;
using OmniSharp.Middleware.Endpoint.Exports;
using OmniSharp.Models;
using OmniSharp.Models.UpdateBuffer;
using OmniSharp.Plugins;
namespace OmniSharp.Middleware.Endpoint
{
class LanguageModel
{
public string Language { get; set; }
public string FileName { get; set; }
}
abstract class EndpointHandler
{
public abstract Task<object> Handle(HttpContext context);
public static EndpointHandler Create<TRequest, TResponse>(IPredicateHandler languagePredicateHandler, CompositionHost host,
ILogger logger, OmniSharpEndpointMetadata item,
IEnumerable<Lazy<IRequestHandler, OmniSharpRequestHandlerMetadata>> handlers,
Lazy<EndpointHandler<UpdateBufferRequest, object>> updateBufferHandler,
IEnumerable<Plugin> plugins)
{
return new EndpointHandler<TRequest, TResponse>(languagePredicateHandler, host, logger, item, handlers.Where(x => x.Metadata.EndpointName == item.EndpointName), updateBufferHandler, plugins);
}
public static EndpointHandler Factory(IPredicateHandler languagePredicateHandler, CompositionHost host,
ILogger logger, OmniSharpEndpointMetadata item,
IEnumerable<Lazy<IRequestHandler, OmniSharpRequestHandlerMetadata>> handlers,
Lazy<EndpointHandler<UpdateBufferRequest, object>> updateBufferHandler,
IEnumerable<Plugin> plugins)
{
var createMethod = typeof(EndpointHandler).GetTypeInfo().DeclaredMethods.First(x => x.Name == nameof(EndpointHandler.Create));
return (EndpointHandler)createMethod.MakeGenericMethod(item.RequestType, item.ResponseType).Invoke(null, new object[] { languagePredicateHandler, host, logger, item, handlers, updateBufferHandler, plugins });
}
}
class EndpointHandler<TRequest, TResponse> : EndpointHandler
{
private readonly CompositionHost _host;
private readonly IPredicateHandler _languagePredicateHandler;
private readonly Lazy<Task<Dictionary<string, ExportHandler<TRequest, TResponse>>>> _exports;
private readonly OmniSharpWorkspace _workspace;
private readonly bool _hasLanguageProperty;
private readonly bool _hasFileNameProperty;
private readonly bool _canBeAggregated;
private readonly ILogger _logger;
private readonly IEnumerable<Plugin> _plugins;
private readonly Lazy<EndpointHandler<UpdateBufferRequest, object>> _updateBufferHandler;
public EndpointHandler(IPredicateHandler languagePredicateHandler, CompositionHost host, ILogger logger, OmniSharpEndpointMetadata item, IEnumerable<Lazy<IRequestHandler, OmniSharpRequestHandlerMetadata>> handlers, Lazy<EndpointHandler<UpdateBufferRequest, object>> updateBufferHandler, IEnumerable<Plugin> plugins)
{
EndpointName = item.EndpointName;
_host = host;
_logger = logger;
_languagePredicateHandler = languagePredicateHandler;
_plugins = plugins;
_workspace = host.GetExport<OmniSharpWorkspace>();
_hasLanguageProperty = item.RequestType.GetRuntimeProperty(nameof(LanguageModel.Language)) != null;
_hasFileNameProperty = item.RequestType.GetRuntimeProperty(nameof(Request.FileName)) != null;
_canBeAggregated = typeof(IAggregateResponse).IsAssignableFrom(item.ResponseType);
_updateBufferHandler = updateBufferHandler;
_exports = new Lazy<Task<Dictionary<string, ExportHandler<TRequest, TResponse>>>>(() => LoadExportHandlers(handlers));
}
private Task<Dictionary<string, ExportHandler<TRequest, TResponse>>> LoadExportHandlers(IEnumerable<Lazy<IRequestHandler, OmniSharpRequestHandlerMetadata>> handlers)
{
var interfaceHandlers = handlers
.Select(export => new RequestHandlerExportHandler<TRequest, TResponse>(export.Metadata.Language, (IRequestHandler<TRequest, TResponse>)export.Value))
.Cast<ExportHandler<TRequest, TResponse>>();
var plugins = _plugins.Where(x => x.Config.Endpoints.Contains(EndpointName))
.Select(plugin => new PluginExportHandler<TRequest, TResponse>(EndpointName, plugin))
.Cast<ExportHandler<TRequest, TResponse>>();
return Task.FromResult(interfaceHandlers
.Concat(plugins)
.ToDictionary(export => export.Language));
}
public string EndpointName { get; }
public override Task<object> Handle(HttpContext context)
{
var requestObject = DeserializeRequestObject(context.Request.Body);
var model = GetLanguageModel(requestObject);
return Process(context, model, requestObject);
}
public async Task<object> Process(HttpContext context, LanguageModel model, JToken requestObject)
{
var request = requestObject.ToObject<TRequest>();
if (request is Request && _updateBufferHandler.Value != null)
{
var realRequest = request as Request;
if (!string.IsNullOrWhiteSpace(realRequest.FileName) && (realRequest.Buffer != null || realRequest.Changes != null))
{
await _updateBufferHandler.Value.Process(context, model, requestObject);
}
}
if (_hasLanguageProperty)
{
// Handle cases where a request can't be aggregated and a language isn't specified.
// This helps with editors calling a legacy endpoint, for example /metadata.
if (!_canBeAggregated && string.IsNullOrWhiteSpace(model.Language))
{
model.Language = LanguageNames.CSharp;
}
return await HandleLanguageRequest(model.Language, request, context);
}
else if (_hasFileNameProperty)
{
var language = _languagePredicateHandler.GetLanguageForFilePath(model.FileName ?? string.Empty);
return await HandleLanguageRequest(language, request, context);
}
else
{
var language = _languagePredicateHandler.GetLanguageForFilePath(string.Empty);
if (!string.IsNullOrEmpty(language))
{
return await HandleLanguageRequest(language, request, context);
}
}
return await HandleAllRequest(request, context);
}
private Task<object> HandleLanguageRequest(string language, TRequest request, HttpContext context)
{
if (!string.IsNullOrEmpty(language))
{
return HandleSingleRequest(language, request, context);
}
return HandleAllRequest(request, context);
}
private async Task<object> HandleSingleRequest(string language, TRequest request, HttpContext context)
{
var exports = await _exports.Value;
if (exports.TryGetValue(language, out var handler))
{
return await handler.Handle(request);
}
throw new NotSupportedException($"{language} does not support {EndpointName}");
}
private async Task<object> HandleAllRequest(TRequest request, HttpContext context)
{
if (!_canBeAggregated)
{
throw new NotSupportedException($"Must be able aggregate the response to spread them out across all plugins for {EndpointName}");
}
var exports = await _exports.Value;
IAggregateResponse aggregateResponse = null;
var responses = new List<Task<TResponse>>();
foreach (var handler in exports.Values)
{
responses.Add(handler.Handle(request));
}
foreach (IAggregateResponse exportResponse in await Task.WhenAll(responses))
{
if (aggregateResponse != null)
{
aggregateResponse = aggregateResponse.Merge(exportResponse);
}
else
{
aggregateResponse = exportResponse;
}
}
object response = aggregateResponse;
if (response != null)
{
return response;
}
return null;
}
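// Illustrative note: when no single language can be resolved, the request fans out to every
// registered handler and the IAggregateResponse results are folded pairwise, conceptually:
//   merged = csharpResponse.Merge(cakeResponse).Merge(scriptCsResponse);
// (the handler/response names here are hypothetical).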
private LanguageModel GetLanguageModel(JToken jtoken)
{
var response = new LanguageModel();
var jobject = jtoken as JObject;
if (jobject == null)
{
return response;
}
if (jobject.TryGetValue(nameof(LanguageModel.Language), StringComparison.OrdinalIgnoreCase, out var token))
{
response.Language = token.ToString();
}
if (jobject.TryGetValue(nameof(LanguageModel.FileName), StringComparison.OrdinalIgnoreCase, out token))
{
response.FileName = token.ToString();
}
return response;
}
private JToken DeserializeRequestObject(Stream readStream)
{
try
{
using (var streamReader = new StreamReader(readStream))
{
using (var textReader = new JsonTextReader(streamReader))
{
return JToken.Load(textReader);
}
}
}
catch
{
return new JObject();
}
}
}
}
| |
//
// Copyright (c) Microsoft and contributors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
//
// See the License for the specific language governing permissions and
// limitations under the License.
//
// Warning: This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if the
// code is regenerated.
using System;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.WindowsAzure.Management.ExpressRoute;
using Microsoft.WindowsAzure.Management.ExpressRoute.Models;
namespace Microsoft.WindowsAzure
{
/// <summary>
/// The Express Route API provides programmatic access to the functionality
/// needed by the customer to set up Dedicated Circuits and Dedicated
/// Circuit Links. The Express Route Customer API is a REST API. All API
/// operations are performed over SSL and mutually authenticated using
/// X.509 v3 certificates. (see
/// http://msdn.microsoft.com/en-us/library/windowsazure/ee460799.aspx for
/// more information)
/// </summary>
public static partial class DedicatedCircuitOperationsExtensions
{
/// <summary>
/// The New Dedicated Circuit operation creates a new dedicated circuit.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.IDedicatedCircuitOperations.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the New Dedicated Circuit
/// operation.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static ExpressRouteOperationResponse BeginNew(this IDedicatedCircuitOperations operations, DedicatedCircuitNewParameters parameters)
{
return Task.Factory.StartNew((object s) =>
{
return ((IDedicatedCircuitOperations)s).BeginNewAsync(parameters);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// The New Dedicated Circuit operation creates a new dedicated circuit.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.IDedicatedCircuitOperations.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the New Dedicated Circuit
/// operation.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static Task<ExpressRouteOperationResponse> BeginNewAsync(this IDedicatedCircuitOperations operations, DedicatedCircuitNewParameters parameters)
{
return operations.BeginNewAsync(parameters, CancellationToken.None);
}
/// <summary>
/// The Remove Dedicated Circuit operation deletes an existing
/// dedicated circuit.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.IDedicatedCircuitOperations.
/// </param>
/// <param name='serviceKey'>
/// Required. Service key representing the dedicated circuit to be
/// deleted.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static ExpressRouteOperationResponse BeginRemove(this IDedicatedCircuitOperations operations, string serviceKey)
{
return Task.Factory.StartNew((object s) =>
{
return ((IDedicatedCircuitOperations)s).BeginRemoveAsync(serviceKey);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// The Remove Dedicated Circuit operation deletes an existing
/// dedicated circuit.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.IDedicatedCircuitOperations.
/// </param>
/// <param name='serviceKey'>
/// Required. Service key representing the dedicated circuit to be
/// deleted.
/// </param>
/// <returns>
/// A standard service response including an HTTP status code and
/// request ID.
/// </returns>
public static Task<ExpressRouteOperationResponse> BeginRemoveAsync(this IDedicatedCircuitOperations operations, string serviceKey)
{
return operations.BeginRemoveAsync(serviceKey, CancellationToken.None);
}
/// <summary>
/// The Get Dedicated Circuit operation retrieves the specified
/// dedicated circuit.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.IDedicatedCircuitOperations.
/// </param>
/// <param name='serviceKey'>
/// Required. The service key representing the circuit.
/// </param>
/// <returns>
/// The Get Dedicated Circuit operation response.
/// </returns>
public static DedicatedCircuitGetResponse Get(this IDedicatedCircuitOperations operations, string serviceKey)
{
return Task.Factory.StartNew((object s) =>
{
return ((IDedicatedCircuitOperations)s).GetAsync(serviceKey);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// The Get Dedicated Circuit operation retrieves the specified
/// dedicated circuit.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.IDedicatedCircuitOperations.
/// </param>
/// <param name='serviceKey'>
/// Required. The service key representing the circuit.
/// </param>
/// <returns>
/// The Get Dedicated Circuit operation response.
/// </returns>
public static Task<DedicatedCircuitGetResponse> GetAsync(this IDedicatedCircuitOperations operations, string serviceKey)
{
return operations.GetAsync(serviceKey, CancellationToken.None);
}
/// <summary>
/// The List Dedicated Circuit operation retrieves a list of dedicated
/// circuits owned by the customer.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.IDedicatedCircuitOperations.
/// </param>
/// <returns>
/// The List Dedicated Circuit operation response.
/// </returns>
public static DedicatedCircuitListResponse List(this IDedicatedCircuitOperations operations)
{
return Task.Factory.StartNew((object s) =>
{
return ((IDedicatedCircuitOperations)s).ListAsync();
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// The List Dedicated Circuit operation retrieves a list of dedicated
/// circuits owned by the customer.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.IDedicatedCircuitOperations.
/// </param>
/// <returns>
/// The List Dedicated Circuit operation response.
/// </returns>
public static Task<DedicatedCircuitListResponse> ListAsync(this IDedicatedCircuitOperations operations)
{
return operations.ListAsync(CancellationToken.None);
}
/// <summary>
/// The New Dedicated Circuit operation creates a new dedicated circuit.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.IDedicatedCircuitOperations.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the New Dedicated Circuit
/// operation.
/// </param>
/// <returns>
/// The Get Dedicated Circuit operation response.
/// </returns>
public static DedicatedCircuitGetResponse New(this IDedicatedCircuitOperations operations, DedicatedCircuitNewParameters parameters)
{
return Task.Factory.StartNew((object s) =>
{
return ((IDedicatedCircuitOperations)s).NewAsync(parameters);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// The New Dedicated Circuit operation creates a new dedicated circuit.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.IDedicatedCircuitOperations.
/// </param>
/// <param name='parameters'>
/// Required. Parameters supplied to the New Dedicated Circuit
/// operation.
/// </param>
/// <returns>
/// The Get Dedicated Circuit operation response.
/// </returns>
public static Task<DedicatedCircuitGetResponse> NewAsync(this IDedicatedCircuitOperations operations, DedicatedCircuitNewParameters parameters)
{
return operations.NewAsync(parameters, CancellationToken.None);
}
/// <summary>
/// The Remove Dedicated Circuit operation deletes an existing
/// dedicated circuit.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.IDedicatedCircuitOperations.
/// </param>
/// <param name='serviceKey'>
/// Required. Service Key associated with the dedicated circuit to be
/// deleted.
/// </param>
/// <returns>
/// The response body contains the status of the specified asynchronous
/// operation, indicating whether it has succeeded, is in progress, or
/// has failed. Note that this status is distinct from the HTTP status
/// code returned for the Get Operation Status operation itself. If
/// the asynchronous operation succeeded, the response body includes
/// the HTTP status code for the successful request. If the
/// asynchronous operation failed, the response body includes the HTTP
/// status code for the failed request, and also includes error
/// information regarding the failure.
/// </returns>
public static ExpressRouteOperationStatusResponse Remove(this IDedicatedCircuitOperations operations, string serviceKey)
{
return Task.Factory.StartNew((object s) =>
{
return ((IDedicatedCircuitOperations)s).RemoveAsync(serviceKey);
}
, operations, CancellationToken.None, TaskCreationOptions.None, TaskScheduler.Default).Unwrap().GetAwaiter().GetResult();
}
/// <summary>
/// The Remove Dedicated Circuit operation deletes an existing
/// dedicated circuit.
/// </summary>
/// <param name='operations'>
/// Reference to the
/// Microsoft.WindowsAzure.Management.ExpressRoute.IDedicatedCircuitOperations.
/// </param>
/// <param name='serviceKey'>
/// Required. Service Key associated with the dedicated circuit to be
/// deleted.
/// </param>
/// <returns>
/// The response body contains the status of the specified asynchronous
/// operation, indicating whether it has succeeded, is in progress, or
/// has failed. Note that this status is distinct from the HTTP status
/// code returned for the Get Operation Status operation itself. If
/// the asynchronous operation succeeded, the response body includes
/// the HTTP status code for the successful request. If the
/// asynchronous operation failed, the response body includes the HTTP
/// status code for the failed request, and also includes error
/// information regarding the failure.
/// </returns>
public static Task<ExpressRouteOperationStatusResponse> RemoveAsync(this IDedicatedCircuitOperations operations, string serviceKey)
{
return operations.RemoveAsync(serviceKey, CancellationToken.None);
}
}
}
| |
using System.Globalization;
using System.Net;
using CSharpGuidelinesAnalyzer.Rules.Maintainability;
using CSharpGuidelinesAnalyzer.Test.TestDataBuilders;
using Microsoft.CodeAnalysis.Diagnostics;
using Xunit;
namespace CSharpGuidelinesAnalyzer.Test.Specs.Maintainability
{
public sealed class DoNotDeclareRefOrOutParameterSpecs : CSharpGuidelinesAnalysisTestFixture
{
protected override string DiagnosticId => DoNotDeclareRefOrOutParameterAnalyzer.DiagnosticId;
[Fact]
internal void When_method_parameter_has_no_modifier_it_must_be_skipped()
{
// Arrange
ParsedSourceCode source = new MemberSourceCodeBuilder()
.InDefaultClass(@"
void M(int p)
{
}
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source);
}
[Fact]
internal void When_method_parameter_has_ref_modifier_it_must_be_reported()
{
// Arrange
ParsedSourceCode source = new MemberSourceCodeBuilder()
.InDefaultClass(@"
void M(ref int [|p|])
{
}
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source,
"Parameter 'p' is declared as ref or out");
}
[Fact]
internal void When_method_parameter_has_out_modifier_it_must_be_reported()
{
// Arrange
ParsedSourceCode source = new MemberSourceCodeBuilder()
.InDefaultClass(@"
void M(out int [|p|])
{
p = 1;
}
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source,
"Parameter 'p' is declared as ref or out");
}
[Fact]
internal void When_method_parameter_has_in_modifier_it_must_be_skipped()
{
// Arrange
ParsedSourceCode source = new MemberSourceCodeBuilder()
.InDefaultClass(@"
void M(in int p)
{
}
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source);
}
[Fact]
internal void When_method_parameter_has_out_modifier_in_deconstruct_method_it_must_be_skipped()
{
// Arrange
ParsedSourceCode source = new MemberSourceCodeBuilder()
.InDefaultClass(@"
public struct S
{
public string Text;
public bool IsEnabled;
public void Deconstruct(out string text, out bool isEnabled)
{
text = Text;
isEnabled = IsEnabled;
}
static void Test()
{
S s = new S
{
Text = string.Empty,
IsEnabled = true
};
(string a, bool b) = s;
}
}
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source);
}
[Fact]
internal void When_method_parameter_has_ref_modifier_to_ref_struct_it_must_be_skipped()
{
// Arrange
ParsedSourceCode source = new TypeSourceCodeBuilder()
.InGlobalScope(@"
ref struct S
{
}
class C
{
void M(ref S s) => throw null;
}
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source);
}
[Fact]
internal void When_constructor_parameter_has_no_modifier_it_must_be_skipped()
{
// Arrange
ParsedSourceCode source = new TypeSourceCodeBuilder()
.InGlobalScope(@"
class C
{
C(TimeSpan p)
{
}
}
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source);
}
[Fact]
internal void When_constructor_parameter_has_ref_modifier_it_must_be_reported()
{
// Arrange
ParsedSourceCode source = new TypeSourceCodeBuilder()
.InGlobalScope(@"
class C
{
C(ref bool [|p|])
{
}
}
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source,
"Parameter 'p' is declared as ref or out");
}
[Fact]
internal void When_constructor_parameter_has_out_modifier_it_must_be_reported()
{
// Arrange
ParsedSourceCode source = new TypeSourceCodeBuilder()
.InGlobalScope(@"
class C
{
C(out string [|p|])
{
p = default;
}
}
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source,
"Parameter 'p' is declared as ref or out");
}
[Fact]
internal void When_local_function_parameter_has_no_modifier_it_must_be_skipped()
{
// Arrange
ParsedSourceCode source = new MemberSourceCodeBuilder()
.InDefaultClass(@"
void M()
{
void L(int p)
{
}
}
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source);
}
[Fact]
internal void When_local_function_parameter_has_ref_modifier_it_must_be_reported()
{
// Arrange
ParsedSourceCode source = new MemberSourceCodeBuilder()
.InDefaultClass(@"
void M()
{
void L(ref int [|p|])
{
}
}
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source,
"Parameter 'p' is declared as ref or out");
}
[Fact]
internal void When_local_function_parameter_has_out_modifier_it_must_be_reported()
{
// Arrange
ParsedSourceCode source = new MemberSourceCodeBuilder()
.InDefaultClass(@"
void M()
{
void L(out int [|p|])
{
p = 1;
}
}
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source,
"Parameter 'p' is declared as ref or out");
}
[Fact]
internal void When_delegate_parameter_has_no_modifier_it_must_be_skipped()
{
// Arrange
ParsedSourceCode source = new TypeSourceCodeBuilder()
.InGlobalScope(@"
public delegate void D(DateTime p);
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source);
}
[Fact]
internal void When_delegate_parameter_has_ref_modifier_it_must_be_reported()
{
// Arrange
ParsedSourceCode source = new TypeSourceCodeBuilder()
.InGlobalScope(@"
public delegate void D(ref object [|p|]);
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source,
"Parameter 'p' is declared as ref or out");
}
[Fact]
internal void When_delegate_parameter_has_out_modifier_it_must_be_reported()
{
// Arrange
ParsedSourceCode source = new TypeSourceCodeBuilder()
.InGlobalScope(@"
public delegate void D(out string [|p|]);
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source,
"Parameter 'p' is declared as ref or out");
}
[Fact]
internal void When_method_parameter_has_ref_modifier_in_overridden_method_it_must_be_skipped()
{
// Arrange
ParsedSourceCode source = new TypeSourceCodeBuilder()
.InGlobalScope(@"
class C
{
protected virtual void M(ref string [|p|])
{
}
}
class D : C
{
protected override void M(ref string p)
{
}
}
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source,
"Parameter 'p' is declared as ref or out");
}
[Fact]
internal void When_method_parameter_has_ref_modifier_in_hidden_method_it_must_be_skipped()
{
// Arrange
ParsedSourceCode source = new TypeSourceCodeBuilder()
.InGlobalScope(@"
class C
{
protected virtual void M(ref string [|p|])
{
}
}
class D : C
{
protected new void M(ref string p)
{
}
}
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source,
"Parameter 'p' is declared as ref or out");
}
[Fact]
internal void When_method_parameter_has_ref_modifier_in_implicit_interface_implementation_it_must_be_skipped()
{
// Arrange
ParsedSourceCode source = new TypeSourceCodeBuilder()
.InGlobalScope(@"
interface I
{
void M(ref bool [|p|]);
}
class C : I
{
public void M(ref bool p)
{
}
}
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source,
"Parameter 'p' is declared as ref or out");
}
[Fact]
internal void When_method_parameter_has_ref_modifier_in_explicit_interface_implementation_it_must_be_skipped()
{
// Arrange
ParsedSourceCode source = new TypeSourceCodeBuilder()
.InGlobalScope(@"
interface I
{
void M(ref bool [|p|]);
}
class C : I
{
void I.M(ref bool p)
{
}
}
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source,
"Parameter 'p' is declared as ref or out");
}
[Fact]
internal void When_invocation_argument_has_ref_or_out_modifier_it_must_be_skipped()
{
// Arrange
ParsedSourceCode source = new MemberSourceCodeBuilder()
.InDefaultClass(@"
void M(int x, int y, int z)
{
Target(x, in y, ref z, out int _);
}
void Target(int a, in int b, ref int [|c|], out int [|d|]) => throw null;
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source,
"Parameter 'c' is declared as ref or out",
"Parameter 'd' is declared as ref or out");
}
[Fact]
internal void When_invocation_argument_has_out_modifier_in_TryParse_method_it_must_be_skipped()
{
// Arrange
ParsedSourceCode source = new MemberSourceCodeBuilder()
.Using(typeof(CultureInfo).Namespace)
.Using(typeof(IPAddress).Namespace)
.InDefaultClass(@"
void M()
{
bool result1 = int.TryParse(string.Empty, out _);
float number;
bool result2 = Single.TryParse(string.Empty, NumberStyles.AllowDecimalPoint, CultureInfo.CurrentCulture, out number);
bool result3 = IPAddress.TryParse(string.Empty, out var ipAddress);
}
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source);
}
[Fact]
internal void When_method_parameter_has_out_modifier_in_TryParse_method_it_must_be_skipped()
{
// Arrange
ParsedSourceCode source = new MemberSourceCodeBuilder()
.InDefaultClass(@"
void M()
{
bool result = Fraction.TryParse(string.Empty, out var value);
}
public sealed class Fraction
{
public static bool TryParse(string text, out Fraction value) => throw null;
}
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source);
}
[Fact]
internal void When_method_parameter_has_out_modifier_in_TryConvert_method_it_must_be_skipped()
{
// Arrange
ParsedSourceCode source = new MemberSourceCodeBuilder()
.InDefaultClass(@"
void M()
{
bool result = Fraction.TryConvert(string.Empty, out var value);
}
public sealed class Fraction
{
public static bool TryConvert(string text, out Fraction value) => throw null;
}
")
.Build();
// Act and assert
VerifyGuidelineDiagnostic(source);
}
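// Taken together, the three Try-pattern specs above indicate that the analyzer exempts
// bool-returning methods whose names start with "Try" (TryParse, TryConvert, ...) from the
// out-parameter rule, in addition to the Deconstruct and ref-struct exemptions covered earlier.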
protected override DiagnosticAnalyzer CreateAnalyzer()
{
return new DoNotDeclareRefOrOutParameterAnalyzer();
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Xml;
using System.IO;
using System.Net;
using Umbraco.Core;
using Umbraco.Core.Auditing;
using Umbraco.Core.Configuration;
using Umbraco.Core.Logging;
using Umbraco.Core.IO;
namespace umbraco.cms.businesslogic.packager.repositories
{
public class Repository : DisposableObject
{
public string Guid { get; private set; }
public string Name { get; private set; }
public string RepositoryUrl { get; private set; }
public string WebserviceUrl { get; private set; }
public RepositoryWebservice Webservice
{
get
{
var repo = new RepositoryWebservice(WebserviceUrl);
return repo;
}
}
public SubmitStatus SubmitPackage(string authorGuid, PackageInstance package, byte[] doc)
{
string packageName = package.Name;
string packageGuid = package.PackageGuid;
string description = package.Readme;
string packageFile = package.PackagePath;
try
{
byte[] pack;
// Dispose of the package stream even if reading it fails.
using (var fs1 = System.IO.File.Open(IOHelper.MapPath(packageFile), FileMode.Open, FileAccess.Read))
{
pack = new byte[fs1.Length];
fs1.Read(pack, 0, (int)fs1.Length);
}
byte[] thumb = new byte[0]; //todo upload thumbnail...
return Webservice.SubmitPackage(Guid, authorGuid, packageGuid, pack, doc, thumb, packageName, "", "", description);
}
catch (Exception ex)
{
LogHelper.Error<Repository>("An error occurred in SubmitPackage", ex);
return SubmitStatus.Error;
}
}
public static List<Repository> getAll()
{
var repositories = new List<Repository>();
foreach (var r in UmbracoConfig.For.UmbracoSettings().PackageRepositories.Repositories)
{
var repository = new Repository
{
Guid = r.Id.ToString(),
Name = r.Name
};
repository.RepositoryUrl = r.RepositoryUrl;
repository.WebserviceUrl = repository.RepositoryUrl.Trim('/') + "/" + r.WebServiceUrl.Trim('/');
if (r.HasCustomWebServiceUrl)
{
string wsUrl = r.WebServiceUrl;
if (wsUrl.Contains("://"))
{
repository.WebserviceUrl = r.WebServiceUrl;
}
else
{
repository.WebserviceUrl = repository.RepositoryUrl.Trim('/') + "/" + wsUrl.Trim('/');
}
}
repositories.Add(repository);
}
return repositories;
}
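// The webservice URL resolution above boils down to: use WebServiceUrl as-is when it is an
// absolute URL (contains "://"), otherwise append it to RepositoryUrl with single slashes.
// A minimal sketch of that rule (illustrative helper only, not part of the Umbraco API):
//
//   private static string ResolveWebserviceUrl(string repositoryUrl, string webServiceUrl)
//   {
//       return webServiceUrl.Contains("://")
//           ? webServiceUrl
//           : repositoryUrl.Trim('/') + "/" + webServiceUrl.Trim('/');
//   }
//
// getByGuid below applies the same rule to a single repository looked up by its GUID.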
public static Repository getByGuid(string repositoryGuid)
{
Guid id;
if (System.Guid.TryParse(repositoryGuid, out id) == false)
{
throw new FormatException("The repositoryGuid is not a valid GUID");
}
var found = UmbracoConfig.For.UmbracoSettings().PackageRepositories.Repositories.FirstOrDefault(x => x.Id == id);
if (found == null)
{
return null;
}
var repository = new Repository
{
Guid = found.Id.ToString(),
Name = found.Name
};
repository.RepositoryUrl = found.RepositoryUrl;
repository.WebserviceUrl = repository.RepositoryUrl.Trim('/') + "/" + found.WebServiceUrl.Trim('/');
if (found.HasCustomWebServiceUrl)
{
string wsUrl = found.WebServiceUrl;
if (wsUrl.Contains("://"))
{
repository.WebserviceUrl = found.WebServiceUrl;
}
else
{
repository.WebserviceUrl = repository.RepositoryUrl.Trim('/') + "/" + wsUrl.Trim('/');
}
}
return repository;
}
//shortcut method to download pack from repo and place it on the server...
public string fetch(string packageGuid)
{
return fetch(packageGuid, string.Empty);
}
public string fetch(string packageGuid, int userId)
{
// log
Audit.Add(AuditTypes.PackagerInstall,
string.Format("Package {0} fetched from {1}", packageGuid, this.Guid),
userId, -1);
return fetch(packageGuid);
}
public bool HasConnection()
{
string strServer = this.RepositoryUrl;
try
{
HttpWebRequest reqFP = (HttpWebRequest) HttpWebRequest.Create(strServer);
HttpWebResponse rspFP = (HttpWebResponse) reqFP.GetResponse();
if (HttpStatusCode.OK == rspFP.StatusCode)
{
// HTTP = 200 - Internet connection available, server online
rspFP.Close();
return true;
}
else
{
// Other status - Server or connection not available
rspFP.Close();
return false;
}
}
catch (WebException)
{
// Exception - connection not available
return false;
}
}
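// Note on HasConnection above: it issues a plain GET against RepositoryUrl and treats any
// non-200 status or WebException as "no connection". A more compact sketch of the same check
// (illustrative only; repositoryUrl stands in for this.RepositoryUrl, and the surrounding
// try/catch for WebException is still needed to map failures to false):
//
//   using (var response = (HttpWebResponse)WebRequest.Create(repositoryUrl).GetResponse())
//   {
//       return response.StatusCode == HttpStatusCode.OK;
//   }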
public string fetch(string packageGuid, string key)
{
byte[] fileByteArray = new byte[0];
if (key == string.Empty)
{
if (UmbracoConfig.For.UmbracoSettings().Content.UseLegacyXmlSchema)
fileByteArray = this.Webservice.fetchPackage(packageGuid);
else
fileByteArray = this.Webservice.fetchPackageByVersion(packageGuid, Version.Version41);
}
else
{
fileByteArray = this.Webservice.fetchProtectedPackage(packageGuid, key);
}
// Successful download
if (fileByteArray.Length > 0)
{
// Check for package directory
if (Directory.Exists(IOHelper.MapPath(Settings.PackagerRoot)) == false)
Directory.CreateDirectory(IOHelper.MapPath(Settings.PackagerRoot));
using (var fs1 = new FileStream(IOHelper.MapPath(Settings.PackagerRoot + Path.DirectorySeparatorChar + packageGuid + ".umb"), FileMode.Create))
{
fs1.Write(fileByteArray, 0, fileByteArray.Length);
fs1.Close();
return "packages\\" + packageGuid + ".umb";
}
}
// Nothing was downloaded; return an empty path
return "";
}
/// <summary>
/// Handles the disposal of resources. Derived from abstract class <see cref="DisposableObject"/> which handles common required locking logic.
/// </summary>
protected override void DisposeResources()
{
Webservice.Dispose();
}
}
}
| |
#region License
/*
Licensed to Blue Chilli Technology Pty Ltd and the contributors under the MIT License (the "License").
You may not use this file except in compliance with the License.
See the LICENSE file in the project root for more information.
*/
#endregion
// ***********************************************************************
// Assembly : XLabs.Platform.Droid
// Author : XLabs Team
// Created : 12-27-2015
//
// Last Modified By : XLabs Team
// Last Modified On : 01-04-2016
// ***********************************************************************
// <copyright file="GeolocationContinuousListener.cs" company="XLabs Team">
// Copyright (c) XLabs Team. All rights reserved.
// </copyright>
// <summary>
// This project is licensed under the Apache 2.0 license
// https://github.com/XLabs/Xamarin-Forms-Labs/blob/master/LICENSE
//
// XLabs is an open source project that aims to provide a powerful and
// cross-platform set of controls tailored to work with Xamarin Forms.
// </summary>
// ***********************************************************************
//
using System;
using System.Collections.Generic;
using System.Threading;
using Android.Locations;
using Android.OS;
using Object = Java.Lang.Object;
namespace ChilliSource.Mobile.Location
{
internal class GeolocationContinuousListener : Object, ILocationListener
{
private string _activeProvider;
private Android.Locations.Location _lastLocation;
private TimeSpan _timePeriod;
private readonly HashSet<string> _activeProviders = new HashSet<string>();
private readonly LocationManager _manager;
/// <summary>
/// Initializes a new instance of the <see cref="GeolocationContinuousListener" /> class.
/// </summary>
/// <param name="manager">The manager.</param>
/// <param name="timePeriod">The time period.</param>
/// <param name="providers">The providers.</param>
public GeolocationContinuousListener(LocationManager manager, TimeSpan timePeriod, IList<string> providers)
{
_manager = manager;
_timePeriod = timePeriod;
foreach (var provider in providers)
{
if (manager.IsProviderEnabled(provider))
{
_activeProviders.Add(provider);
}
}
}
/// <summary>
/// Called when the location has changed.
/// </summary>
/// <param name="location">The new location, as a Location object.</param>
/// <since version="Added in API level 1" />
/// <remarks>
/// <para tool="javadoc-to-mdoc">
/// Called when the location has changed.
/// </para>
/// <para tool="javadoc-to-mdoc"> There are no restrictions on the use of the supplied Location object.</para>
/// <para tool="javadoc-to-mdoc">
/// <format type="text/html">
/// <a
/// href="http://developer.android.com/reference/android/location/LocationListener.html#onLocationChanged(android.location.Location)"
/// target="_blank">
/// [Android Documentation]
/// </a>
/// </format>
/// </para>
/// </remarks>
public void OnLocationChanged(Android.Locations.Location location)
{
if (location.Provider != _activeProvider)
{
if (_activeProvider != null && _manager.IsProviderEnabled(_activeProvider))
{
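// A different provider reported a fix while the currently active provider is still
// enabled. The intent of the check below is to ignore the new fix when its provider is
// coarser (a higher Accuracy value) and the active provider has reported recently
// (within twice the requested time period); otherwise the listener switches over to
// the provider that produced this fix.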
var provider = _manager.GetProvider(location.Provider);
var lapsed = GetTimeSpan(location.Time) - GetTimeSpan(_lastLocation.Time);
if (provider.Accuracy > _manager.GetProvider(_activeProvider).Accuracy && lapsed < _timePeriod.Add(_timePeriod))
{
location.Dispose();
return;
}
}
_activeProvider = location.Provider;
}
var previous = Interlocked.Exchange(ref _lastLocation, location);
if (previous != null)
{
previous.Dispose();
}
var position = new Position();
if (location.HasAccuracy)
{
position.Accuracy = location.Accuracy;
}
if (location.HasAltitude)
{
position.Altitude = location.Altitude;
}
if (location.HasBearing)
{
position.Heading = location.Bearing;
}
if (location.HasSpeed)
{
position.Speed = location.Speed;
}
position.Longitude = location.Longitude;
position.Latitude = location.Latitude;
position.Timestamp = LocationService.GetTimestamp(location);
PositionChanged?.Invoke(this, new PositionEventArgs(position));
}
/// <summary>
/// Called when the provider is disabled by the user.
/// </summary>
/// <param name="provider">
/// the name of the location provider associated with this update.
/// </param>
/// <since version="Added in API level 1" />
/// <remarks>
/// <para tool="javadoc-to-mdoc">
/// Called when the provider is disabled by the user. If requestLocationUpdates
/// is called on an already disabled provider, this method is called
/// immediately.
/// </para>
/// <para tool="javadoc-to-mdoc">
/// <format type="text/html">
/// <a
/// href="http://developer.android.com/reference/android/location/LocationListener.html#onProviderDisabled(java.lang.String)"
/// target="_blank">
/// [Android Documentation]
/// </a>
/// </format>
/// </para>
/// </remarks>
public void OnProviderDisabled(string provider)
{
if (provider == LocationManager.PassiveProvider)
{
return;
}
lock (_activeProviders)
{
if (_activeProviders.Remove(provider) && _activeProviders.Count == 0)
{
OnPositionError(new PositionErrorEventArgs(LocationErrorType.PositionUnavailable));
}
}
}
/// <summary>
/// Called when the provider is enabled by the user.
/// </summary>
/// <param name="provider">
/// The name of the location provider associated with this update
/// </param>
/// <since version="Added in API level 1" />
/// <remarks>
/// <para tool="javadoc-to-mdoc">Called when the provider is enabled by the user.</para>
/// <para tool="javadoc-to-mdoc">
/// <format type="text/html">
/// <a
/// href="http://developer.android.com/reference/android/location/LocationListener.html#onProviderEnabled(java.lang.String)"
/// target="_blank">
/// [Android Documentation]
/// </a>
/// </format>
/// </para>
/// </remarks>
public void OnProviderEnabled(string provider)
{
if (provider == LocationManager.PassiveProvider)
{
return;
}
lock (_activeProviders) _activeProviders.Add(provider);
}
/// <summary>
/// Called when the provider status changes.
/// </summary>
/// <param name="provider">
/// The name of the location provider associated with this update
/// </param>
/// <param name="status">
/// <c>
/// <see cref="F:Android.Locations.Availability.OutOfService" />
/// </c>
/// if the
/// provider is out of service, and this is not expected to change in the
/// near future;
/// <c>
/// <see cref="F:Android.Locations.Availability.TemporarilyUnavailable" />
/// </c>
/// if
/// the provider is temporarily unavailable but is expected to be available
/// shortly; and
/// <c>
/// <see cref="F:Android.Locations.Availability.Available" />
/// </c>
/// if the
/// provider is currently available.
/// </param>
/// <param name="extras">
/// An optional Bundle which will contain provider specific
/// status variables.
/// <para tool="javadoc-to-mdoc" />
/// A number of common key/value pairs for the extras Bundle are listed
/// below. Providers that use any of the keys on this list must
/// provide the corresponding value as described below.
/// <list type="bullet">
/// <item>
/// <term>
/// satellites - the number of satellites used to derive the fix
/// </term>
/// </item>
/// </list>
/// </param>
/// <since version="Added in API level 1" />
/// <remarks>
/// <para tool="javadoc-to-mdoc">
/// Called when the provider status changes. This method is called when
/// a provider is unable to fetch a location or if the provider has recently
/// become available after a period of unavailability.
/// </para>
/// <para tool="javadoc-to-mdoc">
/// <format type="text/html">
/// <a
/// href="http://developer.android.com/reference/android/location/LocationListener.html#onStatusChanged(java.lang.String, int, android.os.Bundle)"
/// target="_blank">
/// [Android Documentation]
/// </a>
/// </format>
/// </para>
/// </remarks>
public void OnStatusChanged(string provider, Availability status, Bundle extras)
{
switch (status)
{
case Availability.Available:
{
OnProviderEnabled(provider);
break;
}
case Availability.OutOfService:
{
OnProviderDisabled(provider);
break;
}
}
}
public event EventHandler<PositionErrorEventArgs> PositionError;
public event EventHandler<PositionEventArgs> PositionChanged;
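// Android's Location.Time is UTC milliseconds since the Unix epoch; GetTimeSpan below
// converts it by multiplying with TimeSpan.TicksPerMillisecond (10,000 ticks per
// millisecond), so 1,000 ms becomes a one-second TimeSpan. Only differences between two
// such values are used above, so the epoch offset cancels out.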
private TimeSpan GetTimeSpan(long time)
{
return new TimeSpan(TimeSpan.TicksPerMillisecond * time);
}
private void OnPositionError(PositionErrorEventArgs e)
{
PositionError?.Invoke(this, e);
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
namespace System.Reflection.Emit
{
using System;
using System.Security;
using System.Runtime.InteropServices;
using System.Runtime.CompilerServices;
using System.Diagnostics.SymbolStore;
//-----------------------------------------------------------------------------------
// On Telesto, we don't ship the ISymWrapper.dll assembly. However, ReflectionEmit
// relies on that assembly to write out managed PDBs.
//
// This file implements the minimum subset of ISymWrapper.dll required to restore
// that functionality. Namely, the SymWriter and SymDocumentWriter objects.
//
// Ideally we wouldn't need ISymWrapper.dll on desktop either - it's an ugly piece
// of legacy. We could just use this (or COM-interop code) everywhere, but we might
// have to worry about compatibility.
//
// We've now got a real implementation even when no debugger is attached. It's
// up to the runtime to ensure it doesn't provide us with an insecure writer
// (eg. diasymreader) in the no-trust scenarios (no debugger, partial-trust code).
//-----------------------------------------------------------------------------------
//------------------------------------------------------------------------------
// SymWrapperCore is never instantiated and is used as an encapsulation class.
// It is our "ISymWrapper.dll" assembly within an assembly.
//------------------------------------------------------------------------------
class SymWrapperCore
{
//------------------------------------------------------------------------------
// Block instantiation
//------------------------------------------------------------------------------
private SymWrapperCore()
{
}
//------------------------------------------------------------------------------
// Implements Telesto's version of SymDocumentWriter (in the desktop world,
// this type is exposed from ISymWrapper.dll.)
//
// The only thing user code can do with this wrapper is to receive it from
// SymWriter.DefineDocument and pass it back to SymWriter.DefineSequencePoints.
//------------------------------------------------------------------------------
private unsafe class SymDocumentWriter : ISymbolDocumentWriter
{
//------------------------------------------------------------------------------
// Ctor
//------------------------------------------------------------------------------
internal SymDocumentWriter(PunkSafeHandle pDocumentWriterSafeHandle)
{
m_pDocumentWriterSafeHandle = pDocumentWriterSafeHandle;
// The handle is actually a pointer to a native ISymUnmanagedDocumentWriter.
m_pDocWriter = (ISymUnmanagedDocumentWriter *)m_pDocumentWriterSafeHandle.DangerousGetHandle();
m_vtable = (ISymUnmanagedDocumentWriterVTable)(Marshal.PtrToStructure(m_pDocWriter->m_unmanagedVTable, typeof(ISymUnmanagedDocumentWriterVTable)));
}
//------------------------------------------------------------------------------
// Returns the underlying ISymUnmanagedDocumentWriter* (as a safehandle.)
//------------------------------------------------------------------------------
internal PunkSafeHandle GetUnmanaged()
{
return m_pDocumentWriterSafeHandle;
}
//=========================================================================================
// Public interface methods start here. (Well actually, they're all NotSupported
// stubs since that's what they are on the real ISymWrapper.dll.)
//=========================================================================================
//------------------------------------------------------------------------------
// SetSource() wrapper
//------------------------------------------------------------------------------
void ISymbolDocumentWriter.SetSource(byte[] source)
{
throw new NotSupportedException(); // Intentionally not supported to match desktop CLR
}
//------------------------------------------------------------------------------
// SetCheckSum() wrapper
//------------------------------------------------------------------------------
void ISymbolDocumentWriter.SetCheckSum(Guid algorithmId, byte [] checkSum)
{
int hr = m_vtable.SetCheckSum(m_pDocWriter, algorithmId, (uint)checkSum.Length, checkSum);
if (hr < 0)
{
throw Marshal.GetExceptionForHR(hr);
}
}
private delegate int DSetCheckSum(ISymUnmanagedDocumentWriter * pThis, Guid algorithmId, uint checkSumSize, [In] byte[] checkSum);
//------------------------------------------------------------------------------
// This layout must match the unmanaged ISymUnmanagedDocumentWriter* COM vtable
// exactly. If a member is declared as an IntPtr rather than a delegate, it means
// we don't call that particular member.
//------------------------------------------------------------------------------
[StructLayout(LayoutKind.Sequential)]
private struct ISymUnmanagedDocumentWriterVTable
{
internal IntPtr QueryInterface;
internal IntPtr AddRef;
internal IntPtr Release;
internal IntPtr SetSource;
internal DSetCheckSum SetCheckSum;
}
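// The calling pattern used by this wrapper (and by SymWriter below), shown here as a sketch
// using the members defined above: read the COM object's first pointer-sized field to get its
// vtable, marshal that vtable into a sequential struct whose delegate fields line up with the
// slots we actually call, then invoke the delegate passing the raw interface pointer as the
// explicit "this" argument:
//
//   var vtable = (ISymUnmanagedDocumentWriterVTable)Marshal.PtrToStructure(
//       pDocWriter->m_unmanagedVTable, typeof(ISymUnmanagedDocumentWriterVTable));
//   int hr = vtable.SetCheckSum(pDocWriter, algorithmId, (uint)checkSum.Length, checkSum);
//   if (hr < 0) throw Marshal.GetExceptionForHR(hr);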
//------------------------------------------------------------------------------
// This layout must match the (start) of the unmanaged ISymUnmanagedDocumentWriter
// COM object.
//------------------------------------------------------------------------------
[StructLayout(LayoutKind.Sequential)]
private struct ISymUnmanagedDocumentWriter
{
internal IntPtr m_unmanagedVTable;
}
//------------------------------------------------------------------------------
// Stores underlying ISymUnmanagedDocumentWriter* pointer (wrapped in a safehandle.)
//------------------------------------------------------------------------------
private PunkSafeHandle m_pDocumentWriterSafeHandle;
private ISymUnmanagedDocumentWriter * m_pDocWriter;
//------------------------------------------------------------------------------
// Stores the "managed vtable" (actually a structure full of delegates that
// P/Invoke to the corresponding unmanaged COM methods.)
//------------------------------------------------------------------------------
private ISymUnmanagedDocumentWriterVTable m_vtable;
} // class SymDocumentWriter
//------------------------------------------------------------------------------
// Implements Telesto's version of SymWriter (in the desktop world,
// this type is exposed from ISymWrapper.dll.)
//------------------------------------------------------------------------------
internal unsafe class SymWriter : ISymbolWriter
{
//------------------------------------------------------------------------------
// Creates a SymWriter. The SymWriter is a managed wrapper around the unmanaged
// symbol writer provided by the runtime (ildbsymlib or diasymreader.dll).
//------------------------------------------------------------------------------
internal static ISymbolWriter CreateSymWriter()
{
return new SymWriter();
}
//------------------------------------------------------------------------------
// Basic ctor. You'd think this ctor would take the unmanaged symwriter object as an argument
// but to fit in with existing desktop code, the unmanaged writer is passed in
// through a subsequent call to InternalSetUnderlyingWriter
//------------------------------------------------------------------------------
private SymWriter()
{
}
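// Intended call sequence, sketched from the wrappers below (the runtime, not user code,
// supplies the unmanaged writer pointer; argument names are placeholders):
//
//   ISymbolWriter writer = SymWriter.CreateSymWriter();
//   ((SymWriter)writer).InternalSetUnderlyingWriter(ppUnderlyingWriter); // ISymUnmanagedWriter**
//   writer.Initialize(emitter, filename, true);
//   ISymbolDocumentWriter doc = writer.DefineDocument(url, language, languageVendor, documentType);
//   writer.OpenMethod(methodToken);
//   writer.DefineSequencePoints(doc, offsets, lines, columns, endLines, endColumns);
//   writer.CloseMethod();
//   writer.Close();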
//=========================================================================================
// Public interface methods start here.
//=========================================================================================
//------------------------------------------------------------------------------
// Initialize() wrapper
//------------------------------------------------------------------------------
void ISymbolWriter.Initialize(IntPtr emitter, String filename, bool fFullBuild)
{
int hr = m_vtable.Initialize(m_pWriter, emitter, filename, (IntPtr)0, fFullBuild);
if (hr < 0)
{
throw Marshal.GetExceptionForHR(hr);
}
}
//------------------------------------------------------------------------------
// DefineDocument() wrapper
//------------------------------------------------------------------------------
ISymbolDocumentWriter ISymbolWriter.DefineDocument(String url,
Guid language,
Guid languageVendor,
Guid documentType)
{
PunkSafeHandle psymUnmanagedDocumentWriter = new PunkSafeHandle();
int hr = m_vtable.DefineDocument(m_pWriter, url, ref language, ref languageVendor, ref documentType, out psymUnmanagedDocumentWriter);
if (hr < 0)
{
throw Marshal.GetExceptionForHR(hr);
}
if (psymUnmanagedDocumentWriter.IsInvalid)
{
return null;
}
return new SymDocumentWriter(psymUnmanagedDocumentWriter);
}
//------------------------------------------------------------------------------
// SetUserEntryPoint() wrapper
//------------------------------------------------------------------------------
void ISymbolWriter.SetUserEntryPoint(SymbolToken entryMethod)
{
int hr = m_vtable.SetUserEntryPoint(m_pWriter, entryMethod.GetToken());
if (hr < 0)
{
throw Marshal.GetExceptionForHR(hr);
}
}
//------------------------------------------------------------------------------
// OpenMethod() wrapper
//------------------------------------------------------------------------------
void ISymbolWriter.OpenMethod(SymbolToken method)
{
int hr = m_vtable.OpenMethod(m_pWriter, method.GetToken());
if (hr < 0)
{
throw Marshal.GetExceptionForHR(hr);
}
}
//------------------------------------------------------------------------------
// CloseMethod() wrapper
//------------------------------------------------------------------------------
void ISymbolWriter.CloseMethod()
{
int hr = m_vtable.CloseMethod(m_pWriter);
if (hr < 0)
{
throw Marshal.GetExceptionForHR(hr);
}
}
//------------------------------------------------------------------------------
// DefineSequencePoints() wrapper
//------------------------------------------------------------------------------
void ISymbolWriter.DefineSequencePoints(ISymbolDocumentWriter document,
int[] offsets,
int[] lines,
int[] columns,
int[] endLines,
int[] endColumns)
{
int spCount = 0;
if (offsets != null)
{
spCount = offsets.Length;
}
else if (lines != null)
{
spCount = lines.Length;
}
else if (columns != null)
{
spCount = columns.Length;
}
else if (endLines != null)
{
spCount = endLines.Length;
}
else if (endColumns != null)
{
spCount = endColumns.Length;
}
if (spCount == 0)
{
return;
}
if ( (offsets != null && offsets.Length != spCount) ||
(lines != null && lines.Length != spCount) ||
(columns != null && columns.Length != spCount) ||
(endLines != null && endLines.Length != spCount) ||
(endColumns != null && endColumns.Length != spCount) )
{
throw new ArgumentException();
}
// Sure, claim to accept any type that implements ISymbolDocumentWriter but the only one that actually
// works is the one returned by DefineDocument. The desktop ISymWrapper commits the same signature fraud.
// Ideally we'd just return a sealed opaque cookie type, which had an internal accessor to
// get the writer out.
// Regardless, this cast is important for security - we cannot allow our caller to provide
// arbitrary instances of this interface.
SymDocumentWriter docwriter = (SymDocumentWriter)document;
int hr = m_vtable.DefineSequencePoints(m_pWriter, docwriter.GetUnmanaged(), spCount, offsets, lines, columns, endLines, endColumns);
if (hr < 0)
{
throw Marshal.GetExceptionForHR(hr);
}
}
//------------------------------------------------------------------------------
// OpenScope() wrapper
//------------------------------------------------------------------------------
int ISymbolWriter.OpenScope(int startOffset)
{
int ret;
int hr = m_vtable.OpenScope(m_pWriter, startOffset, out ret);
if (hr < 0)
{
throw Marshal.GetExceptionForHR(hr);
}
return ret;
}
//------------------------------------------------------------------------------
// CloseScope() wrapper
//------------------------------------------------------------------------------
void ISymbolWriter.CloseScope(int endOffset)
{
int hr = m_vtable.CloseScope(m_pWriter, endOffset);
if (hr < 0)
{
throw Marshal.GetExceptionForHR(hr);
}
}
//------------------------------------------------------------------------------
// SetScopeRange() wrapper
//------------------------------------------------------------------------------
void ISymbolWriter.SetScopeRange(int scopeID, int startOffset, int endOffset)
{
int hr = m_vtable.SetScopeRange(m_pWriter, scopeID, startOffset, endOffset);
if (hr < 0)
{
throw Marshal.GetExceptionForHR(hr);
}
}
//------------------------------------------------------------------------------
// DefineLocalVariable() wrapper
//------------------------------------------------------------------------------
void ISymbolWriter.DefineLocalVariable(String name,
FieldAttributes attributes,
byte[] signature,
SymAddressKind addrKind,
int addr1,
int addr2,
int addr3,
int startOffset,
int endOffset)
{
int hr = m_vtable.DefineLocalVariable(m_pWriter,
name,
(int)attributes,
signature.Length,
signature,
(int)addrKind,
addr1,
addr2,
addr3,
startOffset,
endOffset);
if (hr < 0)
{
throw Marshal.GetExceptionForHR(hr);
}
}
//------------------------------------------------------------------------------
// DefineParameter() wrapper
//------------------------------------------------------------------------------
void ISymbolWriter.DefineParameter(String name,
ParameterAttributes attributes,
int sequence,
SymAddressKind addrKind,
int addr1,
int addr2,
int addr3)
{
throw new NotSupportedException(); // Intentionally not supported to match desktop CLR
}
//------------------------------------------------------------------------------
// DefineField() wrapper
//------------------------------------------------------------------------------
void ISymbolWriter.DefineField(SymbolToken parent,
String name,
FieldAttributes attributes,
byte[] signature,
SymAddressKind addrKind,
int addr1,
int addr2,
int addr3)
{
throw new NotSupportedException(); // Intentionally not supported to match desktop CLR
}
//------------------------------------------------------------------------------
// DefineGlobalVariable() wrapper
//------------------------------------------------------------------------------
void ISymbolWriter.DefineGlobalVariable(String name,
FieldAttributes attributes,
byte[] signature,
SymAddressKind addrKind,
int addr1,
int addr2,
int addr3)
{
throw new NotSupportedException(); // Intentionally not supported to match desktop CLR
}
//------------------------------------------------------------------------------
// Close() wrapper
//------------------------------------------------------------------------------
void ISymbolWriter.Close()
{
int hr = m_vtable.Close(m_pWriter);
if (hr < 0)
{
throw Marshal.GetExceptionForHR(hr);
}
}
//------------------------------------------------------------------------------
// SetSymAttribute() wrapper
//------------------------------------------------------------------------------
void ISymbolWriter.SetSymAttribute(SymbolToken parent, String name, byte[] data)
{
int hr = m_vtable.SetSymAttribute(m_pWriter, parent.GetToken(), name, data.Length, data);
if (hr < 0)
{
throw Marshal.GetExceptionForHR(hr);
}
}
//------------------------------------------------------------------------------
// OpenNamespace() wrapper
//------------------------------------------------------------------------------
void ISymbolWriter.OpenNamespace(String name)
{
int hr = m_vtable.OpenNamespace(m_pWriter, name);
if (hr < 0)
{
throw Marshal.GetExceptionForHR(hr);
}
}
//------------------------------------------------------------------------------
// CloseNamespace() wrapper
//------------------------------------------------------------------------------
void ISymbolWriter.CloseNamespace()
{
int hr = m_vtable.CloseNamespace(m_pWriter);
if (hr < 0)
{
throw Marshal.GetExceptionForHR(hr);
}
}
//------------------------------------------------------------------------------
// UsingNamespace() wrapper
//------------------------------------------------------------------------------
void ISymbolWriter.UsingNamespace(String name)
{
int hr = m_vtable.UsingNamespace(m_pWriter, name);
if (hr < 0)
{
throw Marshal.GetExceptionForHR(hr);
}
}
//------------------------------------------------------------------------------
// SetMethodSourceRange() wrapper
//------------------------------------------------------------------------------
void ISymbolWriter.SetMethodSourceRange(ISymbolDocumentWriter startDoc,
int startLine,
int startColumn,
ISymbolDocumentWriter endDoc,
int endLine,
int endColumn)
{
throw new NotSupportedException(); // Intentionally not supported to match desktop CLR
}
//------------------------------------------------------------------------------
// SetUnderlyingWriter() wrapper.
//------------------------------------------------------------------------------
void ISymbolWriter.SetUnderlyingWriter(IntPtr ppUnderlyingWriter)
{
throw new NotSupportedException(); // Intentionally not supported on Telesto as it's a very unsafe api
}
//------------------------------------------------------------------------------
// InternalSetUnderlyingWriter() wrapper.
//
// Furnishes the native ISymUnmanagedWriter* pointer.
//
// The parameter is actually a pointer to a pointer to an ISymUnmanagedWriter. As
// with the real ISymWrapper.dll, ISymWrapper performs *no* Release (or AddRef) on pointers
// furnished through SetUnderlyingWriter. Lifetime management is entirely up to the caller.
//------------------------------------------------------------------------------
internal void InternalSetUnderlyingWriter(IntPtr ppUnderlyingWriter)
{
m_pWriter = *((ISymUnmanagedWriter**)ppUnderlyingWriter);
m_vtable = (ISymUnmanagedWriterVTable) (Marshal.PtrToStructure(m_pWriter->m_unmanagedVTable, typeof(ISymUnmanagedWriterVTable)));
}
//------------------------------------------------------------------------------
// Define delegates for the unmanaged COM methods we invoke.
//------------------------------------------------------------------------------
private delegate int DInitialize(ISymUnmanagedWriter* pthis,
IntPtr emitter, //IUnknown*
[MarshalAs(UnmanagedType.LPWStr)] String filename, //WCHAR*
IntPtr pIStream, //IStream*
[MarshalAs(UnmanagedType.Bool)] bool fFullBuild
);
private delegate int DDefineDocument(ISymUnmanagedWriter* pthis,
[MarshalAs(UnmanagedType.LPWStr)] String url,
[In] ref Guid language,
[In] ref Guid languageVendor,
[In] ref Guid documentType,
[Out] out PunkSafeHandle ppsymUnmanagedDocumentWriter
);
private delegate int DSetUserEntryPoint(ISymUnmanagedWriter* pthis, int entryMethod);
private delegate int DOpenMethod(ISymUnmanagedWriter* pthis, int entryMethod);
private delegate int DCloseMethod(ISymUnmanagedWriter* pthis);
private delegate int DDefineSequencePoints(ISymUnmanagedWriter* pthis,
PunkSafeHandle document,
int spCount,
[In] int[] offsets,
[In] int[] lines,
[In] int[] columns,
[In] int[] endLines,
[In] int[] endColumns);
private delegate int DOpenScope(ISymUnmanagedWriter* pthis, int startOffset, [Out] out int pretval);
private delegate int DCloseScope(ISymUnmanagedWriter* pthis, int endOffset);
private delegate int DSetScopeRange(ISymUnmanagedWriter* pthis, int scopeID, int startOffset, int endOffset);
private delegate int DDefineLocalVariable(ISymUnmanagedWriter* pthis,
[MarshalAs(UnmanagedType.LPWStr)] String name,
int attributes,
int cSig,
[In] byte[] signature,
int addrKind,
int addr1,
int addr2,
int addr3,
int startOffset,
int endOffset
);
private delegate int DClose(ISymUnmanagedWriter* pthis);
private delegate int DSetSymAttribute(ISymUnmanagedWriter* pthis,
int parent,
[MarshalAs(UnmanagedType.LPWStr)] String name,
int cData,
[In] byte[] data
);
private delegate int DOpenNamespace(ISymUnmanagedWriter* pthis, [MarshalAs(UnmanagedType.LPWStr)] String name);
private delegate int DCloseNamespace(ISymUnmanagedWriter* pthis);
private delegate int DUsingNamespace(ISymUnmanagedWriter* pthis, [MarshalAs(UnmanagedType.LPWStr)] String name);
//------------------------------------------------------------------------------
// This layout must match the unmanaged ISymUnmanagedWriter* COM vtable
// exactly. If a member is declared as an IntPtr rather than a delegate, it means
// we don't call that particular member.
//------------------------------------------------------------------------------
[StructLayout(LayoutKind.Sequential)]
private struct ISymUnmanagedWriterVTable
{
internal IntPtr QueryInterface;
internal IntPtr AddRef;
internal IntPtr Release;
internal DDefineDocument DefineDocument;
internal DSetUserEntryPoint SetUserEntryPoint;
internal DOpenMethod OpenMethod;
internal DCloseMethod CloseMethod;
internal DOpenScope OpenScope;
internal DCloseScope CloseScope;
internal DSetScopeRange SetScopeRange;
internal DDefineLocalVariable DefineLocalVariable;
internal IntPtr DefineParameter;
internal IntPtr DefineField;
internal IntPtr DefineGlobalVariable;
internal DClose Close;
internal DSetSymAttribute SetSymAttribute;
internal DOpenNamespace OpenNamespace;
internal DCloseNamespace CloseNamespace;
internal DUsingNamespace UsingNamespace;
internal IntPtr SetMethodSourceRange;
internal DInitialize Initialize;
internal IntPtr GetDebugInfo;
internal DDefineSequencePoints DefineSequencePoints;
}
//------------------------------------------------------------------------------
// This layout must match the (start) of the unmanaged ISymUnmanagedWriter
// COM object.
//------------------------------------------------------------------------------
[StructLayout(LayoutKind.Sequential)]
private struct ISymUnmanagedWriter
{
internal IntPtr m_unmanagedVTable;
}
//------------------------------------------------------------------------------
// Stores native ISymUnmanagedWriter* pointer.
//
// As with the real ISymWrapper.dll, ISymWrapper performs *no* Release (or AddRef) on this pointer.
// Managing lifetime is up to the caller (coreclr.dll).
//------------------------------------------------------------------------------
private ISymUnmanagedWriter *m_pWriter;
//------------------------------------------------------------------------------
// Stores the "managed vtable" (actually a structure full of delegates that
// P/Invoke to the corresponding unmanaged COM methods.)
//------------------------------------------------------------------------------
private ISymUnmanagedWriterVTable m_vtable;
} // class SymWriter
} //class SymWrapperCore
//--------------------------------------------------------------------------------------
// SafeHandle for RAW MTA IUnknown's.
//
// ! Because the Release occurs in the finalizer thread, this safehandle really takes
// ! an ostrich approach to apartment issues. We only tolerate this here because we're emulating
// ! the desktop CLR's use of ISymWrapper which also pays lip service to COM apartment rules.
// !
// ! However, think twice about pulling this safehandle out for other uses.
//
// Had to make this a non-nested class since FCalls don't like to bind to nested classes.
//--------------------------------------------------------------------------------------
sealed class PunkSafeHandle : SafeHandle
{
internal PunkSafeHandle()
: base((IntPtr)0, true)
{
}
override protected bool ReleaseHandle()
{
m_Release(handle);
return true;
}
public override bool IsInvalid
{
get { return handle == ((IntPtr)0); }
}
private delegate void DRelease(IntPtr punk); // Delegate type for P/Invoking to coreclr.dll and doing an IUnknown::Release()
private static DRelease m_Release;
[MethodImplAttribute(MethodImplOptions.InternalCall)]
private static extern IntPtr nGetDReleaseTarget(); // FCall gets us the native DRelease target (so we don't need named dllexport from coreclr.dll)
static PunkSafeHandle()
{
m_Release = (DRelease)(Marshal.GetDelegateForFunctionPointer(nGetDReleaseTarget(), typeof(DRelease)));
m_Release((IntPtr)0); // make one call to make sure the delegate is fully prepped before we're in the critical finalizer situation.
}
} // PunkSafeHandle
} //namespace System.Reflection.Emit
| |
//-----------------------------------------------------------------------------
// Copyright (c) 2012 GarageGames, LLC
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to
// deal in the Software without restriction, including without limitation the
// rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
// sell copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
// FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
// IN THE SOFTWARE.
//-----------------------------------------------------------------------------
// RocketLauncher weapon.
// This script file contains all of the necessary datablocks needed for the
// RocketLauncher. These datablocks include sound profiles, light descriptions,
// particle effects, explosions, projectiles, items (weapon and ammo), shell
// casings (if any), and finally the weapon image which contains the state
// machine that determines how the weapon operates.
// The main "fire" method/mode is handled in "../scripts/server/weapons.cs"
// through a "WeaponImage" namespace function. This reduces duplicated code,
// although a unique fire method could still be implemented for this weapon.
// The "alt-fire" method/mode is handled in "../scripts/server/rocketlaucner.cs".
// Alt-fire for the Rocketlauncher allows you to "charge up" the number of
// projectiles, up to 3, that get fired. Hold to increase the number of shots
// and release to fire. After three shots are loaded and in the pipe, the
// weapon will automatically discharge on it's own.
// ----------------------------------------------------------------------------
// Sound profiles
// ----------------------------------------------------------------------------
datablock SFXProfile(RocketLauncherReloadSound)
{
filename = "art/sound/weapons/Crossbow_reload";
description = AudioClose3d;
preload = true;
};
datablock SFXProfile(RocketLauncherFireSound)
{
filename = "art/sound/weapons/explosion_mono_01";
description = AudioClose3d;
preload = true;
};
datablock SFXProfile(RocketLauncherIncLoadSound)
{
filename = "art/sound/weapons/relbow_mono_01";
description = AudioClose3d;
preload = true;
};
datablock SFXProfile(RocketLauncherFireEmptySound)
{
filename = "art/sound/weapons/Crossbow_firing_empty";
description = AudioClose3d;
preload = true;
};
datablock SFXProfile(RocketLauncherExplosionSound)
{
filename = "art/sound/weapons/Crossbow_explosion";
description = AudioDefault3d;
preload = true;
};
// ----------------------------------------------------------------------------
// Lights for the projectile(s)
// ----------------------------------------------------------------------------
datablock LightDescription(RocketLauncherLightDesc)
{
range = 4.0;
color = "1 1 0";
brightness = 5.0;
animationType = PulseLightAnim;
animationPeriod = 0.25;
//flareType = SimpleLightFlare0;
};
datablock LightDescription(RocketLauncherWaterLightDesc)
{
range = 2.0;
color = "1 1 1";
brightness = 5.0;
animationType = PulseLightAnim;
animationPeriod = 0.25;
//flareType = SimpleLightFlare0;
};
//----------------------------------------------------------------------------
// Debris
//----------------------------------------------------------------------------
datablock ParticleData(RocketDebrisTrailParticle)
{
textureName = "art/shapes/particles/impact";
dragCoefficient = 0;
inheritedVelFactor = 0.0;
constantAcceleration = 0.0;
lifetimeMS = 1200;//1000;
lifetimeVarianceMS = 299;//500;
useInvAlpha = true;//false;
spinSpeed = 1;
spinRandomMin = -300.0;
spinRandomMax = 0;
colors[0] = "1 0.897638 0.795276 0.4";
colors[1] = "0.795276 0.795276 0.795276 0.6";
colors[2] = "0 0 0 0";
sizes[0] = 0.5;//1.0;
sizes[1] = 2;
sizes[2] = 1;//1.0;
times[0] = 0.0;
times[1] = 0.498039;
times[2] = 1.0;
animTexName = "art/shapes/particles/impact";
times[3] = "1";
};
datablock ParticleEmitterData(RocketDebrisTrailEmitter)
{
ejectionPeriodMS = 6;//8;
periodVarianceMS = 2;//4;
ejectionVelocity = 1.0;
velocityVariance = 0.5;
thetaMin = 0.0;
thetaMax = 180.0;
phiReferenceVel = 0;
phiVariance = 360;
ejectionOffset = 0.0;//0.3;
particles = "RocketDebrisTrailParticle";
};
datablock DebrisData(RocketDebris)
{
shapeFile = "art/shapes/weapons/SwarmGun/rocket.dts";
emitters[0] = RocketDebrisTrailEmitter;
elasticity = 0.5;
friction = 0.5;
numBounces = 1;//2;
bounceVariance = 1;
explodeOnMaxBounce = true;
staticOnMaxBounce = false;
snapOnMaxBounce = false;
minSpinSpeed = 400;
maxSpinSpeed = 800;
render2D = false;
lifetime = 0.25;//0.5;//1;//2;
lifetimeVariance = 0.0;//0.25;//0.5;
velocity = 35;//30;//15;
velocityVariance = 10;//5;
fade = true;
useRadiusMass = true;
baseRadius = 0.3;
gravModifier = 1.0;
terminalVelocity = 45;
ignoreWater = false;
};
// ----------------------------------------------------------------------------
// Splash effects
// ----------------------------------------------------------------------------
datablock ParticleData(RocketSplashMist)
{
dragCoefficient = 1.0;
windCoefficient = 2.0;
gravityCoefficient = 0.3;
inheritedVelFactor = 0.0;
constantAcceleration = 0.0;
lifetimeMS = 600;
lifetimeVarianceMS = 100;
useInvAlpha = false;
spinRandomMin = -90.0;
spinRandomMax = 500.0;
spinSpeed = 1;
textureName = "art/shapes/particles/smoke";
colors[0] = "0.7 0.8 1.0 1.0";
colors[1] = "0.7 0.8 1.0 0.5";
colors[2] = "0.7 0.8 1.0 0.0";
sizes[0] = 0.2;//0.5;
sizes[1] = 0.4;//0.5;
sizes[2] = 0.8;
times[0] = 0.0;
times[1] = 0.5;
times[2] = 1.0;
};
datablock ParticleEmitterData(RocketSplashMistEmitter)
{
ejectionPeriodMS = 5;
periodVarianceMS = 0;
ejectionVelocity = 3.0;
velocityVariance = 2.0;
ejectionOffset = 0.15;
thetaMin = 85;
thetaMax = 85;
phiReferenceVel = 0;
phiVariance = 360;
overrideAdvance = false;
lifetimeMS = 250;
particles = "RocketSplashMist";
};
datablock ParticleData(RocketSplashParticle)
{
dragCoefficient = 1;
windCoefficient = 0.9;
gravityCoefficient = 0.3;
inheritedVelFactor = 0.2;
constantAcceleration = -1.4;
lifetimeMS = 600;
lifetimeVarianceMS = 200;
textureName = "art/shapes/particles/droplet";
colors[0] = "0.7 0.8 1.0 1.0";
colors[1] = "0.7 0.8 1.0 0.5";
colors[2] = "0.7 0.8 1.0 0.0";
sizes[0] = 0.5;
sizes[1] = 0.25;
sizes[2] = 0.25;
times[0] = 0.0;
times[1] = 0.5;
times[2] = 1.0;
};
datablock ParticleEmitterData(RocketSplashEmitter)
{
ejectionPeriodMS = 4;
periodVarianceMS = 0;
ejectionVelocity = 7.3;
velocityVariance = 2.0;
ejectionOffset = 0.0;
thetaMin = 30;
thetaMax = 80;
phiReferenceVel = 0;
phiVariance = 360;
overrideAdvance = false;
orientParticles = true;
orientOnVelocity = true;
lifetimeMS = 100;
particles = "RocketSplashParticle";
};
datablock ParticleData(RocketSplashRingParticle)
{
textureName = "art/shapes/particles/wake";
dragCoefficient = 0.0;
gravityCoefficient = 0.0;
inheritedVelFactor = 0.0;
lifetimeMS = 2500;
lifetimeVarianceMS = 200;
windCoefficient = 0.0;
useInvAlpha = 1;
spinRandomMin = 30.0;
spinRandomMax = 30.0;
spinSpeed = 1;
animateTexture = true;
framesPerSec = 1;
animTexTiling = "2 1";
animTexFrames = "0 1";
colors[0] = "0.7 0.8 1.0 1.0";
colors[1] = "0.7 0.8 1.0 0.5";
colors[2] = "0.7 0.8 1.0 0.0";
sizes[0] = 2.0;
sizes[1] = 4.0;
sizes[2] = 8.0;
times[0] = 0.0;
times[1] = 0.5;
times[2] = 1.0;
};
datablock ParticleEmitterData(RocketSplashRingEmitter)
{
lifetimeMS = "100";
ejectionPeriodMS = 200;
periodVarianceMS = 10;
ejectionVelocity = 0;
velocityVariance = 0;
ejectionOffset = 0;
thetaMin = 89;
thetaMax = 90;
phiReferenceVel = 0;
phiVariance = 1;
alignParticles = 1;
alignDirection = "0 0 1";
particles = "RocketSplashRingParticle";
};
datablock SplashData(RocketSplash)
{
// numSegments = 15;
// ejectionFreq = 15;
// ejectionAngle = 40;
// ringLifetime = 0.5;
// lifetimeMS = 300;
// velocity = 4.0;
// startRadius = 0.0;
// acceleration = -3.0;
// texWrap = 5.0;
// texture = "art/shapes/particles/splash";
emitter[0] = RocketSplashEmitter;
emitter[1] = RocketSplashMistEmitter;
emitter[2] = RocketSplashRingEmitter;
// colors[0] = "0.7 0.8 1.0 0.0";
// colors[1] = "0.7 0.8 1.0 0.3";
// colors[2] = "0.7 0.8 1.0 0.7";
// colors[3] = "0.7 0.8 1.0 0.0";
//
// times[0] = 0.0;
// times[1] = 0.4;
// times[2] = 0.8;
// times[3] = 1.0;
};
// ----------------------------------------------------------------------------
// Explosion Particle effects
// ----------------------------------------------------------------------------
datablock ParticleData(RocketExpFire)
{
gravityCoefficient = "-0.50061";
lifetimeMS = "400";
lifetimeVarianceMS = "299";
spinSpeed = "1";
spinRandomMin = "-200";
spinRandomMax = "0";
textureName = "art/shapes/particles/smoke";
animTexName = "art/shapes/particles/smoke";
colors[0] = "1 0.897638 0.795276 1";
colors[1] = "0.795276 0.393701 0 0.6";
colors[2] = "0 0 0 0";
sizes[0] = "1.99902";
sizes[1] = "7.99915";
sizes[2] = "3.99805";
times[1] = "0.392157";
times[2] = "1";
times[3] = "1";
};
datablock ParticleEmitterData(RocketExpFireEmitter)
{
ejectionPeriodMS = "10";
periodVarianceMS = "5";
ejectionVelocity = "3";
velocityVariance = "2";
particles = "RocketExpFire";
blendStyle = "NORMAL";
};
datablock ParticleData(RocketExpFireball)
{
textureName = "art/shapes/particles/fireball.png";
lifetimeMS = "300";
lifetimeVarianceMS = "299";
spinSpeed = "1";
spinRandomMin = "-400";
spinRandomMax = "0";
animTexName = "art/shapes/particles/fireball.png";
colors[0] = "1 0.897638 0.795276 0.2";
colors[1] = "1 0.496063 0 0.6";
colors[2] = "0.0944882 0.0944882 0.0944882 0";
sizes[0] = "0.997986";
sizes[1] = "1.99902";
sizes[2] = "2.99701";
times[1] = "0.498039";
times[2] = "1";
times[3] = "1";
gravityCoefficient = "-1";
};
datablock ParticleEmitterData(RocketExpFireballEmitter)
{
particles = "RocketExpFireball";
blendStyle = "ADDITIVE";
ejectionPeriodMS = "10";
periodVarianceMS = "5";
ejectionVelocity = "4";
velocityVariance = "2";
ejectionOffset = "2";
thetaMax = "120";
};
datablock ParticleData(RocketExpSmoke)
{
lifetimeMS = 1200;//"1250";
lifetimeVarianceMS = 299;//200;//"250";
textureName = "art/shapes/particles/smoke";
animTexName = "art/shapes/particles/smoke";
useInvAlpha = "1";
gravityCoefficient = "-0.100122";
spinSpeed = "1";
spinRandomMin = "-100";
spinRandomMax = "0";
colors[0] = "0.897638 0.795276 0.692913 0.4";//"0.192157 0.192157 0.192157 0.0944882";
colors[1] = "0.897638 0.897638 0.897638 0.8";//"0.454902 0.454902 0.454902 0.897638";
colors[2] = "0.4 0.4 0.4 0";//"1 1 1 0";
sizes[0] = "1.99597";
sizes[1] = "3.99805";
sizes[2] = "7.99915";
times[1] = "0.494118";
times[2] = "1";
times[3] = "1";
};
datablock ParticleEmitterData(RocketExpSmokeEmitter)
{
ejectionPeriodMS = "15";
periodVarianceMS = "5";
//ejectionOffset = "1";
thetaMax = "180";
particles = "RocketExpSmoke";
blendStyle = "NORMAL";
};
datablock ParticleData(RocketExpSparks)
{
textureName = "art/shapes/particles/droplet.png";
lifetimeMS = "100";
lifetimeVarianceMS = "50";
animTexName = "art/shapes/particles/droplet.png";
inheritedVelFactor = "0.391389";
sizes[0] = "1.99902";
sizes[1] = "2.49954";
sizes[2] = "0.997986";
colors[0] = "1.0 0.9 0.8 0.2";
colors[1] = "1.0 0.9 0.8 0.8";
colors[2] = "0.8 0.4 0.0 0.0";
times[0] = "0";
times[1] = "0.34902";
times[2] = "1";
times[3] = "1";
};
datablock ParticleEmitterData(RocketExpSparksEmitter)
{
particles = "RocketExpSparks";
blendStyle = "NORMAL";
ejectionPeriodMS = "10";
periodVarianceMS = "5";
ejectionVelocity = "60";
velocityVariance = "10";
thetaMax = "120";
phiReferenceVel = 0;
phiVariance = "360";
ejectionOffset = "0";
orientParticles = true;
orientOnVelocity = true;
};
datablock ParticleData(RocketExpSubFireParticles)
{
textureName = "art/shapes/particles/fireball.png";
gravityCoefficient = "-0.202686";
lifetimeMS = "400";
lifetimeVarianceMS = "299";
spinSpeed = "1";
spinRandomMin = "-200";
spinRandomMax = "0";
animTexName = "art/shapes/particles/fireball.png";
colors[0] = "1 0.897638 0.795276 0.2";
colors[1] = "1 0.496063 0 1";
colors[2] = "0.0944882 0.0944882 0.0944882 0";
sizes[0] = "0.997986";
sizes[1] = "1.99902";
sizes[2] = "2.99701";
times[1] = "0.498039";
times[2] = "1";
times[3] = "1";
};
datablock ParticleEmitterData(RocketExpSubFireEmitter)
{
particles = "RocketExpSubFireParticles";
blendStyle = "ADDITIVE";
ejectionPeriodMS = "10";
periodVarianceMS = "5";
ejectionVelocity = "4";
velocityVariance = "2";
thetaMax = "120";
};
datablock ParticleData(RocketExpSubSmoke)
{
textureName = "art/shapes/particles/smoke";
gravityCoefficient = "-0.40293";
lifetimeMS = "800";
lifetimeVarianceMS = "299";
spinSpeed = "1";
spinRandomMin = "-200";
spinRandomMax = "0";
animTexName = "art/shapes/particles/smoke";
colors[0] = "0.4 0.35 0.3 0.393701";
colors[1] = "0.45 0.45 0.45 0.795276";
colors[2] = "0.4 0.4 0.4 0";
sizes[0] = "1.99902";
sizes[1] = "3.99805";
sizes[2] = "7.99915";
times[1] = "0.4";
times[2] = "1";
times[3] = "1";
};
datablock ParticleEmitterData(RocketExpSubSmokeEmitter)
{
particles = "RocketExpSubSmoke";
ejectionPeriodMS = "30";
periodVarianceMS = "10";
ejectionVelocity = "2";
velocityVariance = "1";
ejectionOffset = 1;//"2";
blendStyle = "NORMAL";
};
// ----------------------------------------------------------------------------
// Water Explosion
// ----------------------------------------------------------------------------
datablock ParticleData(RLWaterExpDust)
{
textureName = "art/shapes/particles/steam";
dragCoefficient = 1.0;
gravityCoefficient = -0.01;
inheritedVelFactor = 0.0;
constantAcceleration = 0.0;
lifetimeMS = 2500;
lifetimeVarianceMS = 250;
useInvAlpha = false;
spinSpeed = 1;
spinRandomMin = -90.0;
spinRandomMax = 500.0;
colors[0] = "0.6 0.6 1.0 0.5";
colors[1] = "0.6 0.6 1.0 0.3";
sizes[0] = 0.25;
sizes[1] = 1.5;
times[0] = 0.0;
times[1] = 1.0;
};
datablock ParticleEmitterData(RLWaterExpDustEmitter)
{
ejectionPeriodMS = 1;
periodVarianceMS = 0;
ejectionVelocity = 10;
velocityVariance = 0.0;
ejectionOffset = 0.0;
thetaMin = 85;
thetaMax = 85;
phiReferenceVel = 0;
phiVariance = 360;
overrideAdvances = false;
lifetimeMS = 75;
particles = "RLWaterExpDust";
};
datablock ParticleData(RLWaterExpSparks)
{
textureName = "art/shapes/particles/spark_wet";
dragCoefficient = 1;
gravityCoefficient = 0.0;
inheritedVelFactor = 0.2;
constantAcceleration = 0.0;
lifetimeMS = 500;
lifetimeVarianceMS = 250;
colors[0] = "0.6 0.6 1.0 1.0";
colors[1] = "0.6 0.6 1.0 1.0";
colors[2] = "0.6 0.6 1.0 0.0";
sizes[0] = 0.5;
sizes[1] = 0.5;
sizes[2] = 0.75;
times[0] = 0.0;
times[1] = 0.5;
times[2] = 1.0;
};
datablock ParticleEmitterData(RLWaterExpSparkEmitter)
{
ejectionPeriodMS = 2;
periodVarianceMS = 0;
ejectionVelocity = 12;
velocityVariance = 6.75;
ejectionOffset = 0.0;
thetaMin = 0;
thetaMax = 60;
phiReferenceVel = 0;
phiVariance = 360;
overrideAdvances = false;
orientParticles = true;
lifetimeMS = 100;
particles = "RLWaterExpSparks";
};
datablock ParticleData(RLWaterExpSmoke)
{
textureName = "art/shapes/particles/smoke";
dragCoefficient = 0.4;
gravityCoefficient = -0.25;
inheritedVelFactor = 0.025;
constantAcceleration = -1.1;
lifetimeMS = 1250;
lifetimeVarianceMS = 0;
useInvAlpha = false;
spinSpeed = 1;
spinRandomMin = -200.0;
spinRandomMax = 200.0;
colors[0] = "0.1 0.1 1.0 1.0";
colors[1] = "0.4 0.4 1.0 1.0";
colors[2] = "0.4 0.4 1.0 0.0";
sizes[0] = 2.0;
sizes[1] = 6.0;
sizes[2] = 2.0;
times[0] = 0.0;
times[1] = 0.5;
times[2] = 1.0;
};
datablock ParticleEmitterData(RLWaterExpSmokeEmitter)
{
ejectionPeriodMS = 15;
periodVarianceMS = 0;
ejectionVelocity = 6.25;
velocityVariance = 0.25;
thetaMin = 0.0;
thetaMax = 90.0;
lifetimeMS = 250;
particles = "RLWaterExpSmoke";
};
datablock ParticleData(RLWaterExpBubbles)
{
textureName = "art/shapes/particles/millsplash01";
dragCoefficient = 0.0;
gravityCoefficient = -0.05;
inheritedVelFactor = 0.0;
constantAcceleration = 0.0;
lifetimeMS = 1500;
lifetimeVarianceMS = 250;
useInvAlpha = false;
spinRandomMin = -100.0;
spinRandomMax = 100.0;
spinSpeed = 1;
colors[0] = "0.7 0.8 1.0 0.0";
colors[1] = "0.7 0.8 1.0 0.4";
colors[2] = "0.7 0.8 1.0 0.0";
sizes[0] = 0.2;
sizes[1] = 0.4;
sizes[2] = 0.8;
times[0] = 0.0;
times[1] = 0.5;
times[2] = 1.0;
};
datablock ParticleEmitterData(RLWaterExpBubbleEmitter)
{
ejectionPeriodMS = 5;
periodVarianceMS = 0;
ejectionVelocity = 1.0;
ejectionOffset = 3.0;
velocityVariance = 0.5;
thetaMin = 0;
thetaMax = 80;
phiReferenceVel = 0;
phiVariance = 360;
overrideAdvances = false;
particles = "RLWaterExpBubbles";
};
datablock ExplosionData(RocketLauncherWaterExplosion)
{
//soundProfile = RLWaterExplosionSound;
emitter[0] = RLWaterExpDustEmitter;
emitter[1] = RLWaterExpSparkEmitter;
emitter[2] = RLWaterExpSmokeEmitter;
emitter[3] = RLWaterExpBubbleEmitter;
shakeCamera = true;
camShakeFreq = "10.0 11.0 9.0";
camShakeAmp = "20.0 20.0 20.0";
camShakeDuration = 1.5;
camShakeRadius = 20.0;
lightStartRadius = 20.0;
lightEndRadius = 0.0;
lightStartColor = "0.9 0.9 0.8";
lightEndColor = "0.6 0.6 1.0";
lightStartBrightness = 2.0;
lightEndBrightness = 0.0;
};
// ----------------------------------------------------------------------------
// Dry/Air Explosion Objects
// ----------------------------------------------------------------------------
datablock ExplosionData(RocketSubExplosion)
{
lifeTimeMS = 100;
offset = 0.4;
emitter[0] = RocketExpSubFireEmitter;
emitter[1] = RocketExpSubSmokeEmitter;
};
datablock ExplosionData(RocketLauncherExplosion)
{
soundProfile = RocketLauncherExplosionSound;
lifeTimeMS = 200; // I want a quick bang and dissipation, not a slow burn-out
// Volume particles
particleEmitter = RocketExpSmokeEmitter;
particleDensity = 10;//20;
particleRadius = 1;//2;
// Point emission
emitter[0] = RocketExpFireEmitter;
emitter[1] = RocketExpSparksEmitter;
emitter[2] = RocketExpSparksEmitter;
emitter[3] = RocketExpFireballEmitter;
// Sub explosion objects
subExplosion[0] = RocketSubExplosion;
// Camera Shaking
shakeCamera = true;
camShakeFreq = "10.0 11.0 9.0";
camShakeAmp = "15.0 15.0 15.0";
camShakeDuration = 1.5;
camShakeRadius = 20;
// Exploding debris
debris = RocketDebris;
debrisThetaMin = 0;//10;
debrisThetaMax = 90;//80;
debrisNum = 5;
debrisNumVariance = 2;
debrisVelocity = 1;//2;
debrisVelocityVariance = 0.2;//0.5;
lightStartRadius = 6.0;
lightEndRadius = 0.0;
lightStartColor = "1.0 0.7 0.2";
lightEndColor = "0.9 0.7 0.0";
lightStartBrightness = 2.5;
lightEndBrightness = 0.0;
lightNormalOffset = 3.0;
};
// ----------------------------------------------------------------------------
// Underwater Rocket projectile trail
// ----------------------------------------------------------------------------
datablock ParticleData(RocketTrailWaterParticle)
{
textureName = "art/shapes/particles/bubble";
dragCoefficient = 0.0;
gravityCoefficient = 0.1;
inheritedVelFactor = 0.0;
constantAcceleration = 0.0;
lifetimeMS = 1500;
lifetimeVarianceMS = 600;
useInvAlpha = false;
spinRandomMin = -100.0;
spinRandomMax = 100.0;
spinSpeed = 1;
colors[0] = "0.7 0.8 1.0 1.0";
colors[1] = "0.7 0.8 1.0 0.4";
colors[2] = "0.7 0.8 1.0 0.0";
sizes[0] = 0.05;
sizes[1] = 0.05;
sizes[2] = 0.05;
times[0] = 0.0;
times[1] = 0.5;
times[2] = 1.0;
};
datablock ParticleEmitterData(RocketTrailWaterEmitter)
{
ejectionPeriodMS = 5;
periodVarianceMS = 0;
ejectionVelocity = 1.0;
ejectionOffset = 0.1;
velocityVariance = 0.5;
thetaMin = 0.0;
thetaMax = 80.0;
phiReferenceVel = 0;
phiVariance = 360;
overrideAdvances = false;
particles = RocketTrailWaterParticle;
};
// ----------------------------------------------------------------------------
// Normal-fire Projectile Object
// ----------------------------------------------------------------------------
datablock ParticleData(RocketProjSmokeTrail)
{
textureName = "art/shapes/particles/smoke";
dragCoefficient = 0;
gravityCoefficient = -0.202686;
inheritedVelFactor = 0.0;
constantAcceleration = 0.0;
lifetimeMS = 750;
lifetimeVarianceMS = 749;
useInvAlpha = true;
spinRandomMin = -60;
spinRandomMax = 0;
spinSpeed = 1;
colors[0] = "0.3 0.3 0.3 0.598425";
colors[1] = "0.9 0.9 0.9 0.897638";
colors[2] = "0.9 0.9 0.9 0";
sizes[0] = 0.247207;
sizes[1] = 0.497467;
sizes[2] = 0.747726;
times[0] = 0.0;
times[1] = 0.4;
times[2] = 1.0;
animTexName = "art/shapes/particles/smoke";
times[3] = "1";
};
datablock ParticleEmitterData(RocketProjSmokeTrailEmitter)
{
ejectionPeriodMS = 1;
periodVarianceMS = 0;
ejectionVelocity = 0.75;
velocityVariance = 0;
thetaMin = 0.0;
thetaMax = 0.0;
phiReferenceVel = 90;
phiVariance = 0;
particles = "RocketProjSmokeTrail";
};
datablock ProjectileData(RocketLauncherProjectile)
{
projectileShapeName = "art/shapes/weapons/SwarmGun/rocket.dts";
directDamage = 30;
radiusDamage = 30;
damageRadius = 5;
areaImpulse = 2500;
explosion = RocketLauncherExplosion;
waterExplosion = RocketLauncherWaterExplosion;
decal = ScorchRXDecal;
splash = RocketSplash;
particleEmitter = RocketProjSmokeTrailEmitter;
particleWaterEmitter = RocketTrailWaterEmitter;
muzzleVelocity = 100;
velInheritFactor = 0.3;
armingDelay = 0;
lifetime = 5000; //(500m / 100m/s = 5000ms)
fadeDelay = 4500;
bounceElasticity = 0;
bounceFriction = 0;
isBallistic = false;
gravityMod = 0.80;
lightDesc = RocketLauncherLightDesc;
damageType = "RocketDamage";
};
// ----------------------------------------------------------------------------
// Underwater Projectile
// ----------------------------------------------------------------------------
datablock ProjectileData(RocketWetProjectile)
{
projectileShapeName = "art/shapes/weapons/SwarmGun/rocket.dts";
directDamage = 20;
radiusDamage = 10;
damageRadius = 10;
areaImpulse = 2000;
explosion = RocketLauncherWaterExplosion;
particleEmitter = RocketProjSmokeTrailEmitter;
particleWaterEmitter = RocketTrailWaterEmitter;
muzzleVelocity = 20;
velInheritFactor = 0.3;
armingDelay = 0;
lifetime = 5000; //(500m / 100m/s = 5000ms)
fadeDelay = 4500;
bounceElasticity = 0.2;
bounceFriction = 0.4;
isBallistic = true;
gravityMod = 0.80;
lightDesc = RocketLauncherWaterLightDesc;
damageType = "RocketDamage";
};
// ----------------------------------------------------------------------------
// Shell that's ejected during reload.
// ----------------------------------------------------------------------------
datablock DebrisData(RocketlauncherShellCasing)
{
shapeFile = "art/shapes/weapons/SwarmGun/rocket.dts";
lifetime = 6.0;
minSpinSpeed = 300.0;
maxSpinSpeed = 400.0;
elasticity = 0.65;
friction = 0.05;
numBounces = 5;
staticOnMaxBounce = true;
snapOnMaxBounce = false;
fade = true;
};
// ----------------------------------------------------------------------------
// Particle Emitter played when firing.
// ----------------------------------------------------------------------------
datablock ParticleData(RocketLauncherfiring1Particle)
{
textureName = "art/shapes/particles/Fireball";
//dragCoefficient = 100.0;
dragCoefficient = 5;
gravityCoefficient = -0.25;//-0.5;//0.0;
inheritedVelFactor = 0.25;//1.0;
constantAcceleration = 0.1;
lifetimeMS = 400;
lifetimeVarianceMS = 100;
useInvAlpha = false;
spinSpeed = 1;
spinRandomMin = -200;
spinRandomMax = 200;
colors[0] = "1 0.9 0.8 0.1";
colors[1] = "1 0.5 0 0.3";
colors[2] = "0.1 0.1 0.1 0";
sizes[0] = 0.2;//1;
sizes[1] = 0.25;//0.15;//0.75;
sizes[2] = 0.3;//0.1;//0.5;
times[0] = 0.0;
times[1] = 0.5;//0.294118;
times[2] = 1.0;
};
datablock ParticleEmitterData(RocketLauncherfiring1Emitter)
{
ejectionPeriodMS = 15;//75;
periodVarianceMS = 5;
ejectionVelocity = 1;
ejectionOffset = 0.0;
velocityVariance = 0;
thetaMin = 0.0;
thetaMax = 180;//10.0;
particles = "RocketLauncherfiring1Particle";
blendStyle = "ADDITIVE";
};
datablock ParticleData(RocketLauncherfiring2Particle)
{
textureName = "art/shapes/particles/impact";
//dragCoefficient = 100.0;
dragCoefficient = 5;
gravityCoefficient = -0.5;//0.0;
inheritedVelFactor = 0.25;//1.0;
constantAcceleration = 0.1;
lifetimeMS = 1600;//400;
lifetimeVarianceMS = 400;//100;
useInvAlpha = false;
spinSpeed = 1;
spinRandomMin = -200;
spinRandomMax = 200;
colors[0] = "0.4 0.4 0.4 0.2";
colors[1] = "0.4 0.4 0.4 0.1";
colors[2] = "0.0 0.0 0.0 0.0";
sizes[0] = 0.2;//1;
sizes[1] = 0.15;//0.75;
sizes[2] = 0.1;//0.5;
times[0] = 0.0;
times[1] = 0.5;//0.294118;
times[2] = 1.0;
};
datablock ParticleEmitterData(RocketLauncherfiring2Emitter)
{
ejectionPeriodMS = 15;//75;
periodVarianceMS = 5;
ejectionVelocity = 1;
ejectionOffset = 0.0;
velocityVariance = 0;
thetaMin = 0.0;
thetaMax = 180;//10.0;
particles = "RocketLauncherfiring2Particle";
blendStyle = "NORMAL";
};
// ----------------------------------------------------------------------------
// Ammo Item
// ----------------------------------------------------------------------------
datablock ItemData(RocketLauncherAmmo)
{
// Mission editor category
category = "Ammo";
// Add the Ammo namespace as a parent. The ammo namespace provides
// common ammo related functions and hooks into the inventory system.
className = "Ammo";
// Basic Item properties
shapeFile = "art/shapes/weapons/SwarmGun/rocket.dts";
mass = 2;
elasticity = 0.2;
friction = 0.6;
// Dynamic properties defined by the scripts
pickUpName = "Rockets";
maxInventory = 20;
clip = LurkerClip;
};
// ----------------------------------------------------------------------------
// Weapon Item. This is the item that exists in the world,
// i.e. when it's been dropped, thrown or is acting as a re-spawnable item.
// When the weapon is mounted onto a shape, the Image is used.
// ----------------------------------------------------------------------------
datablock ItemData(RocketLauncher)
{
// Mission editor category
category = "Weapon";
// Hook into Item Weapon class hierarchy. The weapon namespace
// provides common weapon handling functions in addition to hooks
// into the inventory system.
className = "Weapon";
// Basic Item properties
shapefile = "art/shapes/weapons/SwarmGun/swarmgun.dts";
mass = 5;
elasticity = 0.2;
friction = 0.6;
emap = true;
// Dynamic properties defined by the scripts
pickUpName = "SwarmGun";
description = "RocketLauncher";
image = RocketLauncherImage;
// weaponHUD
previewImage = 'swarmer.png';
reticle = 'reticle_rocketlauncher';
zoomReticle = 'bino';
};
// ----------------------------------------------------------------------------
// Image which does all the work. Images do not normally exist in
// the world, they can only be mounted on ShapeBase objects.
// ----------------------------------------------------------------------------
datablock ShapeBaseImageData(RocketLauncherImage)
{
// Basic Item properties
shapefile = "art/shapes/weapons/SwarmGun/swarmgun.dts";
emap = true;
// Specify mount point & offset for 3rd person, and eye offset
// for first person rendering.
mountPoint = 0;
offset = "0.0 0.15 0.025";
eyeOffset = "0.25 0.6 -0.4"; // 0.25=right/left 0.5=forward/backward, -0.5=up/down
// When firing from a point offset from the eye, muzzle correction
// will adjust the muzzle vector to point to the eye LOS point.
// Since this weapon doesn't actually fire from the muzzle point,
// we need to turn this off.
correctMuzzleVector = false;
// Add the WeaponImage namespace as a parent, WeaponImage namespace
// provides some hooks into the inventory system.
className = "WeaponImage";
// Projectile && Ammo.
item = RocketLauncher;
ammo = RocketLauncherAmmo;
projectile = RocketLauncherProjectile;
wetProjectile = RocketWetProjectile;
projectileType = Projectile;
// shell casings
casing = RocketlauncherShellCasing;
shellExitDir = "1.0 0.3 1.0";
shellExitOffset = "0.15 -0.56 -0.1";
shellExitVariance = 15.0;
shellVelocity = 3.0;
// Let there be light - NoLight, ConstantLight, PulsingLight, WeaponFireLight.
lightType = "WeaponFireLight";
lightColor = "1.0 1.0 0.9";
lightDuration = 200;
lightRadius = 10;
// Images have a state system which controls how the animations
// are run, which sounds are played, script callbacks, etc. This
// state system is downloaded to the client so that clients can
// predict state changes and animate accordingly. The following
// system supports basic ready->fire->reload transitions as
// well as a no-ammo->dryfire idle state.
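// (A hedged sketch of the onFire script callback that these states invoke is
// included as a comment after this datablock.)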
// Initial start up state
stateName[0] = "Preactivate";
stateTransitionOnLoaded[0] = "Activate";
stateTransitionOnNoAmmo[0] = "NoAmmo";
// Activating the gun.
// Called when the weapon is first mounted and there is ammo.
stateName[1] = "Activate";
stateTransitionOnTimeout[1] = "Ready";
stateTimeoutValue[1] = 0.6;
stateSequence[1] = "Activate";
// Ready to fire, just waiting for the trigger
stateName[2] = "Ready";
stateTransitionOnNoAmmo[2] = "NoAmmo";
stateTransitionOnTriggerDown[2] = "CheckWet";
stateTransitionOnAltTriggerDown[2] = "CheckWetAlt";
stateSequence[2] = "Ready";
// Fire the weapon. Calls the fire script which does the actual work.
stateName[3] = "Fire";
stateTransitionOnTimeout[3] = "PostFire";
stateTimeoutValue[3] = 0.9;
stateFire[3] = true;
stateRecoil[3] = LightRecoil;
stateAllowImageChange[3] = false;
stateSequence[3] = "Fire";
stateScript[3] = "onFire";
stateSound[3] = RocketLauncherFireSound;
stateEmitter[3] = RocketLauncherfiring1Emitter;
stateEmitterTime[3] = 0.6;
// Check ammo
stateName[4] = "PostFire";
stateTransitionOnAmmo[4] = "Reload";
stateTransitionOnNoAmmo[4] = "NoAmmo";
// Play the reload animation, and transition back into the ready state.
stateName[5] = "Reload";
stateTransitionOnTimeout[5] = "Ready";
stateTimeoutValue[5] = 0.9;
stateAllowImageChange[5] = false;
stateSequence[5] = "Reload";
stateEjectShell[5] = false; // set to true to enable shell casing eject
stateSound[5] = RocketLauncherReloadSound;
stateEmitter[5] = RocketLauncherfiring2Emitter;
stateEmitterTime[5] = 2.4;
// No ammo in the weapon, just idle until something shows up.
// Play the dry fire sound if the trigger is pulled.
stateName[6] = "NoAmmo";
stateTransitionOnAmmo[6] = "Reload";
stateSequence[6] = "NoAmmo";
stateTransitionOnTriggerDown[6] = "DryFire";
// No ammo dry fire
stateName[7] = "DryFire";
stateTimeoutValue[7] = 1.0;
stateTransitionOnTimeout[7] = "NoAmmo";
stateSound[7] = RocketLauncherFireEmptySound;
// Check if wet
stateName[8] = "CheckWet";
stateTransitionOnWet[8] = "WetFire";
stateTransitionOnNotWet[8] = "Fire";
// Check if alt wet
stateName[9] = "CheckWetAlt";
stateTransitionOnWet[9] = "WetFire";
stateTransitionOnNotWet[9] = "ChargeUp1";
// Wet fire
stateName[10] = "WetFire";
stateTransitionOnTimeout[10] = "PostFire";
stateTimeoutValue[10] = 0.9;
stateFire[10] = true;
stateRecoil[10] = LightRecoil;
stateAllowImageChange[10] = false;
stateSequence[10] = "Fire";
stateScript[10] = "onWetFire";
stateSound[10] = RocketLauncherFireSound;
// Begin "charge up", 1 in the pipe
stateName[11] = "ChargeUp1";
stateScript[11] = "readyLoad";
stateSound[11] = RocketLauncherIncLoadSound;
stateTransitionOnAltTriggerUp[11] = "AltFire";
stateTransitionOnTimeout[11] = "ChargeUp2";
stateTimeoutValue[11] = 0.8;
stateWaitForTimeout[11] = false;
// Charge up, 2 in the pipe
stateName[12] = "ChargeUp2";
stateScript[12] = "incLoad";
stateSound[12] = RocketLauncherIncLoadSound;
stateTransitionOnAltTriggerUp[12] = "AltFire";
stateTransitionOnTimeout[12] = "ChargeUp3";
stateTimeoutValue[12] = 0.8;
stateWaitForTimeout[12] = false;
// Charge up, 3 in the pipe
stateName[13] = "ChargeUp3";
stateScript[13] = "incLoad";
stateSound[13] = RocketLauncherIncLoadSound;
stateTransitionOnAltTriggerUp[13] = "AltFire";
stateTransitionOnTimeout[13] = "AltFire"; // let's force them to fire
stateTimeoutValue[13] = 1.2;
stateWaitForTimeout[13] = false;
// Alt-fire
stateName[14] = "AltFire";
stateTransitionOnTimeout[14] = "PostFire";
stateTimeoutValue[14] = 1.2;
stateFire[14] = true;
stateRecoil[14] = LightRecoil;
stateAllowImageChange[14] = false;
stateSequence[14] = "Fire";
stateScript[14] = "onAltFire";
stateSound[14] = RocketLauncherFireSound;
stateEmitter[14] = RocketLauncherfiring1Emitter;
stateEmitterTime[14] = 1.2;
};
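// ----------------------------------------------------------------------------
// Hypothetical script callback sketch (not part of the stock file).
// The state system above wires script callbacks through stateScript[3] = "onFire",
// stateScript[10] = "onWetFire" and stateScript[14] = "onAltFire". Assuming the
// stock Torque 3D weapon conventions, a minimal onFire handler could look roughly
// like the commented sketch below; stock code usually defines this once on the
// WeaponImage parent namespace rather than per image.
//
// function RocketLauncherImage::onFire(%this, %obj, %slot)
// {
//    // Spend one round, then spawn the projectile along the muzzle vector,
//    // inheriting part of the shooter's velocity (see velInheritFactor above).
//    %obj.decInventory(%this.ammo, 1);
//    %muzzleVector = %obj.getMuzzleVector(%slot);
//    %p = new Projectile()
//    {
//       dataBlock = %this.projectile;
//       initialVelocity = VectorAdd(VectorScale(%muzzleVector, %this.projectile.muzzleVelocity),
//                                   VectorScale(%obj.getVelocity(), %this.projectile.velInheritFactor));
//       initialPosition = %obj.getMuzzlePoint(%slot);
//       sourceObject = %obj;
//       sourceSlot = %slot;
//       client = %obj.client;
//    };
//    MissionCleanup.add(%p);
// }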
| |
using System;
using System.Collections;
using System.Collections.Generic;
using System.Collections.Specialized;
using System.ComponentModel.DataAnnotations;
using System.Globalization;
using System.Reflection;
using System.Runtime.Serialization;
using System.Web.Http;
using System.Web.Http.Description;
using System.Xml.Serialization;
using Newtonsoft.Json;
namespace QiMata.CaptainPlanetFoundation.WebApiApp.Areas.HelpPage.ModelDescriptions
{
/// <summary>
/// Generates model descriptions for given types.
/// </summary>
public class ModelDescriptionGenerator
{
// Modify this to support more data annotation attributes.
private readonly IDictionary<Type, Func<object, string>> AnnotationTextGenerator = new Dictionary<Type, Func<object, string>>
{
{ typeof(RequiredAttribute), a => "Required" },
{ typeof(RangeAttribute), a =>
{
RangeAttribute range = (RangeAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Range: inclusive between {0} and {1}", range.Minimum, range.Maximum);
}
},
{ typeof(MaxLengthAttribute), a =>
{
MaxLengthAttribute maxLength = (MaxLengthAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Max length: {0}", maxLength.Length);
}
},
{ typeof(MinLengthAttribute), a =>
{
MinLengthAttribute minLength = (MinLengthAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Min length: {0}", minLength.Length);
}
},
{ typeof(StringLengthAttribute), a =>
{
StringLengthAttribute strLength = (StringLengthAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "String length: inclusive between {0} and {1}", strLength.MinimumLength, strLength.MaximumLength);
}
},
{ typeof(DataTypeAttribute), a =>
{
DataTypeAttribute dataType = (DataTypeAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Data type: {0}", dataType.CustomDataType ?? dataType.DataType.ToString());
}
},
{ typeof(RegularExpressionAttribute), a =>
{
RegularExpressionAttribute regularExpression = (RegularExpressionAttribute)a;
return String.Format(CultureInfo.CurrentCulture, "Matching regular expression pattern: {0}", regularExpression.Pattern);
}
},
};
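// Example (hypothetical, not part of the stock template): to document an extra
// annotation, register another generator in the table above, e.g.
//
// { typeof(EmailAddressAttribute), a => "Must be a valid e-mail address" },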
// Modify this to add more default documentation entries.
private readonly IDictionary<Type, string> DefaultTypeDocumentation = new Dictionary<Type, string>
{
{ typeof(Int16), "integer" },
{ typeof(Int32), "integer" },
{ typeof(Int64), "integer" },
{ typeof(UInt16), "unsigned integer" },
{ typeof(UInt32), "unsigned integer" },
{ typeof(UInt64), "unsigned integer" },
{ typeof(Byte), "byte" },
{ typeof(Char), "character" },
{ typeof(SByte), "signed byte" },
{ typeof(Uri), "URI" },
{ typeof(Single), "decimal number" },
{ typeof(Double), "decimal number" },
{ typeof(Decimal), "decimal number" },
{ typeof(String), "string" },
{ typeof(Guid), "globally unique identifier" },
{ typeof(TimeSpan), "time interval" },
{ typeof(DateTime), "date" },
{ typeof(DateTimeOffset), "date" },
{ typeof(Boolean), "boolean" },
};
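// Example (hypothetical): additional simple types can be documented the same way
// by adding entries to the table above, e.g.
//
// { typeof(Byte[]), "collection of bytes" },
// { typeof(Object), "object" },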
private Lazy<IModelDocumentationProvider> _documentationProvider;
public ModelDescriptionGenerator(HttpConfiguration config)
{
if (config == null)
{
throw new ArgumentNullException("config");
}
_documentationProvider = new Lazy<IModelDocumentationProvider>(() => config.Services.GetDocumentationProvider() as IModelDocumentationProvider);
GeneratedModels = new Dictionary<string, ModelDescription>(StringComparer.OrdinalIgnoreCase);
}
public Dictionary<string, ModelDescription> GeneratedModels { get; private set; }
private IModelDocumentationProvider DocumentationProvider
{
get
{
return _documentationProvider.Value;
}
}
public ModelDescription GetOrCreateModelDescription(Type modelType)
{
if (modelType == null)
{
throw new ArgumentNullException("modelType");
}
Type underlyingType = Nullable.GetUnderlyingType(modelType);
if (underlyingType != null)
{
modelType = underlyingType;
}
ModelDescription modelDescription;
string modelName = ModelNameHelper.GetModelName(modelType);
if (GeneratedModels.TryGetValue(modelName, out modelDescription))
{
if (modelType != modelDescription.ModelType)
{
throw new InvalidOperationException(
String.Format(
CultureInfo.CurrentCulture,
"A model description could not be created. Duplicate model name '{0}' was found for types '{1}' and '{2}'. " +
"Use the [ModelName] attribute to change the model name for at least one of the types so that it has a unique name.",
modelName,
modelDescription.ModelType.FullName,
modelType.FullName));
}
return modelDescription;
}
if (DefaultTypeDocumentation.ContainsKey(modelType))
{
return GenerateSimpleTypeModelDescription(modelType);
}
if (modelType.IsEnum)
{
return GenerateEnumTypeModelDescription(modelType);
}
if (modelType.IsGenericType)
{
Type[] genericArguments = modelType.GetGenericArguments();
if (genericArguments.Length == 1)
{
Type enumerableType = typeof(IEnumerable<>).MakeGenericType(genericArguments);
if (enumerableType.IsAssignableFrom(modelType))
{
return GenerateCollectionModelDescription(modelType, genericArguments[0]);
}
}
if (genericArguments.Length == 2)
{
Type dictionaryType = typeof(IDictionary<,>).MakeGenericType(genericArguments);
if (dictionaryType.IsAssignableFrom(modelType))
{
return GenerateDictionaryModelDescription(modelType, genericArguments[0], genericArguments[1]);
}
Type keyValuePairType = typeof(KeyValuePair<,>).MakeGenericType(genericArguments);
if (keyValuePairType.IsAssignableFrom(modelType))
{
return GenerateKeyValuePairModelDescription(modelType, genericArguments[0], genericArguments[1]);
}
}
}
if (modelType.IsArray)
{
Type elementType = modelType.GetElementType();
return GenerateCollectionModelDescription(modelType, elementType);
}
if (modelType == typeof(NameValueCollection))
{
return GenerateDictionaryModelDescription(modelType, typeof(string), typeof(string));
}
if (typeof(IDictionary).IsAssignableFrom(modelType))
{
return GenerateDictionaryModelDescription(modelType, typeof(object), typeof(object));
}
if (typeof(IEnumerable).IsAssignableFrom(modelType))
{
return GenerateCollectionModelDescription(modelType, typeof(object));
}
return GenerateComplexTypeModelDescription(modelType);
}
// Change this to provide a different name for the member.
private static string GetMemberName(MemberInfo member, bool hasDataContractAttribute)
{
JsonPropertyAttribute jsonProperty = member.GetCustomAttribute<JsonPropertyAttribute>();
if (jsonProperty != null && !String.IsNullOrEmpty(jsonProperty.PropertyName))
{
return jsonProperty.PropertyName;
}
if (hasDataContractAttribute)
{
DataMemberAttribute dataMember = member.GetCustomAttribute<DataMemberAttribute>();
if (dataMember != null && !String.IsNullOrEmpty(dataMember.Name))
{
return dataMember.Name;
}
}
return member.Name;
}
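// Example (hypothetical): to also honor [XmlElement] names when they are present,
// a check along these lines could be added before the member.Name fallback above.
//
// XmlElementAttribute xmlElement = member.GetCustomAttribute<XmlElementAttribute>();
// if (xmlElement != null && !String.IsNullOrEmpty(xmlElement.ElementName))
// {
//     return xmlElement.ElementName;
// }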
private static bool ShouldDisplayMember(MemberInfo member, bool hasDataContractAttribute)
{
JsonIgnoreAttribute jsonIgnore = member.GetCustomAttribute<JsonIgnoreAttribute>();
XmlIgnoreAttribute xmlIgnore = member.GetCustomAttribute<XmlIgnoreAttribute>();
IgnoreDataMemberAttribute ignoreDataMember = member.GetCustomAttribute<IgnoreDataMemberAttribute>();
NonSerializedAttribute nonSerialized = member.GetCustomAttribute<NonSerializedAttribute>();
ApiExplorerSettingsAttribute apiExplorerSetting = member.GetCustomAttribute<ApiExplorerSettingsAttribute>();
bool hasMemberAttribute = member.DeclaringType.IsEnum ?
member.GetCustomAttribute<EnumMemberAttribute>() != null :
member.GetCustomAttribute<DataMemberAttribute>() != null;
// Display the member only if all of the following are true:
// no JsonIgnoreAttribute
// no XmlIgnoreAttribute
// no IgnoreDataMemberAttribute
// no NonSerializedAttribute
// no ApiExplorerSettingsAttribute with IgnoreApi set to true
// no DataContractAttribute without DataMemberAttribute or EnumMemberAttribute
return jsonIgnore == null &&
xmlIgnore == null &&
ignoreDataMember == null &&
nonSerialized == null &&
(apiExplorerSetting == null || !apiExplorerSetting.IgnoreApi) &&
(!hasDataContractAttribute || hasMemberAttribute);
}
private string CreateDefaultDocumentation(Type type)
{
string documentation;
if (DefaultTypeDocumentation.TryGetValue(type, out documentation))
{
return documentation;
}
if (DocumentationProvider != null)
{
documentation = DocumentationProvider.GetDocumentation(type);
}
return documentation;
}
private void GenerateAnnotations(MemberInfo property, ParameterDescription propertyModel)
{
List<ParameterAnnotation> annotations = new List<ParameterAnnotation>();
IEnumerable<Attribute> attributes = property.GetCustomAttributes();
foreach (Attribute attribute in attributes)
{
Func<object, string> textGenerator;
if (AnnotationTextGenerator.TryGetValue(attribute.GetType(), out textGenerator))
{
annotations.Add(
new ParameterAnnotation
{
AnnotationAttribute = attribute,
Documentation = textGenerator(attribute)
});
}
}
// Rearrange the annotations
annotations.Sort((x, y) =>
{
// Special-case RequiredAttribute so that it shows up on top
if (x.AnnotationAttribute is RequiredAttribute)
{
return -1;
}
if (y.AnnotationAttribute is RequiredAttribute)
{
return 1;
}
// Sort the rest based on alphabetic order of the documentation
return String.Compare(x.Documentation, y.Documentation, StringComparison.OrdinalIgnoreCase);
});
foreach (ParameterAnnotation annotation in annotations)
{
propertyModel.Annotations.Add(annotation);
}
}
private CollectionModelDescription GenerateCollectionModelDescription(Type modelType, Type elementType)
{
ModelDescription collectionModelDescription = GetOrCreateModelDescription(elementType);
if (collectionModelDescription != null)
{
return new CollectionModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
ElementDescription = collectionModelDescription
};
}
return null;
}
private ModelDescription GenerateComplexTypeModelDescription(Type modelType)
{
ComplexTypeModelDescription complexModelDescription = new ComplexTypeModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
Documentation = CreateDefaultDocumentation(modelType)
};
GeneratedModels.Add(complexModelDescription.Name, complexModelDescription);
bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
PropertyInfo[] properties = modelType.GetProperties(BindingFlags.Public | BindingFlags.Instance);
foreach (PropertyInfo property in properties)
{
if (ShouldDisplayMember(property, hasDataContractAttribute))
{
ParameterDescription propertyModel = new ParameterDescription
{
Name = GetMemberName(property, hasDataContractAttribute)
};
if (DocumentationProvider != null)
{
propertyModel.Documentation = DocumentationProvider.GetDocumentation(property);
}
GenerateAnnotations(property, propertyModel);
complexModelDescription.Properties.Add(propertyModel);
propertyModel.TypeDescription = GetOrCreateModelDescription(property.PropertyType);
}
}
FieldInfo[] fields = modelType.GetFields(BindingFlags.Public | BindingFlags.Instance);
foreach (FieldInfo field in fields)
{
if (ShouldDisplayMember(field, hasDataContractAttribute))
{
ParameterDescription propertyModel = new ParameterDescription
{
Name = GetMemberName(field, hasDataContractAttribute)
};
if (DocumentationProvider != null)
{
propertyModel.Documentation = DocumentationProvider.GetDocumentation(field);
}
complexModelDescription.Properties.Add(propertyModel);
propertyModel.TypeDescription = GetOrCreateModelDescription(field.FieldType);
}
}
return complexModelDescription;
}
private DictionaryModelDescription GenerateDictionaryModelDescription(Type modelType, Type keyType, Type valueType)
{
ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
return new DictionaryModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
KeyModelDescription = keyModelDescription,
ValueModelDescription = valueModelDescription
};
}
private EnumTypeModelDescription GenerateEnumTypeModelDescription(Type modelType)
{
EnumTypeModelDescription enumDescription = new EnumTypeModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
Documentation = CreateDefaultDocumentation(modelType)
};
bool hasDataContractAttribute = modelType.GetCustomAttribute<DataContractAttribute>() != null;
foreach (FieldInfo field in modelType.GetFields(BindingFlags.Public | BindingFlags.Static))
{
if (ShouldDisplayMember(field, hasDataContractAttribute))
{
EnumValueDescription enumValue = new EnumValueDescription
{
Name = field.Name,
Value = field.GetRawConstantValue().ToString()
};
if (DocumentationProvider != null)
{
enumValue.Documentation = DocumentationProvider.GetDocumentation(field);
}
enumDescription.Values.Add(enumValue);
}
}
GeneratedModels.Add(enumDescription.Name, enumDescription);
return enumDescription;
}
private KeyValuePairModelDescription GenerateKeyValuePairModelDescription(Type modelType, Type keyType, Type valueType)
{
ModelDescription keyModelDescription = GetOrCreateModelDescription(keyType);
ModelDescription valueModelDescription = GetOrCreateModelDescription(valueType);
return new KeyValuePairModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
KeyModelDescription = keyModelDescription,
ValueModelDescription = valueModelDescription
};
}
private ModelDescription GenerateSimpleTypeModelDescription(Type modelType)
{
SimpleTypeModelDescription simpleModelDescription = new SimpleTypeModelDescription
{
Name = ModelNameHelper.GetModelName(modelType),
ModelType = modelType,
Documentation = CreateDefaultDocumentation(modelType)
};
GeneratedModels.Add(simpleModelDescription.Name, simpleModelDescription);
return simpleModelDescription;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Text;
namespace FileHelpers
{
/// <summary>
/// This class allows you to convert the records of a file to a different record format.
/// </summary>
/// <typeparam name="TSource">The source record type.</typeparam>
/// <typeparam name="TDestination">The destination record type.</typeparam>
[DebuggerDisplay(
"FileTransformanEngine for types: {SourceType.Name} --> {DestinationType.Name}. Source Encoding: {SourceEncoding.EncodingName}. Destination Encoding: {DestinationEncoding.EncodingName}"
)]
public sealed class FileTransformEngine<TSource, TDestination>
where TSource : class, ITransformable<TDestination>
where TDestination : class
{
#region " Constructor "
/// <summary>Create a new FileTransformEngine.</summary>
public FileTransformEngine() {}
#endregion
#region " Private Fields "
// [DebuggerBrowsable(DebuggerBrowsableState.Never)]
// private static object[] mEmptyArray = new object[] {};
[DebuggerBrowsable(DebuggerBrowsableState.Never)]
private Encoding mSourceEncoding = Encoding.GetEncoding(0);
[DebuggerBrowsable(DebuggerBrowsableState.Never)]
private Encoding mDestinationEncoding = Encoding.GetEncoding(0);
private ErrorMode mErrorMode;
/// <summary>Indicates the behavior of the engine when an error is found.</summary>
public ErrorMode ErrorMode
{
get { return mErrorMode; }
set
{
mErrorMode = value;
mSourceErrorManager = new ErrorManager(value);
mDestinationErrorManager = new ErrorManager(value);
}
}
private ErrorManager mSourceErrorManager = new ErrorManager();
/// <summary>
/// Allows access to the <see cref="ErrorManager"/> of the engine used to
/// read the source file.
/// </summary>
public ErrorManager SourceErrorManager
{
get { return mSourceErrorManager; }
}
private ErrorManager mDestinationErrorManager = new ErrorManager();
/// <summary>
/// Allows access to the <see cref="ErrorManager"/> of the engine used to
/// write the destination file.
/// </summary>
public ErrorManager DestinationErrorManager
{
get { return mDestinationErrorManager; }
}
#endregion
#region " TransformFile "
/// <summary>
/// Transforms the contents of the sourceFile and writes them to the
/// destFile. (Use this overload only if you need the array of transformed
/// records; TransformFileFast is faster.)
/// </summary>
/// <param name="sourceFile">The source file.</param>
/// <param name="destFile">The destination file.</param>
/// <returns>The transformed records.</returns>
public TDestination[] TransformFile(string sourceFile, string destFile)
{
ExHelper.CheckNullParam(sourceFile, "sourceFile");
ExHelper.CheckNullParam(destFile, "destFile");
ExHelper.CheckDifferentsParams(sourceFile, "sourceFile", destFile, "destFile");
return CoreTransformFile(sourceFile, destFile);
}
/// <summary>
/// Transforms the contents of the sourceFile and writes them to the
/// destFile. (Faster and uses less memory; best choice for big
/// files.)
/// </summary>
/// <param name="sourceFile">The source file.</param>
/// <param name="destFile">The destination file.</param>
/// <returns>The number of transformed records.</returns>
public int TransformFileFast(string sourceFile, string destFile)
{
ExHelper.CheckNullParam(sourceFile, "sourceFile");
ExHelper.CheckNullParam(destFile, "destFile");
ExHelper.CheckDifferentsParams(sourceFile, "sourceFile", destFile, "destFile");
return
CoreTransformAsync(
new InternalStreamReader(sourceFile, SourceEncoding, true, EngineBase.DefaultReadBufferSize*5),
new StreamWriter(destFile, false, DestinationEncoding, EngineBase.DefaultWriteBufferSize*5));
}
/// <summary>
/// Transforms the records read from the source stream and writes them to
/// the destFile. (Faster and uses less memory; best choice for big
/// files.)
/// </summary>
/// <param name="sourceStream">The source stream.</param>
/// <param name="destFile">The destination file.</param>
/// <returns>The number of transformed records.</returns>
public int TransformFileFast(TextReader sourceStream, string destFile)
{
ExHelper.CheckNullParam(sourceStream, "sourceStream");
ExHelper.CheckNullParam(destFile, "destFile");
return CoreTransformAsync(sourceStream,
new StreamWriter(destFile, false, DestinationEncoding, EngineBase.DefaultWriteBufferSize*5));
}
/// <summary>
/// Transforms the records read from the source stream and writes them to
/// the destination stream. (Faster and uses less memory; best choice for
/// big files.)
/// </summary>
/// <param name="sourceStream">The source stream.</param>
/// <param name="destStream">The destination stream.</param>
/// <returns>The number of transformed records.</returns>
public int TransformFileFast(TextReader sourceStream, StreamWriter destStream)
{
ExHelper.CheckNullParam(sourceStream, "sourceStream");
ExHelper.CheckNullParam(destStream, "destStream");
return CoreTransformAsync(sourceStream, destStream);
}
/// <summary>
/// Transforms the contents of the sourceFile and writes them to the
/// destination stream. (Faster and uses less memory; best choice for big
/// files.)
/// </summary>
/// <param name="sourceFile">The source file.</param>
/// <param name="destStream">The destination stream.</param>
/// <returns>The number of transformed records.</returns>
public int TransformFileFast(string sourceFile, StreamWriter destStream)
{
ExHelper.CheckNullParam(sourceFile, "sourceFile");
ExHelper.CheckNullParam(destStream, "destStream");
return
CoreTransformAsync(
new InternalStreamReader(sourceFile, SourceEncoding, true, EngineBase.DefaultReadBufferSize*5),
destStream);
}
#endregion
// public string TransformString(string sourceData)
// {
// if (mConvert1to2 == null)
// throw new BadUsageException("You must define a method in the class " + SourceType.Name + " with the attribute [TransfortToRecord(typeof(" + DestinationType.Name + "))] that return an object of type " + DestinationType.Name);
//
// return CoreTransformAsync(sourceFile, destFile, mSourceType, mDestinationType, mConvert1to2);
// }
/// <summary>
/// Transforms an array of records from the source type to the destination type
/// </summary>
/// <param name="sourceRecords">An array of the source records.</param>
/// <returns>The transformed records.</returns>
public TDestination[] TransformRecords(TSource[] sourceRecords)
{
return CoreTransformRecords(sourceRecords);
//return CoreTransformAsync(sourceFile, destFile, mSourceType, mDestinationType, mConvert1to2);
}
/// <summary>
/// Transforms a file that contains source records into an array of the destination type
/// </summary>
/// <param name="sourceFile">A file containing the source records.</param>
/// <returns>The transformed records.</returns>
public TDestination[] ReadAndTransformRecords(string sourceFile)
{
var engine = new FileHelperAsyncEngine<TSource>(mSourceEncoding) {
ErrorMode = this.ErrorMode
};
mSourceErrorManager = engine.ErrorManager;
mDestinationErrorManager = new ErrorManager(ErrorMode);
var res = new List<TDestination>();
engine.BeginReadFile(sourceFile);
foreach (var record in engine)
res.Add(record.TransformTo());
engine.Close();
return res.ToArray();
}
#region " Transform Internal Methods "
private TDestination[] CoreTransform(InternalStreamReader sourceFile, StreamWriter destFile)
{
var sourceEngine = new FileHelperEngine<TSource>(mSourceEncoding);
var destEngine = new FileHelperEngine<TDestination>(mDestinationEncoding);
sourceEngine.ErrorMode = this.ErrorMode;
destEngine.ErrorManager.ErrorMode = this.ErrorMode;
mSourceErrorManager = sourceEngine.ErrorManager;
mDestinationErrorManager = destEngine.ErrorManager;
TSource[] source = sourceEngine.ReadStream(sourceFile);
TDestination[] transformed = CoreTransformRecords(source);
destEngine.WriteStream(destFile, transformed);
return transformed;
}
private TDestination[] CoreTransformRecords(TSource[] sourceRecords)
{
var res = new List<TDestination>(sourceRecords.Length);
for (int i = 0; i < sourceRecords.Length; i++)
res.Add(sourceRecords[i].TransformTo());
return res.ToArray();
}
private TDestination[] CoreTransformFile(string sourceFile, string destFile)
{
TDestination[] tempRes;
using (
var fs = new InternalStreamReader(sourceFile, mSourceEncoding, true, EngineBase.DefaultReadBufferSize*10)
) {
using (
var ds = new StreamWriter(destFile,
false,
mDestinationEncoding,
EngineBase.DefaultWriteBufferSize*10)) {
tempRes = CoreTransform(fs, ds);
ds.Close();
}
fs.Close();
}
return tempRes;
}
private int CoreTransformAsync(TextReader sourceFile, StreamWriter destFile)
{
var sourceEngine = new FileHelperAsyncEngine<TSource>();
var destEngine = new FileHelperAsyncEngine<TDestination>();
sourceEngine.ErrorMode = this.ErrorMode;
destEngine.ErrorMode = this.ErrorMode;
mSourceErrorManager = sourceEngine.ErrorManager;
mDestinationErrorManager = destEngine.ErrorManager;
sourceEngine.Encoding = mSourceEncoding;
destEngine.Encoding = mDestinationEncoding;
sourceEngine.BeginReadStream(sourceFile);
destEngine.BeginWriteStream(destFile);
foreach (var record in sourceEngine)
destEngine.WriteNext(record.TransformTo());
sourceEngine.Close();
destEngine.Close();
return sourceEngine.TotalRecords;
}
#endregion
#region " Properties "
/// <summary>The source record Type.</summary>
public Type SourceType
{
get { return typeof (TSource); }
}
/// <summary>The destination record Type.</summary>
public Type DestinationType
{
get { return typeof (TDestination); }
}
/// <summary>The Encoding of the Source File.</summary>
public Encoding SourceEncoding
{
get { return mSourceEncoding; }
set { mSourceEncoding = value; }
}
/// <summary>The Encoding of the Destination File.</summary>
public Encoding DestinationEncoding
{
get { return mDestinationEncoding; }
set { mDestinationEncoding = value; }
}
#endregion
}
}
| |
//
// Filmstrip.cs
//
// Author:
// Ruben Vermeersch <[email protected]>
// Lorenzo Milesi <[email protected]>
// Stephane Delcroix <[email protected]>
//
// Copyright (C) 2008-2010 Novell, Inc.
// Copyright (C) 2008, 2010 Ruben Vermeersch
// Copyright (C) 2008-2009 Lorenzo Milesi
// Copyright (C) 2008-2009 Stephane Delcroix
//
// Permission is hereby granted, free of charge, to any person obtaining
// a copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to
// permit persons to whom the Software is furnished to do so, subject to
// the following conditions:
//
// The above copyright notice and this permission notice shall be
// included in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
// EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
// NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
// LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
// OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
// WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
//
//TODO:
// * only redraw required parts on ExposeEvents (low)
// * Handle orientation changes (low) (require gtk# changes, so I can trigger an OrientationChanged event)
using System;
using System.Collections;
using Gtk;
using Gdk;
using FSpot.Core;
using FSpot.Utils;
using FSpot.Platform;
using FSpot.Bling;
using Hyena;
namespace FSpot.Widgets
{
public class Filmstrip : EventBox, IDisposable
{
// public event OrientationChangedHandler OrientationChanged;
public event EventHandler PositionChanged;
DoubleAnimation animation;
bool extendable = true;
public bool Extendable {
get { return extendable; }
set { extendable = value; }
}
Orientation orientation = Orientation.Horizontal;
public Orientation Orientation {
get { return orientation; }
set {
if (orientation == value)
return;
BackgroundPixbuf = null;
orientation = value;
// if (OrientationChanged != null) {
// OrientationChangedArgs args = new OrientationChangedArgs ();
// args.Orientation = value;
// OrientationChanged (this, args);
// }
}
}
int spacing = 6;
public int Spacing {
get { return spacing; }
set {
if (value < 0)
throw new ArgumentException ("Spacing is negative!");
spacing = value;
}
}
int thumb_offset = 17;
public int ThumbOffset {
get { return thumb_offset; }
set {
if (value < 0)
throw new ArgumentException ("ThumbOffset is negative!");
thumb_offset = value;
}
}
int thumb_size = 67;
public int ThumbSize {
get { return thumb_size; }
set {
if (value < 0)
throw new ArgumentException ("ThumbSize is negative!");
thumb_size = value;
}
}
bool squared_thumbs = false;
public bool SquaredThumbs {
get { return squared_thumbs; }
set { squared_thumbs = value; }
}
Pixbuf background_tile;
public Pixbuf BackgroundTile {
get {
if (background_tile == null) {
background_tile = new Gdk.Pixbuf (Gdk.Colorspace.Rgb, true, 8, 1, 77);
background_tile.Fill (0x00000000);
}
if (Orientation == Orientation.Horizontal && background_tile.Height < background_tile.Width)
background_tile = background_tile.RotateSimple (PixbufRotation.Counterclockwise);
else if (Orientation == Orientation.Vertical && background_tile.Width < background_tile.Height)
background_tile = background_tile.RotateSimple (PixbufRotation.Clockwise);
return background_tile;
}
set {
if (background_tile != value && background_tile != null)
background_tile.Dispose ();
background_tile = value;
BackgroundPixbuf = null;
}
}
int x_offset = 2;
public int XOffset {
get { return x_offset; }
set {
if (value < 0)
throw new ArgumentException ("value is negative!");
x_offset = value;
}
}
int y_offset = 2;
public int YOffset {
get { return y_offset; }
set {
if (value < 0)
throw new ArgumentException ("value is negative!");
y_offset = value;
}
}
float x_align = 0.5f, y_align = 0.5f;
public float XAlign {
get { return x_align; }
set {
if (value < 0.0 || value > 1.0)
throw new ArgumentException ("value is not between 0.0 and 1.0");
x_align = value;
}
}
public float YAlign {
get { return y_align; }
set {
if (value < 0.0 || value > 1.0)
throw new ArgumentException ("value is not between 0.0 and 1.0");
y_align = value;
}
}
public int ActiveItem {
get { return selection.Index; }
set {
if (value == selection.Index)
return;
if (value < 0)
value = 0;
if (value > selection.Collection.Count - 1)
value = selection.Collection.Count - 1;
selection.Index = value;
}
}
double position;
public double Position {
get {
return position;
}
set {
if (value == position)
return;
if (value < 0)
value = 0;
if (value > selection.Collection.Count - 1)
value = selection.Collection.Count - 1;
animation.From = position;
animation.To = value;
animation.Restart ();
if (PositionChanged != null)
PositionChanged (this, EventArgs.Empty);
}
}
BrowsablePointer selection;
DisposableCache<SafeUri, Pixbuf> thumb_cache;
public Filmstrip (BrowsablePointer selection) : this (selection, true)
{
}
public Filmstrip (BrowsablePointer selection, bool squared_thumbs) : base ()
{
CanFocus = true;
this.selection = selection;
this.selection.Changed += HandlePointerChanged;
this.selection.Collection.Changed += HandleCollectionChanged;
this.selection.Collection.ItemsChanged += HandleCollectionItemsChanged;
this.squared_thumbs = squared_thumbs;
thumb_cache = new DisposableCache<SafeUri, Pixbuf> (30);
ThumbnailLoader.Default.OnPixbufLoaded += HandlePixbufLoaded;
animation = new DoubleAnimation (0, 0, TimeSpan.FromSeconds (1.5), SetPositionCore, new CubicEase (EasingMode.EaseOut));
}
int min_length = 400;
int min_height = 200;
protected override void OnSizeRequested (ref Gtk.Requisition requisition)
{
base.OnSizeRequested (ref requisition);
requisition.Width = (Orientation == Orientation.Horizontal ? min_length : BackgroundTile.Width) + 2 * x_offset;
requisition.Height = (Orientation == Orientation.Vertical ? min_height : BackgroundTile.Height) + 2 * y_offset;
switch (Orientation) {
case Orientation.Horizontal:
if (min_length % BackgroundTile.Width != 0)
requisition.Width += BackgroundTile.Width - min_length % BackgroundTile.Width;
break;
case Orientation.Vertical:
if (min_height % BackgroundTile.Height != 0)
requisition.Height += BackgroundTile.Height - min_height % BackgroundTile.Height;
break;
}
}
Pixbuf background_pixbuf;
protected Pixbuf BackgroundPixbuf {
get {
if (background_pixbuf == null) {
int length = BackgroundTile.Width;
int height = BackgroundTile.Height;
switch (Orientation) {
case Orientation.Horizontal:
if (Allocation.Width < min_length || !extendable)
length = min_length;
else
length = Allocation.Width;
length = length - length % BackgroundTile.Width;
break;
case Orientation.Vertical:
if (Allocation.Height < min_height || !extendable)
height = min_height;
else
height = Allocation.Height;
height = height - height % BackgroundTile.Height;
break;
}
background_pixbuf = new Pixbuf (Gdk.Colorspace.Rgb, true, 8, length, height);
switch (Orientation) {
case Orientation.Horizontal:
for (int i = 0; i < length; i += BackgroundTile.Width) {
BackgroundTile.CopyArea (0, 0, BackgroundTile.Width, BackgroundTile.Height,
background_pixbuf, i, 0);
}
break;
case Orientation.Vertical:
for (int i = 0; i < height; i += BackgroundTile.Height) {
BackgroundTile.CopyArea (0, 0, BackgroundTile.Width, BackgroundTile.Height,
background_pixbuf, 0, i);
}
break;
}
}
return background_pixbuf;
}
set {
if (background_pixbuf != value && background_pixbuf != null) {
background_pixbuf.Dispose ();
background_pixbuf = value;
}
}
}
Hashtable start_indexes;
int filmstrip_start_pos;
int filmstrip_end_pos;
protected override bool OnExposeEvent (EventExpose evnt)
{
if (evnt.Window != GdkWindow)
return true;
if (selection.Collection.Count == 0)
return true;
if (Orientation == Orientation.Horizontal && (extendable && Allocation.Width >= BackgroundPixbuf.Width + (2 * x_offset) + BackgroundTile.Width) ||
Orientation == Orientation.Vertical && (extendable && Allocation.Height >= BackgroundPixbuf.Height + (2 * y_offset) + BackgroundTile.Height) )
BackgroundPixbuf = null;
if ( Orientation == Orientation.Horizontal && (extendable && Allocation.Width < BackgroundPixbuf.Width + (2 * x_offset) ) ||
Orientation == Orientation.Vertical && ( extendable && Allocation.Height < BackgroundPixbuf.Height + (2 * y_offset) ))
BackgroundPixbuf = null;
int xpad = 0, ypad = 0;
if (Allocation.Width > BackgroundPixbuf.Width + (2 * x_offset))
xpad = (int) (x_align * (Allocation.Width - (BackgroundPixbuf.Width + (2 * x_offset))));
if (Allocation.Height > BackgroundPixbuf.Height + (2 * y_offset))
ypad = (int) (y_align * (Allocation.Height - (BackgroundPixbuf.Height + (2 * y_offset))));
GdkWindow.DrawPixbuf (Style.BackgroundGC (StateType.Normal), BackgroundPixbuf,
0, 0, x_offset + xpad, y_offset + ypad,
BackgroundPixbuf.Width, BackgroundPixbuf.Height, Gdk.RgbDither.None, 0, 0);
//drawing the icons...
start_indexes = new Hashtable ();
Pixbuf icon_pixbuf = null;
if (Orientation == Orientation.Horizontal)
icon_pixbuf = new Pixbuf (Gdk.Colorspace.Rgb, true, 8, BackgroundPixbuf.Width, thumb_size);
else if (Orientation == Orientation.Vertical)
icon_pixbuf = new Pixbuf (Gdk.Colorspace.Rgb, true, 8, thumb_size, BackgroundPixbuf.Height);
icon_pixbuf.Fill (0x00000000);
Pixbuf current = GetPixbuf ((int) Math.Round (Position));
int ref_x = (int)(icon_pixbuf.Width / 2.0 - current.Width * (Position + 0.5f - Math.Round (Position))); //xpos of the reference icon
int ref_y = (int)(icon_pixbuf.Height / 2.0 - current.Height * (Position + 0.5f - Math.Round (Position)));
int start_x = Orientation == Orientation.Horizontal ? ref_x : 0;
int start_y = Orientation == Orientation.Vertical ? ref_y : 0;
for (int i = (int) Math.Round (Position); i < selection.Collection.Count; i++) {
current = GetPixbuf (i, ActiveItem == i);
if (Orientation == Orientation.Horizontal) {
current.CopyArea (0, 0, Math.Min (current.Width, icon_pixbuf.Width - start_x) , current.Height, icon_pixbuf, start_x, start_y);
start_indexes [start_x] = i;
start_x += current.Width + spacing;
if (start_x > icon_pixbuf.Width)
break;
} else if (Orientation == Orientation.Vertical) {
current.CopyArea (0, 0, current.Width, Math.Min (current.Height, icon_pixbuf.Height - start_y), icon_pixbuf, start_x, start_y);
start_indexes [start_y] = i;
start_y += current.Height + spacing;
if (start_y > icon_pixbuf.Height)
break;
}
}
filmstrip_end_pos = (Orientation == Orientation.Horizontal ? start_x : start_y);
start_x = Orientation == Orientation.Horizontal ? ref_x : 0;
start_y = Orientation == Orientation.Vertical ? ref_y : 0;
for (int i = (int) Math.Round (Position) - 1; i >= 0; i--) {
current = GetPixbuf (i, ActiveItem == i);
if (Orientation == Orientation.Horizontal) {
start_x -= (current.Width + spacing);
current.CopyArea (Math.Max (0, -start_x), 0, Math.Min (current.Width, current.Width + start_x), current.Height, icon_pixbuf, Math.Max (start_x, 0), 0);
start_indexes [Math.Max (0, start_x)] = i;
if (start_x < 0)
break;
} else if (Orientation == Orientation.Vertical) {
start_y -= (current.Height + spacing);
current.CopyArea (0, Math.Max (0, -start_y), current.Width, Math.Min (current.Height, current.Height + start_y), icon_pixbuf, 0, Math.Max (start_y, 0));
start_indexes [Math.Max (0, start_y)] = i;
if (start_y < 0)
break;
}
}
filmstrip_start_pos = Orientation == Orientation.Horizontal ? start_x : start_y;
GdkWindow.DrawPixbuf (Style.BackgroundGC (StateType.Normal), icon_pixbuf,
0, 0, x_offset + xpad, y_offset + ypad + thumb_offset,
icon_pixbuf.Width, icon_pixbuf.Height, Gdk.RgbDither.None, 0, 0);
icon_pixbuf.Dispose ();
return true;
}
protected override bool OnScrollEvent (EventScroll args)
{
float shift = 1f;
if ((args.State & Gdk.ModifierType.ShiftMask) > 0)
shift = 6f;
switch (args.Direction) {
case ScrollDirection.Up:
case ScrollDirection.Right:
Position = animation.To - shift;
return true;
case Gdk.ScrollDirection.Down:
case Gdk.ScrollDirection.Left:
Position = animation.To + shift;
return true;
}
return false;
}
protected override bool OnKeyPressEvent (Gdk.EventKey ek)
{
switch (ek.Key) {
case Gdk.Key.Page_Down:
case Gdk.Key.Down:
case Gdk.Key.Right:
ActiveItem ++;
return true;
case Gdk.Key.Page_Up:
case Gdk.Key.Up:
case Gdk.Key.Left:
ActiveItem --;
return true;
}
return false;
}
protected virtual void SetPositionCore (double position)
{
if (this.position == position)
return;
if (position < 0)
position = 0;
if (position > selection.Collection.Count - 1)
position = selection.Collection.Count - 1;
this.position = position;
QueueDraw ();
}
void HandlePointerChanged (object sender, BrowsablePointerChangedEventArgs args)
{
Position = ActiveItem;
}
void HandleCollectionChanged (IBrowsableCollection coll)
{
this.position = ActiveItem;
QueueDraw ();
}
void HandleCollectionItemsChanged (IBrowsableCollection coll, BrowsableEventArgs args)
{
if (!args.Changes.DataChanged)
return;
foreach (int item in args.Items)
thumb_cache.TryRemove ((selection.Collection [item]).DefaultVersion.Uri);
//FIXME call QueueDrawArea
QueueDraw ();
}
void HandlePixbufLoaded (ImageLoaderThread pl, ImageLoaderThread.RequestItem item) {
if (!thumb_cache.Contains (item.Uri)) {
return;
}
//FIXME use QueueDrawArea
//FIXME only invalidate if displayed
QueueDraw ();
}
protected override bool OnPopupMenu ()
{
DrawOrientationMenu (null);
return true;
}
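// Builds a context menu for choosing horizontal or vertical filmstrip placement; when invoked without a button event (keyboard popup) it is shown at the current event time.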
private bool DrawOrientationMenu (Gdk.EventButton args)
{
Gtk.Menu placement_menu = new Gtk.Menu ();
GtkUtil.MakeCheckMenuItem (placement_menu,
Mono.Unix.Catalog.GetString ("_Horizontal"),
App.Instance.Organizer.HandleFilmstripHorizontal,
true, Orientation == Orientation.Horizontal, true);
GtkUtil.MakeCheckMenuItem (placement_menu,
Mono.Unix.Catalog.GetString ("_Vertical"),
App.Instance.Organizer.HandleFilmstripVertical,
true, Orientation == Orientation.Vertical, true);
if (args != null)
placement_menu.Popup (null, null, null, args.Button, args.Time);
else
placement_menu.Popup (null, null, null, 0, Gtk.Global.CurrentEventTime);
return true;
}
protected override bool OnButtonPressEvent (EventButton evnt)
{
if (evnt.Button == 3)
return DrawOrientationMenu (evnt);
if (evnt.Button != 1 || (
(Orientation == Orientation.Horizontal && (evnt.X > filmstrip_end_pos || evnt.X < filmstrip_start_pos)) ||
(Orientation == Orientation.Vertical && (evnt.Y > filmstrip_end_pos || evnt.Y < filmstrip_start_pos))
))
return false;
HasFocus = true;
int pos = -1;
foreach (int key in start_indexes.Keys)
if (key <= (Orientation == Orientation.Horizontal ? evnt.X : evnt.Y) && key > pos)
pos = key;
ActiveItem = (int)start_indexes [pos];
return true;
}
protected Pixbuf GetPixbuf (int i)
{
return GetPixbuf (i, false);
}
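// Loads the thumbnail for item i from the in-memory cache or the XDG thumbnail store, requesting a background load and using a placeholder icon on a miss; applies the display color profile, adds a border and, if requested, a selection highlight.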
protected virtual Pixbuf GetPixbuf (int i, bool highlighted)
{
Pixbuf current = null;
SafeUri uri = (selection.Collection [i]).DefaultVersion.Uri;
try {
var pixbuf = thumb_cache.Get (uri);
if (pixbuf != null)
current = pixbuf.ShallowCopy ();
} catch (IndexOutOfRangeException) {
current = null;
}
if (current == null) {
var pixbuf = XdgThumbnailSpec.LoadThumbnail (uri, ThumbnailSize.Large, null);
if (pixbuf == null) {
ThumbnailLoader.Default.Request (uri, ThumbnailSize.Large, 0);
current = FSpot.Core.Global.IconTheme.LoadIcon ("gtk-missing-image", ThumbSize, (Gtk.IconLookupFlags)0);
} else {
if (SquaredThumbs) {
current = PixbufUtils.IconFromPixbuf (pixbuf, ThumbSize);
} else {
current = pixbuf.ScaleSimple (ThumbSize, ThumbSize, InterpType.Nearest);
}
pixbuf.Dispose ();
thumb_cache.Add (uri, current);
}
}
//FIXME: we might end up leaking a pixbuf here
Cms.Profile screen_profile;
if (FSpot.ColorManagement.Profiles.TryGetValue (Preferences.Get<string> (Preferences.COLOR_MANAGEMENT_DISPLAY_PROFILE), out screen_profile)) {
Pixbuf t = current.Copy ();
current = t;
FSpot.ColorManagement.ApplyProfile (current, screen_profile);
}
// Add a four pixel border around the thumbnail (Fill (0) leaves the border transparent)
Pixbuf whiteBorder = new Pixbuf (Gdk.Colorspace.Rgb, true, 8, current.Width, current.Height);
whiteBorder.Fill (0);
current.CopyArea (1, 1, current.Width - 8, current.Height - 8, whiteBorder, 4, 4);
current = whiteBorder;
if (!highlighted)
return current;
Pixbuf highlight = new Pixbuf (Gdk.Colorspace.Rgb, true, 8, current.Width, current.Height);
highlight.Fill (ColorToInt (Style.Light (StateType.Selected)));
// Add a two pixel highlight around the thumbnail
current.CopyArea (2, 2, current.Width - 4, current.Height - 4, highlight, 2, 2);
return highlight;
}
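// Packs a 16-bit-per-channel Gdk.Color into a 32-bit RGBA value with a fully opaque alpha.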
private static uint ColorToInt(Gdk.Color color) {
return (uint)((uint)color.Red / 256 << 24 ) + ((uint)color.Green / 256 << 16) + ((uint)color.Blue / 256 << 8) + 255;
}
~Filmstrip ()
{
Log.DebugFormat ("Finalizer called on {0}. Should be Disposed", GetType ());
Dispose (false);
}
public override void Dispose ()
{
Dispose (true);
base.Dispose ();
System.GC.SuppressFinalize (this);
}
bool is_disposed = false;
protected virtual void Dispose (bool disposing)
{
if (is_disposed)
return;
if (disposing) {
this.selection.Changed -= HandlePointerChanged;
this.selection.Collection.Changed -= HandleCollectionChanged;
this.selection.Collection.ItemsChanged -= HandleCollectionItemsChanged;
ThumbnailLoader.Default.OnPixbufLoaded -= HandlePixbufLoaded;
if (background_pixbuf != null)
background_pixbuf.Dispose ();
if (background_tile != null)
background_tile.Dispose ();
thumb_cache.Dispose ();
}
//Free unmanaged resources
is_disposed = true;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Globalization;
using System.IO;
using System.Net;
using System.Security.Cryptography;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
namespace Nowin
{
class Transport2HttpHandler : ITransportLayerHandler, IHttpLayerCallback
{
readonly IHttpLayerHandler _next;
public readonly int StartBufferOffset;
public readonly int ReceiveBufferSize;
public readonly int ResponseBodyBufferOffset;
readonly int _constantsOffset;
readonly byte[] _buffer;
public int ReceiveBufferPos;
int _receiveBufferFullness;
bool _waitingForRequest;
bool _isHttp10;
bool _isKeepAlive;
public bool ShouldSend100Continue;
public ulong RequestContentLength;
public bool RequestIsChunked;
bool _responseHeadersSend;
readonly bool _isSsl;
readonly string _serverName;
readonly IDateHeaderValueProvider _dateProvider;
readonly IIpIsLocalChecker _ipIsLocalChecker;
readonly ReqRespStream _reqRespStream;
volatile TaskCompletionSource<bool> _tcsSend;
CancellationTokenSource _cancellation;
int _responseHeaderPos;
bool _lastPacket;
bool _responseIsChunked;
ulong _responseContentLength;
IPEndPoint _remoteEndPoint;
IPEndPoint _localEndPoint;
string _requestPath;
string _requestQueryString;
string _requestMethod;
string _requestScheme;
string _requestProtocol;
string _remoteIpAddress;
string _remotePort;
bool _isLocal;
bool _knownIsLocal;
string _localIpAddress;
string _localPort;
int _statusCode;
string _reasonPhase;
readonly List<KeyValuePair<string, object>> _responseHeaders = new List<KeyValuePair<string, object>>();
readonly ThreadLocal<char[]> _charBuffer;
readonly int _handlerId;
[Flags]
enum WebSocketReqConditions
{
Start = 0,
GetMethod = 1,
UpgradeWebSocket = 2,
ConnectionUpgrade = 4,
Version13 = 8,
ValidKey = 16,
AllSatisfied = 31
}
WebSocketReqConditions _webSocketReqCondition;
string _webSocketKey;
bool _isWebSocket;
bool _startedReceiveData;
int _disconnecting;
bool _serverNameOverwrite;
bool _dateOverwrite;
public Transport2HttpHandler(IHttpLayerHandler next, bool isSsl, string serverName, IDateHeaderValueProvider dateProvider, IIpIsLocalChecker ipIsLocalChecker, byte[] buffer, int startBufferOffset, int receiveBufferSize, int constantsOffset, ThreadLocal<char[]> charBuffer, int handlerId)
{
_next = next;
StartBufferOffset = startBufferOffset;
ReceiveBufferSize = receiveBufferSize;
ResponseBodyBufferOffset = StartBufferOffset + ReceiveBufferSize * 2 + 8;
_constantsOffset = constantsOffset;
_charBuffer = charBuffer;
_handlerId = handlerId;
_buffer = buffer;
_isSsl = isSsl;
_serverName = serverName;
_dateProvider = dateProvider;
_ipIsLocalChecker = ipIsLocalChecker;
_cancellation = new CancellationTokenSource();
_reqRespStream = new ReqRespStream(this);
_next.Callback = this;
}
public int ReceiveBufferDataLength
{
get { return _receiveBufferFullness - StartBufferOffset - ReceiveBufferPos; }
}
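// Parses the request line and headers from the receive buffer; posOfReqEnd points just past the terminating CRLFCRLF located by FindRequestEnd.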
void ParseRequest(byte[] buffer, int startBufferOffset, int posOfReqEnd)
{
_isWebSocket = false;
_next.PrepareForRequest();
posOfReqEnd -= 2;
_responseHeaders.Clear();
_webSocketReqCondition = WebSocketReqConditions.Start;
if (_cancellation.IsCancellationRequested)
_cancellation = new CancellationTokenSource();
_responseIsChunked = false;
_responseContentLength = ulong.MaxValue;
var pos = startBufferOffset;
_requestMethod = ParseHttpMethod(buffer, ref pos);
_requestScheme = _isSsl ? "https" : "http";
string reqHost;
ParseHttpPath(buffer, ref pos, out _requestPath, out _requestQueryString, ref _requestScheme, out reqHost);
ParseHttpProtocol(buffer, ref pos, out _requestProtocol);
if (!SkipCrLf(buffer, ref pos)) throw new Exception("Request line does not end with CRLF");
_isKeepAlive = !_isHttp10;
ShouldSend100Continue = false;
RequestContentLength = 0;
RequestIsChunked = false;
if (!ParseHttpHeaders(buffer, pos, posOfReqEnd)) throw new Exception("Request headers cannot be parsed");
}
bool ParseHttpHeaders(byte[] buffer, int pos, int posOfReqEnd)
{
var name = "";
while (pos < posOfReqEnd)
{
int start;
var newHeaderKey = false;
if (!IsSpaceOrTab(buffer[pos]))
{
start = pos;
SkipTokenChars(buffer, ref pos);
if (buffer[pos] != ':') return false;
name = StringFromLatin1(buffer, start, pos);
newHeaderKey = true;
}
pos++;
SkipSpacesOrTabs(buffer, ref pos);
start = pos;
SkipToCR(buffer, ref pos);
var value = StringFromLatin1(buffer, start, pos);
if (newHeaderKey)
ProcessRequestHeader(name, value);
else
_next.AddRequestHeader(name, value);
SkipCrLf(buffer, ref pos);
}
return true;
}
void ProcessRequestHeader(string name, string value)
{
if (name.Equals("Connection", StringComparison.OrdinalIgnoreCase))
{
if (_isHttp10)
{
if (value.Equals("Keep-Alive", StringComparison.OrdinalIgnoreCase))
{
_isKeepAlive = true;
}
}
else
{
if (value.Equals("Close", StringComparison.OrdinalIgnoreCase))
{
_isKeepAlive = false;
}
else if (value.IndexOf("Upgrade", StringComparison.OrdinalIgnoreCase) >= 0)
{
_webSocketReqCondition |= WebSocketReqConditions.ConnectionUpgrade;
}
}
}
else if (name.Equals("Expect", StringComparison.OrdinalIgnoreCase))
{
if (value.Equals("100-Continue", StringComparison.OrdinalIgnoreCase))
{
ShouldSend100Continue = true;
}
}
else if (name.Equals("Content-Length", StringComparison.OrdinalIgnoreCase))
{
if (!ulong.TryParse(value, out RequestContentLength))
{
throw new InvalidDataException(string.Format("Wrong request content length: {0}", value));
}
}
else if (name.Equals("Transfer-Encoding", StringComparison.OrdinalIgnoreCase))
{
if (value.Equals("chunked", StringComparison.OrdinalIgnoreCase))
{
RequestIsChunked = true;
RequestContentLength = ulong.MaxValue;
}
}
else if (name.Equals("Upgrade", StringComparison.OrdinalIgnoreCase))
{
if (value.Equals("websocket", StringComparison.OrdinalIgnoreCase))
{
_webSocketReqCondition |= WebSocketReqConditions.UpgradeWebSocket;
}
}
else if (name.Equals("Sec-WebSocket-Version", StringComparison.OrdinalIgnoreCase))
{
if (value == "13")
{
_webSocketReqCondition |= WebSocketReqConditions.Version13;
}
}
else if (name.Equals("Sec-WebSocket-Key", StringComparison.OrdinalIgnoreCase))
{
_webSocketReqCondition |= WebSocketReqConditions.ValidKey;
_webSocketKey = value;
}
_next.AddRequestHeader(name, value);
}
void SkipToCR(byte[] buffer, ref int pos)
{
while (buffer[pos] != 13) pos++;
}
static void SkipTokenChars(byte[] buffer, ref int pos)
{
while (true)
{
var ch = buffer[pos];
if (ch <= 32) break;
if (ch == ':') break;
pos++;
}
}
static void SkipSpacesOrTabs(byte[] buffer, ref int pos)
{
while (IsSpaceOrTab(buffer[pos])) pos++;
}
static bool IsSpaceOrTab(byte ch)
{
return ch == 32 || ch == 9;
}
static bool SkipCrLf(byte[] buffer, ref int pos)
{
if (buffer[pos] == 13 && buffer[pos + 1] == 10)
{
pos += 2;
return true;
}
return false;
}
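// Fast-paths HTTP/1.0 and HTTP/1.1; other HTTP/1.x versions are accepted verbatim, anything else throws InvalidDataException.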
void ParseHttpProtocol(byte[] buffer, ref int pos, out string reqProtocol)
{
if (buffer[pos] == 'H' && buffer[pos + 1] == 'T' && buffer[pos + 2] == 'T' && buffer[pos + 3] == 'P' && buffer[pos + 4] == '/' && buffer[pos + 5] == '1' && buffer[pos + 6] == '.' && buffer[pos + 8] == 13)
{
switch (buffer[pos + 7])
{
case (byte)'0':
{
reqProtocol = "HTTP/1.0";
pos += 8;
_isHttp10 = true;
return;
}
case (byte)'1':
{
reqProtocol = "HTTP/1.1";
pos += 8;
_isHttp10 = false;
return;
}
}
reqProtocol = StringFromLatin1(buffer, pos, pos + 8);
pos += 8;
_isHttp10 = false;
return;
}
var p = pos;
SearchForFirstSpaceOrEndOfLine(buffer, ref p);
reqProtocol = StringFromLatin1(buffer, pos, p);
throw new InvalidDataException(string.Format("Unsupported request protocol: {0}", reqProtocol));
}
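// Handles origin-form targets ("/path?query") and the asterisk-form "*"; absolute-form request targets are not implemented.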
void ParseHttpPath(byte[] buffer, ref int pos, out string reqPath, out string reqQueryString, ref string reqScheme, out string reqHost)
{
var start = pos;
var p = start;
reqHost = null;
if (buffer[p] == '/')
{
p++;
switch (SearchForFirstSpaceOrQuestionMarkOrEndOfLine(buffer, ref p))
{
case (byte)' ':
reqPath = ParsePath(buffer, start, p);
reqQueryString = "";
pos = p + 1;
return;
case 13:
reqPath = ParsePath(buffer, start, p);
reqQueryString = "";
pos = p;
return;
case (byte)'?':
reqPath = ParsePath(buffer, start, p);
p++;
start = p;
switch (SearchForFirstSpaceOrEndOfLine(buffer, ref p))
{
case (byte)' ':
reqQueryString = StringFromLatin1(buffer, start, p);
pos = p + 1;
return;
case 13:
reqQueryString = StringFromLatin1(buffer, start, p);
pos = p;
return;
default:
throw new InvalidOperationException();
}
default:
throw new InvalidOperationException();
}
}
if (buffer[p] == '*' && buffer[p + 1] == ' ')
{
reqPath = "*";
reqQueryString = "";
pos = p + 2;
return;
}
throw new NotImplementedException();
}
char[] GetCharBuffer()
{
return _charBuffer.Value;
}
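// Decodes %XX escapes in the request path byte-by-byte into the shared char buffer; malformed escapes are copied through literally.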
string ParsePath(byte[] buffer, int start, int end)
{
var chs = GetCharBuffer();
var used = 0;
while (start < end)
{
var ch = buffer[start++];
if (ch == '%')
{
ch = buffer[start++];
var v1 = ParseHexChar(ch);
if (v1 < 0)
{
chs[used++] = '%';
chs[used++] = (char)ch;
continue;
}
var v2 = ParseHexChar(buffer[start++]);
if (v2 < 0)
{
chs[used++] = '%';
chs[used++] = (char)ch;
chs[used++] = (char)buffer[start - 1];
continue;
}
chs[used++] = (char)(v1 * 16 + v2);
}
else
{
chs[used++] = (char)ch;
}
}
return new string(chs, 0, used);
}
public static int ParseHexChar(byte ch)
{
if (ch >= '0' && ch <= '9') return ch - '0';
if (ch >= 'A' && ch <= 'F') return ch - 'A' + 10;
if (ch >= 'a' && ch <= 'f') return ch - 'a' + 10;
return -1;
}
static byte SearchForFirstSpaceOrEndOfLine(byte[] buffer, ref int p)
{
while (true)
{
var ch = buffer[p];
if (ch == ' ' || ch == 13) return ch;
p++;
}
}
static byte SearchForFirstSpaceOrQuestionMarkOrEndOfLine(byte[] buffer, ref int p)
{
while (true)
{
var ch = buffer[p];
if (ch == ' ' || ch == '?' || ch == 13) return ch;
p++;
}
}
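// Matches the common HTTP methods with direct byte comparisons; any other token is read up to the next space or CR and returned as-is.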
string ParseHttpMethod(byte[] buffer, ref int pos)
{
var p = pos;
var start = p;
switch (buffer[p])
{
case (byte)'G':
if (buffer[p + 1] == 'E' && buffer[p + 2] == 'T' && buffer[p + 3] == ' ')
{
pos = p + 4;
_webSocketReqCondition |= WebSocketReqConditions.GetMethod;
return "GET";
}
break;
case (byte)'P':
if (buffer[p + 1] == 'O' && buffer[p + 2] == 'S' && buffer[p + 3] == 'T' && buffer[p + 4] == ' ')
{
pos = p + 5;
return "POST";
}
if (buffer[p + 1] == 'U' && buffer[p + 2] == 'T' && buffer[p + 3] == ' ')
{
pos = p + 4;
return "PUT";
}
break;
case (byte)'H':
if (buffer[p + 1] == 'E' && buffer[p + 2] == 'A' && buffer[p + 3] == 'D' && buffer[p + 4] == ' ')
{
pos = p + 5;
return "HEAD";
}
break;
case (byte)'D':
if (buffer[p + 1] == 'E' && buffer[p + 2] == 'L' && buffer[p + 3] == 'E' && buffer[p + 4] == 'T' && buffer[p + 5] == 'E' && buffer[p + 6] == ' ')
{
pos = p + 7;
return "DELETE";
}
break;
case (byte)'T':
if (buffer[p + 1] == 'R' && buffer[p + 2] == 'A' && buffer[p + 3] == 'C' && buffer[p + 4] == 'E' && buffer[p + 5] == ' ')
{
pos = p + 6;
return "TRACE";
}
break;
case (byte)'O':
if (buffer[p + 1] == 'P' && buffer[p + 2] == 'T' && buffer[p + 3] == 'I' && buffer[p + 4] == 'O' && buffer[p + 5] == 'N' && buffer[p + 6] == 'S' && buffer[p + 7] == ' ')
{
pos = p + 8;
return "OPTIONS";
}
break;
case (byte)' ':
{
pos = p + 1;
return "";
}
case 13:
{
return "";
}
}
p++;
while (true)
{
var b = buffer[p];
if (b == (byte)' ')
{
pos = p + 1;
break;
}
if (b == 13)
{
pos = p;
break;
}
p++;
}
return StringFromLatin1(buffer, start, p);
}
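// Decodes a byte range as Latin-1, reusing the thread-local char buffer for the intermediate characters.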
string StringFromLatin1(byte[] buffer, int start, int end)
{
var len = end - start;
var chs = GetCharBuffer();
for (var i = 0; i < len; i++)
{
chs[i] = (char)buffer[start + i];
}
return new string(chs, 0, len);
}
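// Writes the status line and headers into the header area of the buffer (after the receive buffer); when finished, the declared Content-Length is checked against the bytes actually written and a mismatch becomes a 500.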
void FillResponse(bool finished)
{
PrepareResponseHeaders();
var status = _statusCode;
if (status < 200 || status > 999)
{
status = 500;
}
if (finished)
{
if (_responseContentLength != ulong.MaxValue && _responseContentLength != _reqRespStream.ResponseLength)
{
status = 500;
}
_responseContentLength = _reqRespStream.ResponseLength;
}
_responseHeaderPos = 0;
HeaderAppend("HTTP/1.1 ");
HeaderAppendHttpStatus(status);
if (_reasonPhase != null)
{
HeaderAppend(" ");
HeaderAppend(_reasonPhase);
}
HeaderAppendCrLf();
if (status == 500)
{
_isKeepAlive = false;
}
if (_responseContentLength != ulong.MaxValue)
{
HeaderAppend("Content-Length: ");
HeaderAppend(_responseContentLength.ToString(CultureInfo.InvariantCulture));
HeaderAppendCrLf();
}
else
{
if (_isHttp10)
_isKeepAlive = false;
else
{
HeaderAppend("Transfer-Encoding: chunked\r\n");
_responseIsChunked = true;
}
}
if (_isHttp10 && _isKeepAlive)
{
HeaderAppend("Connection: keep-alive\r\n");
}
if (!_isKeepAlive)
{
HeaderAppend("Connection: close\r\n");
}
if (!_serverNameOverwrite)
{
HeaderAppend("Server: ");
HeaderAppend(_serverName);
HeaderAppendCrLf();
}
if (!_dateOverwrite)
{
HeaderAppend("Date: ");
HeaderAppend(_dateProvider.Value);
HeaderAppendCrLf();
}
foreach (var header in _responseHeaders)
{
if (header.Value is String)
{
HeaderAppend(header.Key);
HeaderAppend(": ");
HeaderAppend((String)header.Value);
HeaderAppendCrLf();
}
else
{
foreach (var value in (IEnumerable<string>)header.Value)
{
HeaderAppend(header.Key);
HeaderAppend(": ");
HeaderAppend(value);
HeaderAppendCrLf();
}
}
}
_responseHeaders.Clear();
HeaderAppendCrLf();
}
void HeaderAppendHttpStatus(int status)
{
// It always fits so skip buffer size check
var j = StartBufferOffset + ReceiveBufferSize + _responseHeaderPos;
_buffer[j++] = (byte)('0' + status / 100);
_buffer[j++] = (byte)('0' + status / 10 % 10);
_buffer[j] = (byte)('0' + status % 10);
_responseHeaderPos += 3;
}
void HeaderAppendCrLf()
{
if (_responseHeaderPos > ReceiveBufferSize - 2)
{
_responseHeaderPos += 2;
return;
}
var i = StartBufferOffset + ReceiveBufferSize + _responseHeaderPos;
_buffer[i] = 13;
_buffer[i + 1] = 10;
_responseHeaderPos += 2;
}
void HeaderAppend(string text)
{
if (_responseHeaderPos > ReceiveBufferSize - text.Length)
{
_responseHeaderPos += text.Length;
return;
}
var j = StartBufferOffset + ReceiveBufferSize + _responseHeaderPos;
foreach (var ch in text)
{
_buffer[j++] = (byte)ch;
}
_responseHeaderPos += text.Length;
}
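// Shifts any unconsumed received bytes back to the start of the receive buffer so the whole buffer is available for the next receive.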
void NormalizeReceiveBuffer()
{
if (ReceiveBufferPos == 0) return;
Array.Copy(_buffer, StartBufferOffset + ReceiveBufferPos, _buffer, StartBufferOffset, _receiveBufferFullness - StartBufferOffset - ReceiveBufferPos);
_receiveBufferFullness -= ReceiveBufferPos;
ReceiveBufferPos = 0;
}
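// Sends the final packet of the response: on the first send the headers are built and merged with the buffered body, otherwise the chunked body is terminated; any in-flight send is awaited before the last packet is started.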
void SendHttpResponseAndPrepareForNext()
{
var offset = _reqRespStream.ResponseStartOffset;
var len = _reqRespStream.ResponseLocalPos;
if (!_responseHeadersSend)
{
FillResponse(true);
if (_responseHeaderPos > ReceiveBufferSize)
{
SendInternalServerError();
return;
}
OptimallyMergeTwoRegions(_buffer, StartBufferOffset + ReceiveBufferSize, _responseHeaderPos, ref offset, ref len);
_responseHeadersSend = true;
}
else
{
if (_responseContentLength != ulong.MaxValue && _reqRespStream.ResponseLength != _responseContentLength)
{
CloseConnection();
return;
}
if (_responseIsChunked)
{
if (len != 0)
{
WrapInChunk(_buffer, ref offset, ref len);
}
AppendZeroChunk(_buffer, offset, ref len);
}
}
var tcs = _tcsSend;
if (tcs != null)
{
tcs.Task.ContinueWith(_ =>
{
_lastPacket = true;
Callback.StartSend(_buffer, offset, len);
});
return;
}
_lastPacket = true;
Callback.StartSend(_buffer, offset, len);
}
static void AppendZeroChunk(byte[] buffer, int offset, ref int len)
{
offset += len;
buffer[offset++] = (byte)'0';
buffer[offset++] = 13;
buffer[offset++] = 10;
buffer[offset++] = 13;
buffer[offset] = 10;
len += 5;
}
async Task DrainRequestStreamAsync()
{
while (true)
{
var len = await _reqRespStream.ReadAsync(_buffer, StartBufferOffset + ReceiveBufferSize, ReceiveBufferSize);
if (len < ReceiveBufferSize) return;
}
}
void SendInternalServerError()
{
var tcs = _tcsSend;
if (tcs != null)
{
tcs.Task.ContinueWith(_ => SendInternalServerError());
return;
}
_isKeepAlive = false;
_lastPacket = true;
try
{
Callback.StartSend(Server.Status500InternalServerError, 0, Server.Status500InternalServerError.Length);
}
catch (Exception)
{
CloseConnection();
}
}
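// Searches for the CRLFCRLF that terminates the request headers and returns the position just past it, or -1 if it is not in the buffer yet.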
static int FindRequestEnd(byte[] buffer, int start, int end)
{
var pos = start;
while (pos < end)
{
var ch = buffer[pos++];
if (ch != 13) continue;
if (pos >= end) break;
ch = buffer[pos++];
if (ch != 10) continue;
if (pos >= end) break;
ch = buffer[pos++];
if (ch != 13) continue;
if (pos >= end) break;
ch = buffer[pos++];
if (ch != 10) continue;
return pos;
}
return -1;
}
void ResetForNextRequest()
{
_waitingForRequest = true;
_responseHeadersSend = false;
_lastPacket = false;
_reqRespStream.Reset();
}
public Task WriteAsync(byte[] buffer, int startOffset, int len)
{
if (!_responseHeadersSend)
{
if (_buffer != buffer) throw new InvalidOperationException();
FillResponse(false);
if (_responseHeaderPos > ReceiveBufferSize) throw new ArgumentException(string.Format("Response headers are longer({0}) than buffer({1})", _responseHeaderPos, ReceiveBufferSize));
if (_responseIsChunked && len != 0)
{
WrapInChunk(_buffer, ref startOffset, ref len);
}
OptimallyMergeTwoRegions(_buffer, StartBufferOffset + ReceiveBufferSize, _responseHeaderPos, ref startOffset, ref len);
_responseHeadersSend = true;
}
else if (_responseIsChunked)
{
if (_buffer != buffer) throw new InvalidOperationException();
if (len == 0) return Task.Delay(0);
WrapInChunk(_buffer, ref startOffset, ref len);
}
if (_responseContentLength != ulong.MaxValue && _reqRespStream.ResponseLength > _responseContentLength)
{
CloseConnection();
throw new ArgumentOutOfRangeException("len", "Cannot send more bytes than specified in Content-Length header");
}
var tcs = _tcsSend;
if (tcs != null)
{
return tcs.Task.ContinueWith(_ =>
{
tcs = new TaskCompletionSource<bool>();
Thread.MemoryBarrier();
if (_tcsSend != null)
{
throw new InvalidOperationException("Want to start send but previous is still sending");
}
_tcsSend = tcs;
Callback.StartSend(buffer, startOffset, len);
});
}
tcs = new TaskCompletionSource<bool>();
Thread.MemoryBarrier();
_tcsSend = tcs;
Callback.StartSend(buffer, startOffset, len);
return tcs.Task;
}
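// Prepends the hex chunk-size line (written into the reserved space in front of the payload) and appends the trailing CRLF, adjusting offset and length in place.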
static void WrapInChunk(byte[] buffer, ref int startOffset, ref int len)
{
var l = (uint)len;
var o = (uint)startOffset;
buffer[o + l] = 13;
buffer[o + l + 1] = 10;
buffer[--o] = 10;
buffer[--o] = 13;
len += 4;
do
{
var h = l & 15;
if (h < 10) h += '0'; else h += 'A' - 10;
buffer[--o] = (byte)h;
len++;
l /= 16;
} while (l > 0);
startOffset = (int)o;
}
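// Joins two buffer regions into one contiguous block by moving whichever region is shorter next to the other.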
static void OptimallyMergeTwoRegions(byte[] buffer, int o1, int l1, ref int o2, ref int l2)
{
if (l1 < l2)
{
Array.Copy(buffer, o1, buffer, o2 - l1, l1);
o2 -= l1;
}
else
{
Array.Copy(buffer, o2, buffer, o1 + l1, l2);
o2 = o1;
}
l2 += l1;
}
public void Send100Continue()
{
var tcs = new TaskCompletionSource<bool>();
Thread.MemoryBarrier();
_tcsSend = tcs;
Callback.StartSend(_buffer, _constantsOffset, Server.Status100Continue.Length);
}
public void StartNextReceive()
{
NormalizeReceiveBuffer();
var count = StartBufferOffset + ReceiveBufferSize - _receiveBufferFullness;
if (count > 0)
{
Callback.StartReceive(_buffer, _receiveBufferFullness, count);
}
}
public void Dispose()
{
}
public ITransportLayerCallback Callback { set; private get; }
public void PrepareAccept()
{
_disconnecting = 0;
Callback.StartAccept(_buffer, StartBufferOffset, ReceiveBufferSize);
}
public void FinishAccept(byte[] buffer, int offset, int length, IPEndPoint remoteEndPoint, IPEndPoint localEndPoint)
{
ResetForNextRequest();
ReceiveBufferPos = 0;
_remoteEndPoint = remoteEndPoint;
_knownIsLocal = false;
_remoteIpAddress = null;
_remotePort = null;
if (!localEndPoint.Equals(_localEndPoint))
{
_localEndPoint = localEndPoint;
_localIpAddress = null;
_localPort = null;
}
_receiveBufferFullness = StartBufferOffset;
if (length == 0)
{
StartNextReceive();
return;
}
FinishReceive(buffer, offset, length);
}
public void FinishReceive(byte[] buffer, int offset, int length)
{
if (length == -1)
{
if (_waitingForRequest)
{
CloseConnection();
}
else
{
if (_startedReceiveData)
{
_startedReceiveData = false;
_next.FinishReceiveData(false);
}
_cancellation.Cancel();
_reqRespStream.ConnectionClosed();
}
return;
}
TraceSources.CoreDebug.TraceInformation("======= Offset {0}, Length {1}", offset - StartBufferOffset, length);
TraceSources.CoreDebug.TraceInformation(Encoding.UTF8.GetString(buffer, offset, length));
_receiveBufferFullness = offset + length;
if (_waitingForRequest)
{
NormalizeReceiveBuffer();
var posOfReqEnd = FindRequestEnd(_buffer, StartBufferOffset, _receiveBufferFullness);
if (posOfReqEnd < 0)
{
var count = StartBufferOffset + ReceiveBufferSize - _receiveBufferFullness;
if (count > 0)
{
StartNextReceive();
return;
}
SendInternalServerError();
return;
}
_waitingForRequest = false;
try
{
ReceiveBufferPos = posOfReqEnd - StartBufferOffset;
ParseRequest(_buffer, StartBufferOffset, posOfReqEnd);
_next.HandleRequest();
}
catch (Exception)
{
ResponseStatusCode = 500;
ResponseReasonPhase = null;
ResponseFinished();
}
}
else
{
if (_startedReceiveData)
{
_startedReceiveData = false;
_next.FinishReceiveData(true);
}
else if (_reqRespStream.ProcessDataAndShouldReadMore())
{
StartNextReceive();
}
}
}
public void FinishSend(Exception exception)
{
if (exception == null)
{
var tcs = _tcsSend;
_tcsSend = null;
if (tcs != null)
{
tcs.SetResult(true);
}
else if (_isWebSocket)
{
_isWebSocket = false;
_next.UpgradedToWebSocket(true);
}
}
else
{
var tcs = _tcsSend;
_tcsSend = null;
_isKeepAlive = false;
if (tcs != null)
{
tcs.SetException(exception);
}
else if (_isWebSocket)
{
_isWebSocket = false;
_next.UpgradedToWebSocket(false);
}
}
if (_lastPacket)
{
_lastPacket = false;
if (_isKeepAlive)
{
ResetForNextRequest();
StartNextReceive();
}
else
{
CloseConnection();
}
}
}
public CancellationToken CallCancelled
{
get { return _cancellation.Token; }
}
public Stream ReqRespBody
{
get { return _reqRespStream; }
}
public string RequestPath
{
get { return _requestPath; }
}
public string RequestQueryString
{
get { return _requestQueryString; }
}
public string RequestMethod
{
get { return _requestMethod; }
}
public string RequestScheme
{
get { return _requestScheme; }
}
public string RequestProtocol
{
get { return _requestProtocol; }
}
public string RemoteIpAddress
{
get { return _remoteIpAddress ?? (_remoteIpAddress = _remoteEndPoint.Address.ToString()); }
}
public string RemotePort
{
get { return _remotePort ?? (_remotePort = _remoteEndPoint.Port.ToString(CultureInfo.InvariantCulture)); }
}
public string LocalIpAddress
{
get { return _localIpAddress ?? (_localIpAddress = _localEndPoint.Address.ToString()); }
}
public string LocalPort
{
get { return _localPort ?? (_localPort = _localEndPoint.Port.ToString(CultureInfo.InvariantCulture)); }
}
public bool IsLocal
{
get
{
if (!_knownIsLocal) _isLocal = _ipIsLocalChecker.IsLocal(_remoteEndPoint.Address);
return _isLocal;
}
}
public bool IsWebSocketReq
{
get { return _webSocketReqCondition == WebSocketReqConditions.AllSatisfied; }
}
public int ResponseStatusCode
{
set { _statusCode = value; }
}
public string ResponseReasonPhase
{
set { _reasonPhase = value; }
}
public ulong ResponseContentLength
{
set { _responseContentLength = value; }
}
public bool KeepAlive
{
set { _isKeepAlive = value; }
}
public void AddResponseHeader(string name, string value)
{
CheckForHeaderOverwrite(name);
_responseHeaders.Add(new KeyValuePair<string, object>(name, value));
}
public void AddResponseHeader(string name, IEnumerable<string> values)
{
CheckForHeaderOverwrite(name);
_responseHeaders.Add(new KeyValuePair<string, object>(name, values));
}
void CheckForHeaderOverwrite(string name)
{
if (name.Length == 4 && name.Equals("Date", StringComparison.OrdinalIgnoreCase))
{
_dateOverwrite = true;
}
else if (name.Length == 6 && name.Equals("Server", StringComparison.OrdinalIgnoreCase))
{
_serverNameOverwrite = true;
}
}
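// Writes the 101 Switching Protocols handshake; Sec-WebSocket-Accept is the Base64-encoded SHA-1 of the client key concatenated with the RFC 6455 GUID.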
public void UpgradeToWebSocket()
{
if (_responseHeadersSend)
{
_isKeepAlive = false;
CloseConnection();
return;
}
PrepareResponseHeaders();
_isKeepAlive = false;
_responseHeaderPos = 0;
HeaderAppend("HTTP/1.1 101 Switching Protocols\r\nUpgrade: websocket\r\nConnection: Upgrade\r\nSec-WebSocket-Accept: ");
var sha1 = new SHA1Managed();
var hash = sha1.ComputeHash(Encoding.ASCII.GetBytes(_webSocketKey + "258EAFA5-E914-47DA-95CA-C5AB0DC85B11"));
HeaderAppend(Convert.ToBase64String(hash));
HeaderAppendCrLf();
foreach (var header in _responseHeaders)
{
if (header.Value is String)
{
HeaderAppend(header.Key);
HeaderAppend(": ");
HeaderAppend((String)header.Value);
HeaderAppendCrLf();
}
else
{
foreach (var value in (IEnumerable<string>)header.Value)
{
HeaderAppend(header.Key);
HeaderAppend(": ");
HeaderAppend(value);
HeaderAppendCrLf();
}
}
}
_responseHeaders.Clear();
HeaderAppendCrLf();
if (_responseHeaderPos > ReceiveBufferSize)
{
SendInternalServerError();
throw new ArgumentOutOfRangeException();
}
_isWebSocket = true;
Callback.StartSend(_buffer, StartBufferOffset + ReceiveBufferSize, _responseHeaderPos);
}
void PrepareResponseHeaders()
{
_dateOverwrite = false;
_serverNameOverwrite = false;
_next.PrepareResponseHeaders();
}
public void ResponseFinished()
{
if (_statusCode == 500 || _cancellation.IsCancellationRequested)
{
_cancellation.Cancel();
if (!_responseHeadersSend)
SendInternalServerError();
else
{
_isKeepAlive = false;
CloseConnection();
}
return;
}
if (_reqRespStream.RequestPosition != RequestContentLength)
{
DrainRequestStreamAsync().ContinueWith((t, o) =>
{
if (t.IsFaulted || t.IsCanceled)
{
ResponseStatusCode = 500;
((Transport2HttpHandler)o).ResponseFinished();
return;
}
((Transport2HttpHandler)o).SendHttpResponseAndPrepareForNext();
}, this);
return;
}
SendHttpResponseAndPrepareForNext();
}
public void CloseConnection()
{
if (Interlocked.CompareExchange(ref _disconnecting, 1, 0) == 0)
Callback.StartDisconnect();
}
public bool HeadersSend
{
get { return _responseHeadersSend; }
}
public byte[] Buffer
{
get { return _buffer; }
}
public int ReceiveDataOffset
{
get { return StartBufferOffset + ReceiveBufferPos; }
}
public int ReceiveDataLength
{
get { return _receiveBufferFullness - StartBufferOffset - ReceiveBufferPos; }
}
public void ConsumeReceiveData(int count)
{
ReceiveBufferPos += count;
}
public void StartReceiveData()
{
_startedReceiveData = true;
StartNextReceive();
}
public int SendDataOffset
{
get { return StartBufferOffset + ReceiveBufferSize; }
}
public int SendDataLength
{
get { return ReceiveBufferSize * 2 + Transport2HttpFactory.AdditionalSpace; }
}
public Task SendData(byte[] buffer, int offset, int length)
{
var tcs = new TaskCompletionSource<bool>();
_tcsSend = tcs;
Callback.StartSend(buffer, offset, length);
return tcs.Task;
}
public bool CanUseDirectWrite()
{
return !_responseIsChunked && _responseHeadersSend;
}
}
}
| |
namespace DynamicProxy
{
using System;
using System.Collections.Generic;
using System.Dynamic;
using System.Linq;
using System.Linq.Expressions;
using System.Reflection;
using ImpromptuInterface;
internal static class BinderExtensions
{
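// Reads the binder's generic type arguments via ImpromptuInterface, since ICSharpInvokeOrInvokeMemberBinder is internal to the C# runtime binder.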
public static IList<Type> GetTypeArguments(this InvokeMemberBinder binder)
{
return Impromptu.InvokeGet(binder, "Microsoft.CSharp.RuntimeBinder.ICSharpInvokeOrInvokeMemberBinder.TypeArguments") as IList<Type>;
}
}
/// <summary>
/// Contains extension methods for the <see cref="Expression"/> type.
/// </summary>
internal static class ExpressionExtensions
{
/// <summary>
/// Retrieves the member that an expression is defined for.
/// </summary>
/// <param name="expression">The expression to retreive the member from.</param>
/// <returns>A <see cref="MemberInfo"/> instance if the member could be found; otherwise <see langword="null"/>.</returns>
internal static MemberInfo GetTargetMemberInfo(this Expression expression)
{
switch (expression.NodeType)
{
case ExpressionType.Convert:
return GetTargetMemberInfo(((UnaryExpression)expression).Operand);
case ExpressionType.Lambda:
return GetTargetMemberInfo(((LambdaExpression)expression).Body);
case ExpressionType.Call:
return ((MethodCallExpression)expression).Method;
case ExpressionType.MemberAccess:
return ((MemberExpression)expression).Member;
default:
return null;
}
}
internal static MethodCallExpression GetTargetMethodCall(this Expression expression)
{
switch (expression.NodeType)
{
case ExpressionType.Convert:
return GetTargetMethodCall(((UnaryExpression)expression).Operand);
case ExpressionType.Lambda:
return GetTargetMethodCall(((LambdaExpression)expression).Body);
case ExpressionType.Call:
return ((MethodCallExpression)expression);
default:
return null;
}
}
}
public partial class ProxyFactory<T> : DynamicObject, IProxy<T>
{
private readonly T _wrappedObject;
private readonly Type _wrappedObjectType;
private readonly Func<object, object> _identity = x => x;
private readonly Dictionary<string, PropertyInfo> _properties;
private readonly Dictionary<Tuple<string, bool, int>, MethodInfo> _publicMethods;
private readonly Dictionary<Tuple<string, bool>, Delegate> _interceptors = new Dictionary<Tuple<string, bool>, Delegate>();
private readonly Dictionary<Tuple<string, Direction>, Delegate> _propertyInterceptors = new Dictionary<Tuple<string, Direction>, Delegate>();
private readonly Dictionary<Tuple<string, bool>, Delegate> _outTransformers = new Dictionary<Tuple<string, bool>, Delegate>();
private readonly Dictionary<string, Tuple<int, Delegate>> _inTransformers = new Dictionary<string, Tuple<int, Delegate>>();
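// Illustrative usage (Foo/IFoo are placeholder types): ProxyFactory<Foo>.Proxy<IFoo>(new Foo()) yields an IFoo that also implements IProxy<Foo>, on which transformers and interceptors can be registered.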
public static T1 Proxy<T1>(T obj) where T1 : class
{
if (!typeof(T1).IsInterface)
throw new ArgumentException("T1 must be an Interface");
return new ProxyFactory<T>(obj).ActLike<T1>(typeof(IProxy<T>));
}
private ProxyFactory(T obj)
{
_wrappedObject = obj;
_wrappedObjectType = typeof(T);
_properties = _wrappedObjectType.GetProperties().ToDictionary(p => p.Name, p => p);
_publicMethods =
_wrappedObjectType.GetMethods()
.Where(m => !m.IsSpecialName) // exclude property accessors and other special-name methods; more filtering may be needed here.
.Where(m => m.IsPublic)
.Where(m => m.DeclaringType == _wrappedObjectType)
.ToDictionary(m => Tuple.Create(m.Name, m.IsGenericMethod, m.GetParameters().Length), p => p);
}
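// Property reads: runs any Out-direction interceptor around the getter and then the registered out-transformer on the result.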
public override bool TryGetMember(GetMemberBinder binder, out object result)
{
var selectorTuple = Tuple.Create(binder.Name, Direction.Out);
var outTransformer = _outTransformers.ContainsKey(Tuple.Create(binder.Name, false))
? _outTransformers[Tuple.Create(binder.Name, false)]
: _identity;
var interceptor = _propertyInterceptors.ContainsKey(selectorTuple)
? _propertyInterceptors[selectorTuple]
: null;
var property = _properties[binder.Name];
object partialResult;
if (interceptor != null)
{
Func<object, object> meth = property.GetValue;
var curriedMethod = new Func<object[], object>(args => meth(_wrappedObject));
partialResult = interceptor.FastDynamicInvoke(new object[] { curriedMethod, new object[] {} });
}
else
{
partialResult = property.GetValue(_wrappedObject);
}
result = outTransformer.FastDynamicInvoke(partialResult);
return true;
}
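// Property writes: runs the registered in-transformer on the value, then either the In-direction interceptor or the setter directly.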
public override bool TrySetMember(SetMemberBinder binder, object value)
{
var selectorTuple = Tuple.Create(binder.Name, Direction.In);
var property = _properties[binder.Name];
var inTransformer = _inTransformers.ContainsKey(binder.Name) ? _inTransformers[binder.Name].Item2 : _identity;
var interceptor = _propertyInterceptors.ContainsKey(selectorTuple) ? _propertyInterceptors[selectorTuple] : null;
var transformedValue = inTransformer.FastDynamicInvoke(value);
if (interceptor != null)
{
Action<object, object> meth = property.SetValue;
Func<object, Action<object>> curry = Impromptu.Curry(meth);
Func<object[], object> curriedMethod = args =>
{
curry(_wrappedObject)(args[0]);
return null;
};
interceptor.FastDynamicInvoke(new object [] { curriedMethod, new [] { transformedValue } });
}
else
{
property.SetValue(_wrappedObject, transformedValue);
}
return true;
}
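// Method calls: resolves the target by (name, has generic args, parameter count), applies in/out transformers and any interceptor, and rethrows the inner exception of TargetInvocationException.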
public override bool TryInvokeMember(InvokeMemberBinder binder, object[] args, out object result)
{
Delegate outTransformer;
MethodInfo method;
Delegate interceptor;
Tuple<int, Delegate> inTransformer;
try
{
var typeArgs = binder.GetTypeArguments();
var hasTypeArgs = typeArgs != null && typeArgs.Count > 0;
var methodSelectorTuple = Tuple.Create(binder.Name, hasTypeArgs, args.Length);
var selectorTuple = Tuple.Create(binder.Name, hasTypeArgs);
method = _publicMethods[methodSelectorTuple];
outTransformer = _outTransformers.ContainsKey(selectorTuple) ? _outTransformers[selectorTuple] : _identity;
interceptor = _interceptors.ContainsKey(selectorTuple) ? _interceptors[selectorTuple] : null;
inTransformer = _inTransformers.ContainsKey(binder.Name) ? _inTransformers[binder.Name] : null;
if (hasTypeArgs)
{
method = method.MakeGenericMethod(typeArgs.ToArray());
}
}
catch
{
result = null;
return false;
}
try
{
if (inTransformer != null)
{
args[inTransformer.Item1] = inTransformer.Item2.FastDynamicInvoke(args[inTransformer.Item1]);
}
object partialResult;
if (interceptor != null)
{
Func<object, object[], object> meth = method.Invoke;
Func<object, Func<object[], object>> curry = Impromptu.Curry(meth);
var curriedMethod = curry(_wrappedObject);
partialResult = interceptor.FastDynamicInvoke(new object[] {curriedMethod, args});
}
else
{
partialResult = method.Invoke(_wrappedObject, args);
}
result = outTransformer.FastDynamicInvoke(partialResult);
}
catch (TargetInvocationException e)
{
throw e.InnerException;
}
return true;
}
public IProxy<T> AddTransformer<T2, TResult>(Expression functionOrProperty, Direction direction, Func<T2, TResult> transformer)
{
var memberInfo = functionOrProperty.GetTargetMemberInfo();
var functionOrPropertyName = memberInfo.Name;
var hasGenericTypeArguments = (memberInfo.MemberType == MemberTypes.Method) && ((MethodInfo) memberInfo).IsGenericMethod;
if (direction == Direction.In)
{
switch (memberInfo.MemberType)
{
case MemberTypes.Property:
_inTransformers.Add(functionOrPropertyName, Tuple.Create(0, (Delegate)transformer));
break;
case MemberTypes.Method:
{
var methodCall = functionOrProperty.GetTargetMethodCall();
var arg = methodCall.Arguments.Select((a, i) => Tuple.Create(i, (MemberExpression) a)).SingleOrDefault(t => t.Item2.Member.Name == "Selected");
if (arg == null)
{
throw new InvalidOperationException("Must contain exactly one `A<T>.Selected` parameter value");
}
_inTransformers.Add(functionOrPropertyName, Tuple.Create(arg.Item1, (Delegate)transformer));
}
break;
}
}
else
{
_outTransformers.Add(Tuple.Create(functionOrPropertyName, hasGenericTypeArguments), transformer);
}
return this;
}
public IProxy<T> AddTransformer<T1, T2>(Expression<Action<T>> function, Direction direction, Func<T1, T2> transformer)
{
return AddTransformer((Expression) function, direction, transformer);
}
public IProxy<T> AddTransformer<T1, T2>(Expression<Func<T, T1>> property, Direction direction, Func<T1, T2> transformer)
{
return AddTransformer((Expression) property, direction, transformer);
}
public IProxy<T> AddTransformer<T2>(Expression<Action<T>> function, Direction direction, Func<T2, T2> transformer)
{
return AddTransformer<T2, T2>(function, direction, transformer);
}
public IProxy<T> AddInterceptor<TProp>(Expression<Func<T, TProp>> property, Func<Func<TProp>, TProp> interceptor)
{
return AddInterceptor<TProp, TProp>(property, interceptor);
}
public IProxy<T> AddInterceptor<TOriginal, TNew>(Expression<Func<T, TOriginal>> property, Func<Func<TOriginal>, TNew> interceptor)
{
var memberType = property.GetTargetMemberInfo();
if (memberType == null)
{
throw new ArgumentException("Can only add interceptors for Properties or Functions");
}
switch (memberType.MemberType)
{
case MemberTypes.Constructor:
case MemberTypes.Event:
case MemberTypes.Field:
case MemberTypes.TypeInfo:
case MemberTypes.Custom:
case MemberTypes.NestedType:
throw new ArgumentException("Can only add interceptors for Properties or Functions");
case MemberTypes.Method:
return AddFunctionInterceptor(property, (del, args) => interceptor(() => (TOriginal)del(new object[] { })));
case MemberTypes.Property:
return AddPropertyInterceptor(property, (del, args) => interceptor(() => (TOriginal)del(new object[] { })), Direction.Out);
default:
throw new ArgumentOutOfRangeException();
}
}
public IProxy<T> AddInterceptor<TProp>(Expression<Func<T, TProp>> property, Action<Action<TProp>, TProp> interceptor)
{
return AddInterceptor<TProp, TProp>(property, interceptor);
}
public IProxy<T> AddInterceptor<TOriginal, TNew>(Expression<Func<T, TOriginal>> property, Action<Action<TOriginal>, TNew> interceptor)
{
var memberType = property.GetTargetMemberInfo();
if (memberType == null)
{
throw new ArgumentException("Can only add interceptors for Properties or Functions");
}
return AddPropertyInterceptor(property, (del, args) =>
{
interceptor(a => del(new object[] { a }), (TNew)args[0]);
return null;
}, Direction.In);
}
public IProxy<T> AddInterceptor(Expression<Action<T>> function, Action<Action> interceptor)
{
return AddFunctionInterceptor(function, (del, args) => { interceptor(() => del(new object[] {})); return null; });
}
/// <summary>
/// Mother of all Interceptor add functions. Actually does the adding.
/// </summary>
/// <param name="function"></param>
/// <param name="func"></param>
/// <returns></returns>
private IProxy<T> AddFunctionInterceptor(Expression function, Func<Func<object[], object>, object[], object> func)
{
var memberInfo = function.GetTargetMemberInfo();
if (memberInfo == null)
{
throw new ArgumentException("Can only add interceptors for Properties or Functions");
}
var methodInfo = memberInfo as MethodInfo;
var functionOrPropertyName = methodInfo.Name;
var hasGenericTypeArguments = methodInfo.IsGenericMethod;
_interceptors.Add(Tuple.Create(functionOrPropertyName, hasGenericTypeArguments),new Func<Delegate, object[], object>((del, args) => func(arg => del.FastDynamicInvoke(new object[] {arg}), args)));
return this;
}
/// <summary>
/// Mother of all Interceptor add functions. Actually does the adding.
/// </summary>
/// <param name="property"></param>
/// <param name="func"></param>
/// <param name="direction"></param>
/// <returns></returns>
private IProxy<T> AddPropertyInterceptor<TResult>(Expression<Func<T,TResult>> property, Func<Func<object[], object>, object[], object> func, Direction direction)
{
var memberInfo = property.GetTargetMemberInfo();
var propertyInfo = memberInfo as PropertyInfo;
var propertyName = propertyInfo.Name;
_propertyInterceptors.Add(Tuple.Create(propertyName, direction), new Func<Delegate, object[], object>((del, args) => func(arg => del.FastDynamicInvoke(new object[] { arg }), args)));
return this;
}
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
using Microsoft.AspNetCore.Authorization;
using Microsoft.AspNetCore.Mvc;
using Microsoft.AspNetCore.Mvc.Localization;
using Microsoft.AspNetCore.Mvc.Rendering;
using Microsoft.AspNetCore.Routing;
using Microsoft.Extensions.Localization;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using Newtonsoft.Json.Serialization;
using OrchardCore.Admin;
using OrchardCore.DisplayManagement;
using OrchardCore.DisplayManagement.ModelBinding;
using OrchardCore.DisplayManagement.Notify;
using OrchardCore.Navigation;
using OrchardCore.Routing;
using OrchardCore.Settings;
using OrchardCore.Workflows.Activities;
using OrchardCore.Workflows.Helpers;
using OrchardCore.Workflows.Indexes;
using OrchardCore.Workflows.Models;
using OrchardCore.Workflows.Services;
using OrchardCore.Workflows.ViewModels;
using YesSql;
using YesSql.Services;
namespace OrchardCore.Workflows.Controllers
{
[Admin]
public class WorkflowTypeController : Controller
{
private readonly ISiteService _siteService;
private readonly ISession _session;
private readonly IActivityLibrary _activityLibrary;
private readonly IWorkflowManager _workflowManager;
private readonly IWorkflowTypeStore _workflowTypeStore;
private readonly IWorkflowTypeIdGenerator _workflowTypeIdGenerator;
private readonly IAuthorizationService _authorizationService;
private readonly IActivityDisplayManager _activityDisplayManager;
private readonly INotifier _notifier;
private readonly ISecurityTokenService _securityTokenService;
private readonly IUpdateModelAccessor _updateModelAccessor;
private readonly dynamic New;
private readonly IStringLocalizer S;
private readonly IHtmlLocalizer H;
public WorkflowTypeController
(
ISiteService siteService,
ISession session,
IActivityLibrary activityLibrary,
IWorkflowManager workflowManager,
IWorkflowTypeStore workflowTypeStore,
IWorkflowTypeIdGenerator workflowTypeIdGenerator,
IAuthorizationService authorizationService,
IActivityDisplayManager activityDisplayManager,
IShapeFactory shapeFactory,
INotifier notifier,
ISecurityTokenService securityTokenService,
IStringLocalizer<WorkflowTypeController> s,
IHtmlLocalizer<WorkflowTypeController> h,
IUpdateModelAccessor updateModelAccessor)
{
_siteService = siteService;
_session = session;
_activityLibrary = activityLibrary;
_workflowManager = workflowManager;
_workflowTypeStore = workflowTypeStore;
_workflowTypeIdGenerator = workflowTypeIdGenerator;
_authorizationService = authorizationService;
_activityDisplayManager = activityDisplayManager;
_notifier = notifier;
_securityTokenService = securityTokenService;
_updateModelAccessor = updateModelAccessor;
New = shapeFactory;
S = s;
H = h;
}
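// Lists workflow types with search, ordering and paging; the number of workflow instances per type is aggregated from WorkflowIndex.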
public async Task<IActionResult> Index(WorkflowTypeIndexOptions options, PagerParameters pagerParameters)
{
if (!await _authorizationService.AuthorizeAsync(User, Permissions.ManageWorkflows))
{
return Forbid();
}
var siteSettings = await _siteService.GetSiteSettingsAsync();
var pager = new Pager(pagerParameters, siteSettings.PageSize);
if (options == null)
{
options = new WorkflowTypeIndexOptions();
}
var query = _session.Query<WorkflowType, WorkflowTypeIndex>();
switch (options.Filter)
{
case WorkflowTypeFilter.All:
default:
break;
}
if (!string.IsNullOrWhiteSpace(options.Search))
{
query = query.Where(w => w.Name.Contains(options.Search));
}
switch (options.Order)
{
case WorkflowTypeOrder.Name:
query = query.OrderBy(u => u.Name);
break;
}
var count = await query.CountAsync();
var workflowTypes = await query
.Skip(pager.GetStartIndex())
.Take(pager.PageSize)
.ListAsync();
var workflowTypeIds = workflowTypes.Select(x => x.WorkflowTypeId).ToList();
var workflowGroups = (await _session.QueryIndex<WorkflowIndex>(x => x.WorkflowTypeId.IsIn(workflowTypeIds))
.ListAsync())
.GroupBy(x => x.WorkflowTypeId)
.ToDictionary(x => x.Key);
// Maintain previous route data when generating page links.
var routeData = new RouteData();
routeData.Values.Add("Options.Filter", options.Filter);
routeData.Values.Add("Options.Search", options.Search);
routeData.Values.Add("Options.Order", options.Order);
var pagerShape = (await New.Pager(pager)).TotalItemCount(count).RouteData(routeData);
var model = new WorkflowTypeIndexViewModel
{
WorkflowTypes = workflowTypes
.Select(x => new WorkflowTypeEntry
{
WorkflowType = x,
Id = x.Id,
WorkflowCount = workflowGroups.ContainsKey(x.WorkflowTypeId) ? workflowGroups[x.WorkflowTypeId].Count() : 0,
Name = x.Name
})
.ToList(),
Options = options,
Pager = pagerShape
};
model.Options.WorkflowTypesBulkAction = new List<SelectListItem>() {
new SelectListItem() { Text = S["Delete"].Value, Value = nameof(WorkflowTypeBulkAction.Delete) }
};
return View(model);
}
[HttpPost, ActionName("Index")]
[FormValueRequired("submit.Filter")]
public ActionResult IndexFilterPOST(WorkflowTypeIndexViewModel model)
{
return RedirectToAction("Index", new RouteValueDictionary {
{ "Options.Search", model.Options.Search }
});
}
[HttpPost]
[ActionName(nameof(Index))]
[FormValueRequired("submit.BulkAction")]
public async Task<IActionResult> BulkEdit(WorkflowTypeIndexOptions options, IEnumerable<int> itemIds)
{
if (!await _authorizationService.AuthorizeAsync(User, Permissions.ManageWorkflows))
{
return Forbid();
}
if (itemIds?.Count() > 0)
{
var checkedEntries = await _session.Query<WorkflowType, WorkflowTypeIndex>().Where(x => x.DocumentId.IsIn(itemIds)).ListAsync();
switch (options.BulkAction)
{
case WorkflowTypeBulkAction.None:
break;
case WorkflowTypeBulkAction.Delete:
foreach (var entry in checkedEntries)
{
var workflowType = await _workflowTypeStore.GetAsync(entry.Id);
if (workflowType != null)
{
await _workflowTypeStore.DeleteAsync(workflowType);
_notifier.Success(H["Workflow {0} has been deleted.", workflowType.Name]);
}
}
break;
default:
throw new ArgumentOutOfRangeException();
}
}
return RedirectToAction("Index");
}
public async Task<IActionResult> EditProperties(int? id, string returnUrl = null)
{
if (!await _authorizationService.AuthorizeAsync(User, Permissions.ManageWorkflows))
{
return Forbid();
}
if (id == null)
{
return View(new WorkflowTypePropertiesViewModel
{
IsEnabled = true,
ReturnUrl = returnUrl
});
}
else
{
var workflowType = await _session.GetAsync<WorkflowType>(id.Value);
return View(new WorkflowTypePropertiesViewModel
{
Id = workflowType.Id,
Name = workflowType.Name,
IsEnabled = workflowType.IsEnabled,
IsSingleton = workflowType.IsSingleton,
DeleteFinishedWorkflows = workflowType.DeleteFinishedWorkflows,
ReturnUrl = returnUrl
});
}
}
[HttpPost]
public async Task<IActionResult> EditProperties(WorkflowTypePropertiesViewModel viewModel, int? id)
{
if (!await _authorizationService.AuthorizeAsync(User, Permissions.ManageWorkflows))
{
return Forbid();
}
if (!ModelState.IsValid)
{
return View(viewModel);
}
var isNew = id == null;
var workflowType = default(WorkflowType);
if (isNew)
{
workflowType = new WorkflowType();
workflowType.WorkflowTypeId = _workflowTypeIdGenerator.GenerateUniqueId(workflowType);
}
else
{
workflowType = await _session.GetAsync<WorkflowType>(id.Value);
if (workflowType == null)
{
return NotFound();
}
}
workflowType.Name = viewModel.Name?.Trim();
workflowType.IsEnabled = viewModel.IsEnabled;
workflowType.IsSingleton = viewModel.IsSingleton;
workflowType.DeleteFinishedWorkflows = viewModel.DeleteFinishedWorkflows;
await _workflowTypeStore.SaveAsync(workflowType);
return isNew
? RedirectToAction("Edit", new { workflowType.Id })
: Url.IsLocalUrl(viewModel.ReturnUrl)
? (IActionResult)Redirect(viewModel.ReturnUrl)
: RedirectToAction("Index");
}
public async Task<IActionResult> Duplicate(int id, string returnUrl = null)
{
if (!await _authorizationService.AuthorizeAsync(User, Permissions.ManageWorkflows))
{
return Forbid();
}
var workflowType = await _session.GetAsync<WorkflowType>(id);
if (workflowType == null)
{
return NotFound();
}
return View(new WorkflowTypePropertiesViewModel
{
Id = id,
IsSingleton = workflowType.IsSingleton,
Name = "Copy-" + workflowType.Name,
IsEnabled = workflowType.IsEnabled,
ReturnUrl = returnUrl
});
}
[HttpPost]
public async Task<IActionResult> Duplicate(WorkflowTypePropertiesViewModel viewModel, int id)
{
if (!await _authorizationService.AuthorizeAsync(User, Permissions.ManageWorkflows))
{
return Forbid();
}
if (!ModelState.IsValid)
{
return View(viewModel);
}
var existingWorkflowType = await _session.GetAsync<WorkflowType>(id);
var workflowType = new WorkflowType();
workflowType.WorkflowTypeId = _workflowTypeIdGenerator.GenerateUniqueId(workflowType);
workflowType.Name = viewModel.Name?.Trim();
workflowType.IsEnabled = viewModel.IsEnabled;
workflowType.IsSingleton = viewModel.IsSingleton;
workflowType.DeleteFinishedWorkflows = viewModel.DeleteFinishedWorkflows;
workflowType.Activities = existingWorkflowType.Activities;
workflowType.Transitions = existingWorkflowType.Transitions;
await _workflowTypeStore.SaveAsync(workflowType);
return RedirectToAction("Edit", new { workflowType.Id });
}
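// Builds the workflow designer: thumbnail shapes for every available activity, design shapes for the activities on the canvas, and a JSON snapshot of the workflow type for the client-side editor.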
public async Task<IActionResult> Edit(int id, string localId)
{
if (!await _authorizationService.AuthorizeAsync(User, Permissions.ManageWorkflows))
{
return Forbid();
}
var newLocalId = string.IsNullOrWhiteSpace(localId) ? Guid.NewGuid().ToString() : localId;
var availableActivities = _activityLibrary.ListActivities();
var workflowType = await _session.GetAsync<WorkflowType>(id);
if (workflowType == null)
{
return NotFound();
}
var workflow = _workflowManager.NewWorkflow(workflowType);
var workflowContext = await _workflowManager.CreateWorkflowExecutionContextAsync(workflowType, workflow);
var activityContexts = await Task.WhenAll(workflowType.Activities.Select(async x => await _workflowManager.CreateActivityExecutionContextAsync(x, x.Properties)));
var workflowCount = await _session.QueryIndex<WorkflowIndex>(x => x.WorkflowTypeId == workflowType.WorkflowTypeId).CountAsync();
var activityThumbnailShapes = new List<dynamic>();
var index = 0;
foreach (var activity in availableActivities)
{
activityThumbnailShapes.Add(await BuildActivityDisplay(activity, index++, id, newLocalId, "Thumbnail"));
}
var activityDesignShapes = new List<dynamic>();
index = 0;
foreach (var activityContext in activityContexts)
{
activityDesignShapes.Add(await BuildActivityDisplay(activityContext, index++, id, newLocalId, "Design"));
}
var activitiesDataQuery = activityContexts.Select(x => new
{
Id = x.ActivityRecord.ActivityId,
X = x.ActivityRecord.X,
Y = x.ActivityRecord.Y,
Name = x.ActivityRecord.Name,
IsStart = x.ActivityRecord.IsStart,
IsEvent = x.Activity.IsEvent(),
Outcomes = x.Activity.GetPossibleOutcomes(workflowContext, x).ToArray()
});
var workflowTypeData = new
{
Id = workflowType.Id,
Name = workflowType.Name,
IsEnabled = workflowType.IsEnabled,
Activities = activitiesDataQuery.ToArray(),
Transitions = workflowType.Transitions
};
var viewModel = new WorkflowTypeViewModel
{
WorkflowType = workflowType,
WorkflowTypeJson = JsonConvert.SerializeObject(workflowTypeData, Formatting.None, new JsonSerializerSettings { ContractResolver = new CamelCasePropertyNamesContractResolver() }),
ActivityThumbnailShapes = activityThumbnailShapes,
ActivityDesignShapes = activityDesignShapes,
ActivityCategories = _activityLibrary.ListCategories().ToList(),
LocalId = newLocalId,
LoadLocalState = !string.IsNullOrWhiteSpace(localId),
WorkflowCount = workflowCount
};
return View(viewModel);
}
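// Saves the designer state posted by the client: removes activities deleted on the canvas, updates positions and start flags, and rebuilds the transition list.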
[HttpPost]
public async Task<IActionResult> Edit(WorkflowTypeUpdateModel model)
{
if (!await _authorizationService.AuthorizeAsync(User, Permissions.ManageWorkflows))
{
return Forbid();
}
var workflowType = await _workflowTypeStore.GetAsync(model.Id);
dynamic state = JObject.Parse(model.State);
var currentActivities = workflowType.Activities.ToDictionary(x => x.ActivityId);
var postedActivities = ((IEnumerable<dynamic>)state.activities).ToDictionary(x => (string)x.id);
var removedActivityIdsQuery =
from activityId in currentActivities.Keys
where !postedActivities.ContainsKey(activityId)
select activityId;
var removedActivityIds = removedActivityIdsQuery.ToList();
// Remove any orphans (activities deleted on the client).
foreach (var activityId in removedActivityIds)
{
var activityToRemove = currentActivities[activityId];
workflowType.Activities.Remove(activityToRemove);
currentActivities.Remove(activityId);
}
// Update activities.
foreach (var activityState in state.activities)
{
var activity = currentActivities[(string)activityState.id];
activity.X = activityState.x;
activity.Y = activityState.y;
activity.IsStart = activityState.isStart;
}
// Update transitions.
workflowType.Transitions.Clear();
foreach (var transitionState in state.transitions)
{
workflowType.Transitions.Add(new Transition
{
SourceActivityId = transitionState.sourceActivityId,
DestinationActivityId = transitionState.destinationActivityId,
SourceOutcomeName = transitionState.sourceOutcomeName
});
}
await _workflowTypeStore.SaveAsync(workflowType);
await _session.CommitAsync();
_notifier.Success(H["Workflow type has been saved."]);
return RedirectToAction(nameof(Edit), new { id = model.Id });
}
[HttpPost]
public async Task<IActionResult> Delete(int id)
{
if (!await _authorizationService.AuthorizeAsync(User, Permissions.ManageWorkflows))
{
return Forbid();
}
var workflowType = await _workflowTypeStore.GetAsync(id);
if (workflowType == null)
{
return NotFound();
}
await _workflowTypeStore.DeleteAsync(workflowType);
_notifier.Success(H["Workflow type {0} deleted", workflowType.Name]);
return RedirectToAction("Index");
}
private async Task<dynamic> BuildActivityDisplay(IActivity activity, int index, int workflowTypeId, string localId, string displayType)
{
dynamic activityShape = await _activityDisplayManager.BuildDisplayAsync(activity, _updateModelAccessor.ModelUpdater, displayType);
activityShape.Metadata.Type = $"Activity_{displayType}";
activityShape.Activity = activity;
activityShape.WorkflowTypeId = workflowTypeId;
activityShape.Index = index;
activityShape.ReturnUrl = Url.Action(nameof(Edit), new { id = workflowTypeId, localId = localId });
return activityShape;
}
private async Task<dynamic> BuildActivityDisplay(ActivityContext activityContext, int index, int workflowTypeId, string localId, string displayType)
{
dynamic activityShape = await _activityDisplayManager.BuildDisplayAsync(activityContext.Activity, _updateModelAccessor.ModelUpdater, displayType);
activityShape.Metadata.Type = $"Activity_{displayType}";
activityShape.Activity = activityContext.Activity;
activityShape.ActivityRecord = activityContext.ActivityRecord;
activityShape.WorkflowTypeId = workflowTypeId;
activityShape.Index = index;
activityShape.ReturnUrl = Url.Action(nameof(Edit), new { id = workflowTypeId, localId = localId });
return activityShape;
}
}
}
| |
#nullable enable
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading;
using System.Threading.Tasks;
using BTCPayServer.Client.Models;
using BTCPayServer.Data;
using BTCPayServer.Events;
using BTCPayServer.HostedServices;
using BTCPayServer.Logging;
using BTCPayServer.Models;
using BTCPayServer.Payments;
using BTCPayServer.Rating;
using BTCPayServer.Security;
using BTCPayServer.Services;
using BTCPayServer.Services.Apps;
using BTCPayServer.Services.Invoices;
using BTCPayServer.Services.Rates;
using BTCPayServer.Services.Stores;
using BTCPayServer.Validation;
using Microsoft.AspNetCore.Identity;
using Microsoft.AspNetCore.Mvc;
using NBitpayClient;
using BitpayCreateInvoiceRequest = BTCPayServer.Models.BitpayCreateInvoiceRequest;
using StoreData = BTCPayServer.Data.StoreData;
namespace BTCPayServer.Controllers
{
[Filters.BitpayAPIConstraint(false)]
public partial class UIInvoiceController : Controller
{
readonly InvoiceRepository _InvoiceRepository;
readonly RateFetcher _RateProvider;
readonly StoreRepository _StoreRepository;
readonly UserManager<ApplicationUser> _UserManager;
private readonly CurrencyNameTable _CurrencyNameTable;
readonly EventAggregator _EventAggregator;
readonly BTCPayNetworkProvider _NetworkProvider;
private readonly PaymentMethodHandlerDictionary _paymentMethodHandlerDictionary;
private readonly ApplicationDbContextFactory _dbContextFactory;
private readonly PullPaymentHostedService _paymentHostedService;
private readonly LanguageService _languageService;
private readonly ExplorerClientProvider _ExplorerClients;
private readonly UIWalletsController _walletsController;
public WebhookSender WebhookNotificationManager { get; }
public UIInvoiceController(
InvoiceRepository invoiceRepository,
CurrencyNameTable currencyNameTable,
UserManager<ApplicationUser> userManager,
RateFetcher rateProvider,
StoreRepository storeRepository,
EventAggregator eventAggregator,
ContentSecurityPolicies csp,
BTCPayNetworkProvider networkProvider,
PaymentMethodHandlerDictionary paymentMethodHandlerDictionary,
ApplicationDbContextFactory dbContextFactory,
PullPaymentHostedService paymentHostedService,
WebhookSender webhookNotificationManager,
LanguageService languageService,
ExplorerClientProvider explorerClients,
UIWalletsController walletsController)
{
_CurrencyNameTable = currencyNameTable ?? throw new ArgumentNullException(nameof(currencyNameTable));
_StoreRepository = storeRepository ?? throw new ArgumentNullException(nameof(storeRepository));
_InvoiceRepository = invoiceRepository ?? throw new ArgumentNullException(nameof(invoiceRepository));
_RateProvider = rateProvider ?? throw new ArgumentNullException(nameof(rateProvider));
_UserManager = userManager;
_EventAggregator = eventAggregator;
_NetworkProvider = networkProvider;
_paymentMethodHandlerDictionary = paymentMethodHandlerDictionary;
_dbContextFactory = dbContextFactory;
_paymentHostedService = paymentHostedService;
WebhookNotificationManager = webhookNotificationManager;
_languageService = languageService;
this._ExplorerClients = explorerClients;
_walletsController = walletsController;
}
internal async Task<DataWrapper<InvoiceResponse>> CreateInvoiceCore(BitpayCreateInvoiceRequest invoice,
StoreData store, string serverUrl, List<string>? additionalTags = null,
CancellationToken cancellationToken = default)
{
var entity = await CreateInvoiceCoreRaw(invoice, store, serverUrl, additionalTags, cancellationToken);
var resp = entity.EntityToDTO();
return new DataWrapper<InvoiceResponse>(resp) { Facade = "pos/invoice" };
}
internal async Task<InvoiceEntity> CreateInvoiceCoreRaw(BitpayCreateInvoiceRequest invoice, StoreData store, string serverUrl, List<string>? additionalTags = null, CancellationToken cancellationToken = default)
{
var storeBlob = store.GetStoreBlob();
var entity = _InvoiceRepository.CreateNewInvoice();
entity.ExpirationTime = invoice.ExpirationTime is DateTimeOffset v ? v : entity.InvoiceTime + storeBlob.InvoiceExpiration;
entity.MonitoringExpiration = entity.ExpirationTime + storeBlob.MonitoringExpiration;
if (entity.ExpirationTime - TimeSpan.FromSeconds(30.0) < entity.InvoiceTime)
{
throw new BitpayHttpException(400, "The expirationTime is set too soon");
}
entity.Metadata.OrderId = invoice.OrderId;
entity.Metadata.PosData = invoice.PosData;
entity.ServerUrl = serverUrl;
entity.FullNotifications = invoice.FullNotifications || invoice.ExtendedNotifications;
entity.ExtendedNotifications = invoice.ExtendedNotifications;
entity.NotificationURLTemplate = invoice.NotificationURL;
entity.NotificationEmail = invoice.NotificationEmail;
if (additionalTags != null)
entity.InternalTags.AddRange(additionalTags);
FillBuyerInfo(invoice, entity);
var taxIncluded = invoice.TaxIncluded.HasValue ? invoice.TaxIncluded.Value : 0m;
var price = invoice.Price;
entity.Metadata.ItemCode = invoice.ItemCode;
entity.Metadata.ItemDesc = invoice.ItemDesc;
entity.Metadata.Physical = invoice.Physical;
entity.Metadata.TaxIncluded = invoice.TaxIncluded;
entity.Currency = invoice.Currency;
if (price is decimal vv)
{
entity.Price = vv;
entity.Type = InvoiceType.Standard;
}
else
{
entity.Price = 0m;
entity.Type = InvoiceType.TopUp;
}
entity.RedirectURLTemplate = invoice.RedirectURL ?? store.StoreWebsite;
entity.RedirectAutomatically =
invoice.RedirectAutomatically.GetValueOrDefault(storeBlob.RedirectAutomatically);
entity.RequiresRefundEmail = invoice.RequiresRefundEmail;
entity.SpeedPolicy = ParseSpeedPolicy(invoice.TransactionSpeed, store.SpeedPolicy);
IPaymentFilter? excludeFilter = null;
if (invoice.PaymentCurrencies?.Any() is true)
{
invoice.SupportedTransactionCurrencies ??=
new Dictionary<string, InvoiceSupportedTransactionCurrency>();
foreach (string paymentCurrency in invoice.PaymentCurrencies)
{
invoice.SupportedTransactionCurrencies.TryAdd(paymentCurrency,
new InvoiceSupportedTransactionCurrency() { Enabled = true });
}
}
if (invoice.SupportedTransactionCurrencies != null && invoice.SupportedTransactionCurrencies.Count != 0)
{
var supportedTransactionCurrencies = invoice.SupportedTransactionCurrencies
.Where(c => c.Value.Enabled)
.Select(c => PaymentMethodId.TryParse(c.Key, out var p) ? p : null)
.Where(c => c != null)
.ToHashSet();
excludeFilter = PaymentFilter.Where(p => !supportedTransactionCurrencies.Contains(p));
}
entity.PaymentTolerance = storeBlob.PaymentTolerance;
entity.DefaultPaymentMethod = invoice.DefaultPaymentMethod;
entity.RequiresRefundEmail = invoice.RequiresRefundEmail;
return await CreateInvoiceCoreRaw(entity, store, excludeFilter, null, cancellationToken);
}
internal async Task<InvoiceEntity> CreateInvoiceCoreRaw(CreateInvoiceRequest invoice, StoreData store, string serverUrl, List<string>? additionalTags = null, CancellationToken cancellationToken = default)
{
var storeBlob = store.GetStoreBlob();
var entity = _InvoiceRepository.CreateNewInvoice();
entity.ServerUrl = serverUrl;
entity.ExpirationTime = entity.InvoiceTime + (invoice.Checkout.Expiration ?? storeBlob.InvoiceExpiration);
entity.MonitoringExpiration = entity.ExpirationTime + (invoice.Checkout.Monitoring ?? storeBlob.MonitoringExpiration);
if (invoice.Metadata != null)
entity.Metadata = InvoiceMetadata.FromJObject(invoice.Metadata);
invoice.Checkout ??= new CreateInvoiceRequest.CheckoutOptions();
entity.Currency = invoice.Currency;
if (invoice.Amount is decimal v)
{
entity.Price = v;
entity.Type = InvoiceType.Standard;
}
else
{
entity.Price = 0.0m;
entity.Type = InvoiceType.TopUp;
}
entity.SpeedPolicy = invoice.Checkout.SpeedPolicy ?? store.SpeedPolicy;
entity.DefaultLanguage = invoice.Checkout.DefaultLanguage;
entity.DefaultPaymentMethod = invoice.Checkout.DefaultPaymentMethod;
entity.RedirectAutomatically = invoice.Checkout.RedirectAutomatically ?? storeBlob.RedirectAutomatically;
entity.RequiresRefundEmail = invoice.Checkout.RequiresRefundEmail;
IPaymentFilter? excludeFilter = null;
if (invoice.Checkout.PaymentMethods != null)
{
var supportedTransactionCurrencies = invoice.Checkout.PaymentMethods
.Select(c => PaymentMethodId.TryParse(c, out var p) ? p : null)
.ToHashSet();
excludeFilter = PaymentFilter.Where(p => !supportedTransactionCurrencies.Contains(p));
}
entity.PaymentTolerance = invoice.Checkout.PaymentTolerance ?? storeBlob.PaymentTolerance;
entity.RedirectURLTemplate = invoice.Checkout.RedirectURL?.Trim();
entity.RequiresRefundEmail = invoice.Checkout.RequiresRefundEmail;
if (additionalTags != null)
entity.InternalTags.AddRange(additionalTags);
return await CreateInvoiceCoreRaw(entity, store, excludeFilter, invoice.AdditionalSearchTerms, cancellationToken);
}
internal async Task<InvoiceEntity> CreateInvoiceCoreRaw(InvoiceEntity entity, StoreData store, IPaymentFilter? invoicePaymentMethodFilter, string[]? additionalSearchTerms = null, CancellationToken cancellationToken = default)
{
InvoiceLogs logs = new InvoiceLogs();
logs.Write("Creation of invoice starting", InvoiceEventData.EventSeverity.Info);
var storeBlob = store.GetStoreBlob();
if (string.IsNullOrEmpty(entity.Currency))
entity.Currency = storeBlob.DefaultCurrency;
entity.Currency = entity.Currency.Trim().ToUpperInvariant();
entity.Price = Math.Max(0.0m, entity.Price);
var currencyInfo = _CurrencyNameTable.GetNumberFormatInfo(entity.Currency, false);
if (currencyInfo != null)
{
entity.Price = entity.Price.RoundToSignificant(currencyInfo.CurrencyDecimalDigits);
}
if (entity.Metadata.TaxIncluded is decimal taxIncluded)
{
if (currencyInfo != null)
{
taxIncluded = taxIncluded.RoundToSignificant(currencyInfo.CurrencyDecimalDigits);
}
taxIncluded = Math.Max(0.0m, taxIncluded);
taxIncluded = Math.Min(taxIncluded, entity.Price);
entity.Metadata.TaxIncluded = taxIncluded;
}
var getAppsTaggingStore = _InvoiceRepository.GetAppsTaggingStore(store.Id);
if (entity.Metadata.BuyerEmail != null)
{
if (!EmailValidator.IsEmail(entity.Metadata.BuyerEmail))
throw new BitpayHttpException(400, "Invalid email");
entity.RefundMail = entity.Metadata.BuyerEmail;
}
entity.Status = InvoiceStatusLegacy.New;
HashSet<CurrencyPair> currencyPairsToFetch = new HashSet<CurrencyPair>();
var rules = storeBlob.GetRateRules(_NetworkProvider);
var excludeFilter = storeBlob.GetExcludedPaymentMethods(); // Here we can compose filters from other origin with PaymentFilter.Any()
if (invoicePaymentMethodFilter != null)
{
excludeFilter = PaymentFilter.Or(excludeFilter,
invoicePaymentMethodFilter);
}
foreach (var network in store.GetSupportedPaymentMethods(_NetworkProvider)
.Where(s => !excludeFilter.Match(s.PaymentId))
.Select(c => _NetworkProvider.GetNetwork<BTCPayNetworkBase>(c.PaymentId.CryptoCode))
.Where(c => c != null))
{
currencyPairsToFetch.Add(new CurrencyPair(network.CryptoCode, entity.Currency));
foreach (var paymentMethodCriteria in storeBlob.PaymentMethodCriteria)
{
if (paymentMethodCriteria.Value != null)
{
currencyPairsToFetch.Add(new CurrencyPair(network.CryptoCode, paymentMethodCriteria.Value.Currency));
}
}
}
var rateRules = storeBlob.GetRateRules(_NetworkProvider);
var fetchingByCurrencyPair = _RateProvider.FetchRates(currencyPairsToFetch, rateRules, cancellationToken);
var fetchingAll = WhenAllFetched(logs, fetchingByCurrencyPair);
List<ISupportedPaymentMethod> supported = new List<ISupportedPaymentMethod>();
var paymentMethods = new PaymentMethodDictionary();
bool noNeedForMethods = entity.Type != InvoiceType.TopUp && entity.Price == 0m;
if (!noNeedForMethods)
{
// This loop ends with .ToList so we are querying all payment methods at once
// instead of sequentially to improve response time
foreach (var o in store.GetSupportedPaymentMethods(_NetworkProvider)
.Where(s => !excludeFilter.Match(s.PaymentId) &&
_paymentMethodHandlerDictionary.Support(s.PaymentId))
.Select(c =>
(Handler: _paymentMethodHandlerDictionary[c.PaymentId],
SupportedPaymentMethod: c,
Network: _NetworkProvider.GetNetwork<BTCPayNetworkBase>(c.PaymentId.CryptoCode)))
.Where(c => c.Network != null)
.Select(o =>
(SupportedPaymentMethod: o.SupportedPaymentMethod,
PaymentMethod: CreatePaymentMethodAsync(fetchingByCurrencyPair, o.Handler,
o.SupportedPaymentMethod, o.Network, entity, store, logs)))
.ToList())
{
var paymentMethod = await o.PaymentMethod;
if (paymentMethod == null)
continue;
supported.Add(o.SupportedPaymentMethod);
paymentMethods.Add(paymentMethod);
}
if (supported.Count == 0)
{
StringBuilder errors = new StringBuilder();
if (!store.GetSupportedPaymentMethods(_NetworkProvider).Any())
errors.AppendLine(
"Warning: No wallet has been linked to your BTCPay Store. See the following link for more information on how to connect your store and wallet. (https://docs.btcpayserver.org/WalletSetup/)");
foreach (var error in logs.ToList())
{
errors.AppendLine(error.ToString());
}
throw new BitpayHttpException(400, errors.ToString());
}
}
entity.SetSupportedPaymentMethods(supported);
entity.SetPaymentMethods(paymentMethods);
foreach (var app in await getAppsTaggingStore)
{
entity.InternalTags.Add(AppService.GetAppInternalTag(app.Id));
}
using (logs.Measure("Saving invoice"))
{
entity = await _InvoiceRepository.CreateInvoiceAsync(store.Id, entity, additionalSearchTerms);
}
_ = Task.Run(async () =>
{
try
{
await fetchingAll;
}
catch (AggregateException ex)
{
ex.Handle(e => { logs.Write($"Error while fetching rates {ex}", InvoiceEventData.EventSeverity.Error); return true; });
}
await _InvoiceRepository.AddInvoiceLogs(entity.Id, logs);
});
_EventAggregator.Publish(new Events.InvoiceEvent(entity, InvoiceEvent.Created));
return entity;
}
private Task WhenAllFetched(InvoiceLogs logs, Dictionary<CurrencyPair, Task<RateResult>> fetchingByCurrencyPair)
{
return Task.WhenAll(fetchingByCurrencyPair.Select(async pair =>
{
var rateResult = await pair.Value;
logs.Write($"{pair.Key}: The rating rule is {rateResult.Rule}", InvoiceEventData.EventSeverity.Info);
logs.Write($"{pair.Key}: The evaluated rating rule is {rateResult.EvaluatedRule}", InvoiceEventData.EventSeverity.Info);
if (rateResult.Errors.Count != 0)
{
var allRateRuleErrors = string.Join(", ", rateResult.Errors.ToArray());
logs.Write($"{pair.Key}: Rate rule error ({allRateRuleErrors})", InvoiceEventData.EventSeverity.Error);
}
foreach (var ex in rateResult.ExchangeExceptions)
{
logs.Write($"{pair.Key}: Exception reaching exchange {ex.ExchangeName} ({ex.Exception.Message})", InvoiceEventData.EventSeverity.Error);
}
}).ToArray());
}
private async Task<PaymentMethod?> CreatePaymentMethodAsync(Dictionary<CurrencyPair, Task<RateResult>> fetchingByCurrencyPair,
IPaymentMethodHandler handler, ISupportedPaymentMethod supportedPaymentMethod, BTCPayNetworkBase network, InvoiceEntity entity,
StoreData store, InvoiceLogs logs)
{
try
{
var logPrefix = $"{supportedPaymentMethod.PaymentId.ToPrettyString()}:";
var storeBlob = store.GetStoreBlob();
object? preparePayment;
if (storeBlob.LazyPaymentMethods)
{
preparePayment = null;
}
else
{
preparePayment = handler.PreparePayment(supportedPaymentMethod, store, network);
}
var rate = await fetchingByCurrencyPair[new CurrencyPair(network.CryptoCode, entity.Currency)];
if (rate.BidAsk == null)
{
return null;
}
var paymentMethod = new PaymentMethod
{
ParentEntity = entity,
Network = network,
Rate = rate.BidAsk.Bid,
PreferOnion = Uri.TryCreate(entity.ServerUrl, UriKind.Absolute, out var u) && u.DnsSafeHost.EndsWith(".onion", StringComparison.OrdinalIgnoreCase)
};
paymentMethod.SetId(supportedPaymentMethod.PaymentId);
using (logs.Measure($"{logPrefix} Payment method details creation"))
{
var paymentDetails = await handler.CreatePaymentMethodDetails(logs, supportedPaymentMethod, paymentMethod, store, network, preparePayment);
paymentMethod.SetPaymentMethodDetails(paymentDetails);
}
var criteria = storeBlob.PaymentMethodCriteria?.Find(methodCriteria => methodCriteria.PaymentMethod == supportedPaymentMethod.PaymentId);
if (criteria?.Value != null && entity.Type != InvoiceType.TopUp)
{
var currentRateToCrypto =
await fetchingByCurrencyPair[new CurrencyPair(supportedPaymentMethod.PaymentId.CryptoCode, criteria.Value.Currency)];
if (currentRateToCrypto?.BidAsk != null)
{
var amount = paymentMethod.Calculate().Due.GetValue(network as BTCPayNetwork);
var limitValueCrypto = criteria.Value.Value / currentRateToCrypto.BidAsk.Bid;
if (amount < limitValueCrypto && criteria.Above)
{
logs.Write($"{logPrefix} invoice amount below accepted value for payment method", InvoiceEventData.EventSeverity.Error);
return null;
}
if (amount > limitValueCrypto && !criteria.Above)
{
logs.Write($"{logPrefix} invoice amount above accepted value for payment method", InvoiceEventData.EventSeverity.Error);
return null;
}
}
else
{
var suffix = currentRateToCrypto?.EvaluatedRule is string s ? $" ({s})" : string.Empty;
logs.Write($"{logPrefix} This payment method should be created only if the amount of this invoice is in proper range. However, we are unable to fetch the rate of those limits. {suffix}", InvoiceEventData.EventSeverity.Warning);
}
}
#pragma warning disable CS0618
if (paymentMethod.GetId().IsBTCOnChain)
{
entity.TxFee = paymentMethod.NextNetworkFee;
entity.Rate = paymentMethod.Rate;
entity.DepositAddress = paymentMethod.DepositAddress;
}
#pragma warning restore CS0618
return paymentMethod;
}
catch (PaymentMethodUnavailableException ex)
{
logs.Write($"{supportedPaymentMethod.PaymentId.CryptoCode}: Payment method unavailable ({ex.Message})", InvoiceEventData.EventSeverity.Error);
}
catch (Exception ex)
{
logs.Write($"{supportedPaymentMethod.PaymentId.CryptoCode}: Unexpected exception ({ex})", InvoiceEventData.EventSeverity.Error);
}
return null;
}
private SpeedPolicy ParseSpeedPolicy(string transactionSpeed, SpeedPolicy defaultPolicy)
{
if (transactionSpeed == null)
return defaultPolicy;
var mappings = new Dictionary<string, SpeedPolicy>();
mappings.Add("low", SpeedPolicy.LowSpeed);
mappings.Add("low-medium", SpeedPolicy.LowMediumSpeed);
mappings.Add("medium", SpeedPolicy.MediumSpeed);
mappings.Add("high", SpeedPolicy.HighSpeed);
if (!mappings.TryGetValue(transactionSpeed, out SpeedPolicy policy))
policy = defaultPolicy;
return policy;
}
private void FillBuyerInfo(BitpayCreateInvoiceRequest req, InvoiceEntity invoiceEntity)
{
var buyerInformation = invoiceEntity.Metadata;
buyerInformation.BuyerAddress1 = req.BuyerAddress1;
buyerInformation.BuyerAddress2 = req.BuyerAddress2;
buyerInformation.BuyerCity = req.BuyerCity;
buyerInformation.BuyerCountry = req.BuyerCountry;
buyerInformation.BuyerEmail = req.BuyerEmail;
buyerInformation.BuyerName = req.BuyerName;
buyerInformation.BuyerPhone = req.BuyerPhone;
buyerInformation.BuyerState = req.BuyerState;
buyerInformation.BuyerZip = req.BuyerZip;
var buyer = req.Buyer;
if (buyer == null)
return;
buyerInformation.BuyerAddress1 ??= buyer.Address1;
buyerInformation.BuyerAddress2 ??= buyer.Address2;
buyerInformation.BuyerCity ??= buyer.City;
buyerInformation.BuyerCountry ??= buyer.country;
buyerInformation.BuyerEmail ??= buyer.email;
buyerInformation.BuyerName ??= buyer.Name;
buyerInformation.BuyerPhone ??= buyer.phone;
buyerInformation.BuyerState ??= buyer.State;
buyerInformation.BuyerZip ??= buyer.zip;
}
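// Hedged usage sketch (not part of the original controller): assuming a resolved
// UIInvoiceController instance, a loaded StoreData and a server URL, the Greenfield
// creation path above could be exercised roughly like this. Property names are taken
// from the code above; everything else is an assumption.
//
//   var request = new CreateInvoiceRequest
//   {
//       Amount = 10.0m,
//       Currency = "USD",
//       Checkout = new CreateInvoiceRequest.CheckoutOptions
//       {
//           Expiration = TimeSpan.FromMinutes(30),
//           RedirectURL = "https://example.com/thanks"
//       }
//   };
//   var invoice = await invoiceController.CreateInvoiceCoreRaw(request, store, serverUrl);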
}
}
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Security.Authentication;
using System.Threading.Tasks;
using HtmlAgilityPack;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
namespace RedditSharp.Things
{
public class Subreddit : Thing
{
private const string SubredditPostUrl = "/r/{0}.json";
private const string SubredditNewUrl = "/r/{0}/new.json?sort=new";
private const string SubredditHotUrl = "/r/{0}/hot.json";
private const string SubredditTopUrl = "/r/{0}/top.json?t={1}";
private const string SubscribeUrl = "/api/subscribe";
private const string GetSettingsUrl = "/r/{0}/about/edit.json";
private const string GetReducedSettingsUrl = "/r/{0}/about.json";
private const string ModqueueUrl = "/r/{0}/about/modqueue.json";
private const string UnmoderatedUrl = "/r/{0}/about/unmoderated.json";
private const string FlairTemplateUrl = "/api/flairtemplate";
private const string ClearFlairTemplatesUrl = "/api/clearflairtemplates";
private const string SetUserFlairUrl = "/api/flair";
private const string StylesheetUrl = "/r/{0}/about/stylesheet.json";
private const string UploadImageUrl = "/api/upload_sr_img";
private const string FlairSelectorUrl = "/api/flairselector";
private const string AcceptModeratorInviteUrl = "/api/accept_moderator_invite";
private const string LeaveModerationUrl = "/api/unfriend";
private const string BanUserUrl = "/api/friend";
private const string AddModeratorUrl = "/api/friend";
private const string AddContributorUrl = "/api/friend";
private const string ModeratorsUrl = "/r/{0}/about/moderators.json";
private const string FrontPageUrl = "/.json";
private const string SubmitLinkUrl = "/api/submit";
private const string FlairListUrl = "/r/{0}/api/flairlist.json";
private const string CommentsUrl = "/r/{0}/comments.json";
private const string SearchUrl = "/r/{0}/search.json?q={1}&restrict_sr=on&sort={2}&t={3}";
[JsonIgnore]
private Reddit Reddit { get; set; }
[JsonIgnore]
private IWebAgent WebAgent { get; set; }
[JsonIgnore]
public Wiki Wiki { get; private set; }
[JsonProperty("created")]
[JsonConverter(typeof(UnixTimestampConverter))]
public DateTime? Created { get; set; }
[JsonProperty("description")]
public string Description { get; set; }
[JsonProperty("description_html")]
public string DescriptionHTML { get; set; }
[JsonProperty("display_name")]
public string DisplayName { get; set; }
[JsonProperty("header_img")]
public string HeaderImage { get; set; }
[JsonProperty("header_title")]
public string HeaderTitle { get; set; }
[JsonProperty("over18")]
public bool? NSFW { get; set; }
[JsonProperty("public_description")]
public string PublicDescription { get; set; }
[JsonProperty("subscribers")]
public int? Subscribers { get; set; }
[JsonProperty("accounts_active")]
public int? ActiveUsers { get; set; }
[JsonProperty("title")]
public string Title { get; set; }
[JsonProperty("url")]
[JsonConverter(typeof(UrlParser))]
public Uri Url { get; set; }
/// <summary>
/// Property determining whether the currently logged-in user is a moderator on this subreddit.
/// </summary>
[JsonProperty("user_is_moderator")]
public bool UserIsModerator { get; set; }
/// <summary>
/// Property determining whether the currently logged-in user is banned from the subreddit.
/// </summary>
[JsonProperty("user_is_banned")]
public bool UserIsBanned { get; set; }
[JsonIgnore]
public string Name { get; set; }
public Listing<Post> GetTop(FromTime timePeriod)
{
if (Name == "/")
{
return new Listing<Post>(Reddit, "/top.json?t=" + Enum.GetName(typeof(FromTime), timePeriod).ToLower(), WebAgent);
}
return new Listing<Post>(Reddit, string.Format(SubredditTopUrl, Name, Enum.GetName(typeof(FromTime), timePeriod)).ToLower(), WebAgent);
}
public Listing<Post> Posts
{
get
{
if (Name == "/")
return new Listing<Post>(Reddit, "/.json", WebAgent);
return new Listing<Post>(Reddit, string.Format(SubredditPostUrl, Name), WebAgent);
}
}
public Listing<Comment> Comments
{
get
{
if (Name == "/")
return new Listing<Comment>(Reddit, "/comments.json", WebAgent);
return new Listing<Comment>(Reddit, string.Format(CommentsUrl, Name), WebAgent);
}
}
public Listing<Post> New
{
get
{
if (Name == "/")
return new Listing<Post>(Reddit, "/new.json", WebAgent);
return new Listing<Post>(Reddit, string.Format(SubredditNewUrl, Name), WebAgent);
}
}
public Listing<Post> Hot
{
get
{
if (Name == "/")
return new Listing<Post>(Reddit, "/.json", WebAgent);
return new Listing<Post>(Reddit, string.Format(SubredditHotUrl, Name), WebAgent);
}
}
public Listing<VotableThing> ModQueue
{
get
{
return new Listing<VotableThing>(Reddit, string.Format(ModqueueUrl, Name), WebAgent);
}
}
public Listing<Post> UnmoderatedLinks
{
get
{
return new Listing<Post>(Reddit, string.Format(UnmoderatedUrl, Name), WebAgent);
}
}
public Listing<Post> Search(string terms)
{
return new Listing<Post>(Reddit, string.Format(SearchUrl, Name, Uri.EscapeUriString(terms), "relevance", "all"), WebAgent);
}
public SubredditSettings Settings
{
get
{
if (Reddit.User == null)
throw new AuthenticationException("No user logged in.");
try
{
var request = WebAgent.CreateGet(string.Format(GetSettingsUrl, Name));
var response = request.GetResponse();
var data = WebAgent.GetResponseString(response.GetResponseStream());
var json = JObject.Parse(data);
return new SubredditSettings(this, Reddit, json, WebAgent);
}
catch // TODO: More specific catch
{
// Do it unauthed
var request = WebAgent.CreateGet(string.Format(GetReducedSettingsUrl, Name));
var response = request.GetResponse();
var data = WebAgent.GetResponseString(response.GetResponseStream());
var json = JObject.Parse(data);
return new SubredditSettings(this, Reddit, json, WebAgent);
}
}
}
public UserFlairTemplate[] UserFlairTemplates // Hacky, there isn't a proper endpoint for this
{
get
{
var request = WebAgent.CreatePost(FlairSelectorUrl);
var stream = request.GetRequestStream();
WebAgent.WritePostBody(stream, new
{
name = Reddit.User.Name,
r = Name,
uh = Reddit.User.Modhash
});
stream.Close();
var response = request.GetResponse();
var data = WebAgent.GetResponseString(response.GetResponseStream());
var document = new HtmlDocument();
document.LoadHtml(data);
if (document.DocumentNode.Descendants("div").First().Attributes["error"] != null)
throw new InvalidOperationException("This subreddit does not allow users to select flair.");
var templateNodes = document.DocumentNode.Descendants("li");
var list = new List<UserFlairTemplate>();
foreach (var node in templateNodes)
{
list.Add(new UserFlairTemplate
{
CssClass = node.Descendants("span").First().Attributes["class"].Value.Split(' ')[1],
Text = node.Descendants("span").First().InnerText
});
}
return list.ToArray();
}
}
public SubredditStyle Stylesheet
{
get
{
var request = WebAgent.CreateGet(string.Format(StylesheetUrl, Name));
var response = request.GetResponse();
var data = WebAgent.GetResponseString(response.GetResponseStream());
var json = JToken.Parse(data);
return new SubredditStyle(Reddit, this, json, WebAgent);
}
}
public IEnumerable<ModeratorUser> Moderators
{
get
{
var request = WebAgent.CreateGet(string.Format(ModeratorsUrl, Name));
var response = request.GetResponse();
var responseString = WebAgent.GetResponseString(response.GetResponseStream());
var json = JObject.Parse(responseString);
var type = json["kind"].ToString();
if (type != "UserList")
throw new FormatException("Reddit responded with an object that is not a user listing.");
var data = json["data"];
var mods = data["children"].ToArray();
var result = new ModeratorUser[mods.Length];
for (var i = 0; i < mods.Length; i++)
{
var mod = new ModeratorUser(Reddit, mods[i]);
result[i] = mod;
}
return result;
}
}
public Subreddit Init(Reddit reddit, JToken json, IWebAgent webAgent)
{
CommonInit(reddit, json, webAgent);
JsonConvert.PopulateObject(json["data"].ToString(), this, reddit.JsonSerializerSettings);
SetName();
return this;
}
public async Task<Subreddit> InitAsync(Reddit reddit, JToken json, IWebAgent webAgent)
{
CommonInit(reddit, json, webAgent);
await Task.Factory.StartNew(() => JsonConvert.PopulateObject(json["data"].ToString(), this, reddit.JsonSerializerSettings));
SetName();
return this;
}
private void SetName()
{
Name = Url.ToString();
if (Name.StartsWith("/r/"))
Name = Name.Substring(3);
if (Name.StartsWith("r/"))
Name = Name.Substring(2);
Name = Name.TrimEnd('/');
}
private void CommonInit(Reddit reddit, JToken json, IWebAgent webAgent)
{
base.Init(json);
Reddit = reddit;
WebAgent = webAgent;
Wiki = new Wiki(reddit, this, webAgent);
}
public static Subreddit GetRSlashAll(Reddit reddit)
{
var rSlashAll = new Subreddit
{
DisplayName = "/r/all",
Title = "/r/all",
Url = new Uri("/r/all", UriKind.Relative),
Name = "all",
Reddit = reddit,
WebAgent = reddit._webAgent
};
return rSlashAll;
}
public static Subreddit GetFrontPage(Reddit reddit)
{
var frontPage = new Subreddit
{
DisplayName = "Front Page",
Title = "reddit: the front page of the internet",
Url = new Uri("/", UriKind.Relative),
Name = "/",
Reddit = reddit,
WebAgent = reddit._webAgent
};
return frontPage;
}
public void Subscribe()
{
if (Reddit.User == null)
throw new AuthenticationException("No user logged in.");
var request = WebAgent.CreatePost(SubscribeUrl);
var stream = request.GetRequestStream();
WebAgent.WritePostBody(stream, new
{
action = "sub",
sr = FullName,
uh = Reddit.User.Modhash
});
stream.Close();
var response = request.GetResponse();
var data = WebAgent.GetResponseString(response.GetResponseStream());
// Discard results
}
public void Unsubscribe()
{
if (Reddit.User == null)
throw new AuthenticationException("No user logged in.");
var request = WebAgent.CreatePost(SubscribeUrl);
var stream = request.GetRequestStream();
WebAgent.WritePostBody(stream, new
{
action = "unsub",
sr = FullName,
uh = Reddit.User.Modhash
});
stream.Close();
var response = request.GetResponse();
var data = WebAgent.GetResponseString(response.GetResponseStream());
// Discard results
}
public void ClearFlairTemplates(FlairType flairType)
{
var request = WebAgent.CreatePost(ClearFlairTemplatesUrl);
var stream = request.GetRequestStream();
WebAgent.WritePostBody(stream, new
{
flair_type = flairType == FlairType.Link ? "LINK_FLAIR" : "USER_FLAIR",
uh = Reddit.User.Modhash,
r = Name
});
stream.Close();
var response = request.GetResponse();
var data = WebAgent.GetResponseString(response.GetResponseStream());
}
public void AddFlairTemplate(string cssClass, FlairType flairType, string text, bool userEditable)
{
var request = WebAgent.CreatePost(FlairTemplateUrl);
var stream = request.GetRequestStream();
WebAgent.WritePostBody(stream, new
{
css_class = cssClass,
flair_type = flairType == FlairType.Link ? "LINK_FLAIR" : "USER_FLAIR",
text = text,
text_editable = userEditable,
uh = Reddit.User.Modhash,
r = Name,
api_type = "json"
});
stream.Close();
var response = request.GetResponse();
var data = WebAgent.GetResponseString(response.GetResponseStream());
var json = JToken.Parse(data);
}
public string GetFlairText(string user)
{
var request = WebAgent.CreateGet(String.Format(FlairListUrl + "?name=" + user, Name));
var response = request.GetResponse();
var data = WebAgent.GetResponseString(response.GetResponseStream());
var json = JToken.Parse(data);
return (string)json["users"][0]["flair_text"];
}
public string GetFlairCssClass(string user)
{
var request = WebAgent.CreateGet(String.Format(FlairListUrl + "?name=" + user, Name));
var response = request.GetResponse();
var data = WebAgent.GetResponseString(response.GetResponseStream());
var json = JToken.Parse(data);
return (string)json["users"][0]["flair_css_class"];
}
public void SetUserFlair(string user, string cssClass, string text)
{
var request = WebAgent.CreatePost(SetUserFlairUrl);
var stream = request.GetRequestStream();
WebAgent.WritePostBody(stream, new
{
css_class = cssClass,
text = text,
uh = Reddit.User.Modhash,
r = Name,
name = user
});
stream.Close();
var response = request.GetResponse();
var data = WebAgent.GetResponseString(response.GetResponseStream());
}
public void UploadHeaderImage(string name, ImageType imageType, byte[] file)
{
var request = WebAgent.CreatePost(UploadImageUrl);
var formData = new MultipartFormBuilder(request);
formData.AddDynamic(new
{
name,
uh = Reddit.User.Modhash,
r = Name,
formid = "image-upload",
img_type = imageType == ImageType.PNG ? "png" : "jpg",
upload = "",
header = 1
});
formData.AddFile("file", "foo.png", file, imageType == ImageType.PNG ? "image/png" : "image/jpeg");
formData.Finish();
var response = request.GetResponse();
var data = WebAgent.GetResponseString(response.GetResponseStream());
// TODO: Detect errors
}
public void AddModerator(string user)
{
var request = WebAgent.CreatePost(AddModeratorUrl);
WebAgent.WritePostBody(request.GetRequestStream(), new
{
api_type = "json",
uh = Reddit.User.Modhash,
r = Name,
type = "moderator",
name = user
});
var response = request.GetResponse();
var result = WebAgent.GetResponseString(response.GetResponseStream());
}
public void AcceptModeratorInvite()
{
var request = WebAgent.CreatePost(AcceptModeratorInviteUrl);
WebAgent.WritePostBody(request.GetRequestStream(), new
{
api_type = "json",
uh = Reddit.User.Modhash,
r = Name
});
var response = request.GetResponse();
var result = WebAgent.GetResponseString(response.GetResponseStream());
}
public void RemoveModerator(string id)
{
var request = WebAgent.CreatePost(LeaveModerationUrl);
WebAgent.WritePostBody(request.GetRequestStream(), new
{
api_type = "json",
uh = Reddit.User.Modhash,
r = Name,
type = "moderator",
id
});
var response = request.GetResponse();
var result = WebAgent.GetResponseString(response.GetResponseStream());
}
public override string ToString()
{
return "/r/" + DisplayName;
}
public void AddContributor(string user)
{
var request = WebAgent.CreatePost(AddContributorUrl);
WebAgent.WritePostBody(request.GetRequestStream(), new
{
api_type = "json",
uh = Reddit.User.Modhash,
r = Name,
type = "contributor",
name = user
});
var response = request.GetResponse();
var result = WebAgent.GetResponseString(response.GetResponseStream());
}
public void RemoveContributor(string id)
{
var request = WebAgent.CreatePost(LeaveModerationUrl);
WebAgent.WritePostBody(request.GetRequestStream(), new
{
api_type = "json",
uh = Reddit.User.Modhash,
r = Name,
type = "contributor",
id
});
var response = request.GetResponse();
var result = WebAgent.GetResponseString(response.GetResponseStream());
}
public void BanUser(string user, string reason)
{
var request = WebAgent.CreatePost(BanUserUrl);
WebAgent.WritePostBody(request.GetRequestStream(), new
{
api_type = "json",
uh = Reddit.User.Modhash,
r = Name,
type = "banned",
id = "#banned",
name = user,
note = reason,
action = "add",
container = FullName
});
var response = request.GetResponse();
var result = WebAgent.GetResponseString(response.GetResponseStream());
}
private Post Submit(SubmitData data)
{
if (Reddit.User == null)
throw new RedditException("No user logged in.");
var request = WebAgent.CreatePost(SubmitLinkUrl);
WebAgent.WritePostBody(request.GetRequestStream(), data);
var response = request.GetResponse();
var result = WebAgent.GetResponseString(response.GetResponseStream());
var json = JToken.Parse(result);
ICaptchaSolver solver = Reddit.CaptchaSolver;
if (json["json"]["errors"].Any() && json["json"]["errors"][0][0].ToString() == "BAD_CAPTCHA"
&& solver != null)
{
data.Iden = json["json"]["captcha"].ToString();
CaptchaResponse captchaResponse = solver.HandleCaptcha(new Captcha(data.Iden));
// We throw an exception because this method is expected to return a valid Post object, but we cannot
// do that if we got a Captcha error.
if (captchaResponse.Cancel)
throw new CaptchaFailedException("Captcha verification failed when submitting " + data.Kind + " post");
data.Captcha = captchaResponse.Answer;
return Submit(data);
}
else if (json["json"]["errors"].Any() && json["json"]["errors"][0][0].ToString() == "ALREADY_SUB")
{
throw new DuplicateLinkException(String.Format("Post failed when submitting. The following link has already been submitted: {0}", SubmitLinkUrl));
}
return new Post().Init(Reddit, json["json"], WebAgent);
}
/// <summary>
/// Submits a link post in the current subreddit using the logged-in user
/// </summary>
/// <param name="title">The title of the submission</param>
/// <param name="url">The url of the submission link</param>
public Post SubmitPost(string title, string url, string captchaId = "", string captchaAnswer = "", bool resubmit = false)
{
return
Submit(
new LinkData
{
Subreddit = Name,
UserHash = Reddit.User.Modhash,
Title = title,
URL = url,
Resubmit = resubmit,
Iden = captchaId,
Captcha = captchaAnswer
});
}
/// <summary>
/// Submits a text post in the current subreddit using the logged-in user
/// </summary>
/// <param name="title">The title of the submission</param>
/// <param name="text">The raw markdown text of the submission</param>
public Post SubmitTextPost(string title, string text, string captchaId = "", string captchaAnswer = "")
{
return
Submit(
new TextData
{
Subreddit = Name,
UserHash = Reddit.User.Modhash,
Title = title,
Text = text,
Iden = captchaId,
Captcha = captchaAnswer
});
}
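// Hedged usage sketch (illustrative only; the Reddit.LogIn and Reddit.GetSubreddit
// calls are assumed from typical RedditSharp usage and are not defined in this file):
//
//   var reddit = new Reddit();
//   reddit.LogIn("username", "password");
//   var subreddit = reddit.GetSubreddit("/r/example");
//   var link = subreddit.SubmitPost("Interesting link", "http://example.com/");
//   var self = subreddit.SubmitTextPost("Hello", "First post via RedditSharp");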
#region Obsolete Getter Methods
[Obsolete("Use Posts property instead")]
public Listing<Post> GetPosts()
{
return Posts;
}
[Obsolete("Use New property instead")]
public Listing<Post> GetNew()
{
return New;
}
[Obsolete("Use Hot property instead")]
public Listing<Post> GetHot()
{
return Hot;
}
[Obsolete("Use ModQueue property instead")]
public Listing<VotableThing> GetModQueue()
{
return ModQueue;
}
[Obsolete("Use UnmoderatedLinks property instead")]
public Listing<Post> GetUnmoderatedLinks()
{
return UnmoderatedLinks;
}
[Obsolete("Use Settings property instead")]
public SubredditSettings GetSettings()
{
return Settings;
}
[Obsolete("Use UserFlairTemplates property instead")]
public UserFlairTemplate[] GetUserFlairTemplates() // Hacky, there isn't a proper endpoint for this
{
return UserFlairTemplates;
}
[Obsolete("Use Stylesheet property instead")]
public SubredditStyle GetStylesheet()
{
return Stylesheet;
}
[Obsolete("Use Moderators property instead")]
public IEnumerable<ModeratorUser> GetModerators()
{
return Moderators;
}
#endregion Obsolete Getter Methods
}
}
| |
#region License
//
// PrimitiveInlineList.cs July 2006
//
// Copyright (C) 2006, Niall Gallagher <[email protected]>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
// implied. See the License for the specific language governing
// permissions and limitations under the License.
//
#endregion
#region Using directives
using SimpleFramework.Xml.Strategy;
using SimpleFramework.Xml.Stream;
using System.Collections.Generic;
using System;
#endregion
namespace SimpleFramework.Xml.Core {
/// <summary>
/// The <c>PrimitiveInlineList</c> object is used to convert a
/// group of elements into a collection of element entries. This is
/// used when a containing element for a list is not required. It
/// extracts the elements by matching elements to name of the type
/// that the annotated field or method requires. This enables these
/// element entries to exist as siblings to other objects within the
/// object. One restriction is that the <c>Root</c> annotation
/// for each of the types within the list must be the same.
/// <code>
/// <entry>example one</entry>
/// <entry>example two</entry>
/// <entry>example three</entry>
/// <entry>example four</entry>
/// </code>
/// For the above XML element list the element <c>entry</c> is
/// used to wrap the primitive string value. This wrapping XML element
/// is configurable and defaults to the lower case string for the name
/// of the class it represents. So, for example, if the primitive type
/// is an <c>int</c> the enclosing element will be called int.
/// </summary>
/// <seealso>
/// SimpleFramework.Xml.Core.Primitive
/// </seealso>
/// <seealso>
/// SimpleFramework.Xml.ElementList
/// </seealso>
class PrimitiveInlineList : Repeater {
/// <summary>
/// This factory is used to create a suitable collection list.
/// </summary>
private readonly CollectionFactory factory;
/// <summary>
/// This performs the traversal used for object serialization.
/// </summary>
private readonly Primitive root;
/// <summary>
/// This is the name that each list element is wrapped with.
/// </summary>
private readonly String parent;
/// <summary>
/// This is the type of object that will be held in the list.
/// </summary>
private readonly Type entry;
/// <summary>
/// Constructor for the <c>PrimitiveInlineList</c> object.
/// This is given the list type and entry type to be used. The list
/// type is the <c>Collection</c> implementation that is used
/// to collect the deserialized entry objects from the XML source.
/// </summary>
/// <param name="context">
/// this is the context object used for serialization
/// </param>
/// <param name="type">
/// this is the collection type for the list used
/// </param>
/// <param name="entry">
/// the entry type to be stored within the list
/// </param>
/// <param name="parent">
/// this is the name to wrap the list element with
/// </param>
public PrimitiveInlineList(Context context, Type type, Type entry, String parent) {
this.factory = new CollectionFactory(context, type);
this.root = new Primitive(context, entry);
this.parent = parent;
this.entry = entry;
}
/// <summary>
/// This <c>read</c> method will read the XML element list from
/// the provided node and deserialize its children as entry types.
/// This will deserialize each entry type as a primitive value. In
/// order to do this the parent string provided forms the element.
/// </summary>
/// <param name="node">
/// this is the XML element that is to be deserialized
/// </param>
/// <returns>
/// this returns the item to attach to the object contact
/// </returns>
public Object Read(InputNode node) {
Object value = factory.Instance;
Collection list = (Collection) value;
if(list != null) {
return Read(node, list);
}
return null;
}
/// <summary>
/// This <c>read</c> method will read the XML element list from
/// the provided node and deserialize its children as entry types.
/// This will deserialize each entry type as a primitive value. In
/// order to do this the parent string provided forms the element.
/// </summary>
/// <param name="node">
/// this is the XML element that is to be deserialized
/// </param>
/// <returns>
/// this returns the item to attach to the object contact
/// </returns>
public Object Read(InputNode node, Object value) {
Collection list = (Collection) value;
if(list != null) {
return Read(node, list);
}
return Read(node);
}
/// <summary>
/// This <c>read</c> method will read the XML element list from
/// the provided node and deserialize its children as entry types.
/// This will deserialize each entry type as a primitive value. In
/// order to do this the parent string provided forms the element.
/// </summary>
/// <param name="node">
/// this is the XML element that is to be deserialized
/// </param>
/// <param name="list">
/// this is the collection that is to be populated
/// </param>
/// <returns>
/// this returns the item to attach to the object contact
/// </returns>
public Object Read(InputNode node, Collection list) {
InputNode from = node.getParent();
String name = node.getName();
while(node != null) {
Object item = root.Read(node);
if(item != null) {
list.add(item);
}
node = from.getNext(name);
}
return list;
}
/// <summary>
/// This <c>read</c> method will read the XML element list from
/// the provided node and deserialize its children as entry types.
/// This will deserialize each entry type as a primitive value. In
/// order to do this the parent string provided forms the element.
/// </summary>
/// <param name="node">
/// this is the XML element that is to be deserialized
/// </param>
/// <returns>
/// this returns the item to attach to the object contact
/// </returns>
public bool Validate(InputNode node) {
InputNode from = node.getParent();
String name = node.getName();
while(node != null) {
bool valid = root.Validate(node);
if(valid == false) {
return false;
}
node = from.getNext(name);
}
return true;
}
/// <summary>
/// This <c>write</c> method will write the specified object
/// to the given XML element as list entries. Each entry within
/// the given list must be assignable to the given primitive type.
/// This will serialize each entry type as a primitive value. In
/// order to do this the parent string provided forms the element.
/// </summary>
/// <param name="source">
/// this is the source collection to be serialized
/// </param>
/// <param name="node">
/// this is the XML element container to be populated
/// </param>
public void Write(OutputNode node, Object source) {
OutputNode parent = node.getParent();
Mode mode = node.getMode();
if(!node.isCommitted()) {
node.remove();
}
Write(parent, source, mode);
}
/// <summary>
/// This <c>write</c> method will write the specified object
/// to the given XML element as list entries. Each entry within
/// the given list must be assignable to the given primitive type.
/// This will serialize each entry type as a primitive value. In
/// order to do this the parent string provided forms the element.
/// </summary>
/// <param name="node">
/// this is the parent output node to write values to
/// </param>
/// <param name="source">
/// this is the source collection to be serialized
/// </param>
/// <param name="mode">
/// this is used to determine whether to output CDATA
/// </param>
public void Write(OutputNode node, Object source, Mode mode) {
Collection list = (Collection) source;
foreach(Object item in list) {
if(item != null) {
OutputNode child = node.getChild(parent);
if(!IsOverridden(child, item)) {
child.setMode(mode);
root.Write(child, item);
}
}
}
}
/// <summary>
/// This is used to determine whether the specified value has been
/// overridden by the strategy. If the item has been overridden
/// then no more serialization is required for that value; this is
/// effectively telling the serialization process to stop writing.
/// </summary>
/// <param name="node">
/// the node that a potential override is written to
/// </param>
/// <param name="value">
/// this is the object instance to be serialized
/// </param>
/// <returns>
/// returns true if the strategy overrides the object
/// </returns>
public bool IsOverridden(OutputNode node, Object value) {
return factory.setOverride(entry, value, node);
}
}
}
| |
using System;
using System.Runtime.InteropServices;
using System.Linq;
namespace SteamNative
{
internal unsafe class SteamApps : IDisposable
{
//
// Holds a platform specific implementation
//
internal Platform.Interface platform;
internal Facepunch.Steamworks.BaseSteamworks steamworks;
//
// Constructor decides which implementation to use based on current platform
//
internal SteamApps( Facepunch.Steamworks.BaseSteamworks steamworks, IntPtr pointer )
{
this.steamworks = steamworks;
if ( Platform.IsWindows64 ) platform = new Platform.Win64( pointer );
else if ( Platform.IsWindows32 ) platform = new Platform.Win32( pointer );
else if ( Platform.IsLinux32 ) platform = new Platform.Linux32( pointer );
else if ( Platform.IsLinux64 ) platform = new Platform.Linux64( pointer );
else if ( Platform.IsOsx ) platform = new Platform.Mac( pointer );
}
//
// Class is invalid if we don't have a valid implementation
//
public bool IsValid{ get{ return platform != null && platform.IsValid; } }
//
// When shutting down clear all the internals to avoid accidental use
//
public virtual void Dispose()
{
if ( platform != null )
{
platform.Dispose();
platform = null;
}
}
// bool
// with: Detect_StringFetch False
public bool BGetDLCDataByIndex( int iDLC /*int*/, ref AppId_t pAppID /*AppId_t **/, ref bool pbAvailable /*bool **/, out string pchName /*char **/ )
{
bool bSuccess = default( bool );
pchName = string.Empty;
System.Text.StringBuilder pchName_sb = Helpers.TakeStringBuilder();
int cchNameBufferSize = 4096;
bSuccess = platform.ISteamApps_BGetDLCDataByIndex( iDLC, ref pAppID.Value, ref pbAvailable, pchName_sb, cchNameBufferSize );
if ( !bSuccess ) return bSuccess;
pchName = pchName_sb.ToString();
return bSuccess;
}
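// Hedged usage sketch: enumerating DLC with the index/string-fetch pattern above,
// assuming a valid SteamApps instance named `apps` (GetDLCCount is defined later in
// this class):
//
//   for ( int i = 0; i < apps.GetDLCCount(); i++ )
//   {
//       AppId_t dlcId = default( AppId_t );
//       bool available = false;
//       if ( apps.BGetDLCDataByIndex( i, ref dlcId, ref available, out var name ) )
//           Console.WriteLine( $"{dlcId.Value}: {name} (available: {available})" );
//   }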
// bool
public bool BIsAppInstalled( AppId_t appID /*AppId_t*/ )
{
return platform.ISteamApps_BIsAppInstalled( appID.Value );
}
// bool
public bool BIsCybercafe()
{
return platform.ISteamApps_BIsCybercafe();
}
// bool
public bool BIsDlcInstalled( AppId_t appID /*AppId_t*/ )
{
return platform.ISteamApps_BIsDlcInstalled( appID.Value );
}
// bool
public bool BIsLowViolence()
{
return platform.ISteamApps_BIsLowViolence();
}
// bool
public bool BIsSubscribed()
{
return platform.ISteamApps_BIsSubscribed();
}
// bool
public bool BIsSubscribedApp( AppId_t appID /*AppId_t*/ )
{
return platform.ISteamApps_BIsSubscribedApp( appID.Value );
}
// bool
public bool BIsSubscribedFromFreeWeekend()
{
return platform.ISteamApps_BIsSubscribedFromFreeWeekend();
}
// bool
public bool BIsVACBanned()
{
return platform.ISteamApps_BIsVACBanned();
}
// int
public int GetAppBuildId()
{
return platform.ISteamApps_GetAppBuildId();
}
// uint
// with: Detect_StringFetch True
public string GetAppInstallDir( AppId_t appID /*AppId_t*/ )
{
uint bSuccess = default( uint );
System.Text.StringBuilder pchFolder_sb = Helpers.TakeStringBuilder();
uint cchFolderBufferSize = 4096;
bSuccess = platform.ISteamApps_GetAppInstallDir( appID.Value, pchFolder_sb, cchFolderBufferSize );
if ( bSuccess <= 0 ) return null;
return pchFolder_sb.ToString();
}
// ulong
public ulong GetAppOwner()
{
return platform.ISteamApps_GetAppOwner();
}
// string
// with: Detect_StringReturn
public string GetAvailableGameLanguages()
{
IntPtr string_pointer;
string_pointer = platform.ISteamApps_GetAvailableGameLanguages();
return Marshal.PtrToStringAnsi( string_pointer );
}
// bool
// with: Detect_StringFetch True
public string GetCurrentBetaName()
{
bool bSuccess = default( bool );
System.Text.StringBuilder pchName_sb = Helpers.TakeStringBuilder();
int cchNameBufferSize = 4096;
bSuccess = platform.ISteamApps_GetCurrentBetaName( pchName_sb, cchNameBufferSize );
if ( !bSuccess ) return null;
return pchName_sb.ToString();
}
// string
// with: Detect_StringReturn
public string GetCurrentGameLanguage()
{
IntPtr string_pointer;
string_pointer = platform.ISteamApps_GetCurrentGameLanguage();
return Marshal.PtrToStringAnsi( string_pointer );
}
// int
public int GetDLCCount()
{
return platform.ISteamApps_GetDLCCount();
}
// bool
public bool GetDlcDownloadProgress( AppId_t nAppID /*AppId_t*/, out ulong punBytesDownloaded /*uint64 **/, out ulong punBytesTotal /*uint64 **/ )
{
return platform.ISteamApps_GetDlcDownloadProgress( nAppID.Value, out punBytesDownloaded, out punBytesTotal );
}
// uint
public uint GetEarliestPurchaseUnixTime( AppId_t nAppID /*AppId_t*/ )
{
return platform.ISteamApps_GetEarliestPurchaseUnixTime( nAppID.Value );
}
// SteamAPICall_t
public CallbackHandle GetFileDetails( string pszFileName /*const char **/, Action<FileDetailsResult_t, bool> CallbackFunction = null /*Action<FileDetailsResult_t, bool>*/ )
{
SteamAPICall_t callback = 0;
callback = platform.ISteamApps_GetFileDetails( pszFileName );
if ( CallbackFunction == null ) return null;
return FileDetailsResult_t.CallResult( steamworks, callback, CallbackFunction );
}
// uint
public uint GetInstalledDepots( AppId_t appID /*AppId_t*/, IntPtr pvecDepots /*DepotId_t **/, uint cMaxDepots /*uint32*/ )
{
return platform.ISteamApps_GetInstalledDepots( appID.Value, (IntPtr) pvecDepots, cMaxDepots );
}
// string
// with: Detect_StringReturn
public string GetLaunchQueryParam( string pchKey /*const char **/ )
{
IntPtr string_pointer;
string_pointer = platform.ISteamApps_GetLaunchQueryParam( pchKey );
return Marshal.PtrToStringAnsi( string_pointer );
}
// void
public void InstallDLC( AppId_t nAppID /*AppId_t*/ )
{
platform.ISteamApps_InstallDLC( nAppID.Value );
}
// bool
public bool MarkContentCorrupt( bool bMissingFilesOnly /*bool*/ )
{
return platform.ISteamApps_MarkContentCorrupt( bMissingFilesOnly );
}
// void
public void RequestAllProofOfPurchaseKeys()
{
platform.ISteamApps_RequestAllProofOfPurchaseKeys();
}
// void
public void RequestAppProofOfPurchaseKey( AppId_t nAppID /*AppId_t*/ )
{
platform.ISteamApps_RequestAppProofOfPurchaseKey( nAppID.Value );
}
// void
public void UninstallDLC( AppId_t nAppID /*AppId_t*/ )
{
platform.ISteamApps_UninstallDLC( nAppID.Value );
}
}
}
| |
// Copyright 2005, 2006 - Morten Nielsen (www.iter.dk)
//
// This file is part of SharpMap.
// SharpMap is free software; you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation; either version 2 of the License, or
// (at your option) any later version.
//
// SharpMap is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
// You should have received a copy of the GNU Lesser General Public License
// along with SharpMap; if not, write to the Free Software
// Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
// SOURCECODE IS MODIFIED FROM ANOTHER WORK AND IS ORIGINALLY BASED ON GeoTools.NET:
/*
* Copyright (C) 2002 Urban Science Applications, Inc.
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
*/
#region Using
using System;
using System.IO;
using System.Text;
#endregion Using
// http://java.sun.com/j2se/1.4/docs/api/java/io/StreamTokenizer.html
// a better implementation could be written. Here is a good Java implementation of StreamTokenizer.
// http://www.flex-compiler.lcs.mit.edu/Harpoon/srcdoc/java/io/StreamTokenizer.html
// a C# StringTokenizer
// http://sourceforge.net/snippet/detail.php?type=snippet&id=101171
namespace SharpMap.Converters.WellKnownText.IO
{
///<summary>
///The StreamTokenizer class takes an input stream and parses it into "tokens", allowing the tokens to be read one at a time. The parsing process is controlled by a table and a number of flags that can be set to various states. The stream tokenizer can recognize identifiers, numbers, quoted strings, and various comment styles.
///</summary>
///<remarks>
///This is a crude c# implementation of Java's <a href="http://java.sun.com/products/jdk/1.2/docs/api/java/io/StreamTokenizer.html">StreamTokenizer</a> class.
///</remarks>
internal class StreamTokenizer
{
private int _colNumber = 1;
private StringBuilder _currentToken;
private TokenType _currentTokenType;
private bool _ignoreWhitespace = false;
private int _lineNumber = 1;
private TextReader _reader;
#region Constructors
/// <summary>
/// Initializes a new instance of the StreamTokenizer class.
/// </summary>
/// <param name="reader">A TextReader with some text to read.</param>
/// <param name="ignoreWhitespace">Flag indicating whether whitespace should be ignored.</param>
public StreamTokenizer(TextReader reader, bool ignoreWhitespace)
{
if (reader == null)
{
throw new ArgumentNullException("reader");
}
_reader = reader;
_ignoreWhitespace = ignoreWhitespace;
_currentToken = new StringBuilder();
}
#endregion Constructors
#region Properties
/// <summary>
/// The current line number of the stream being read.
/// </summary>
public int LineNumber
{
get { return _lineNumber; }
}
/// <summary>
/// The current column number of the stream being read.
/// </summary>
public int Column
{
get { return _colNumber; }
}
#endregion Properties
#region Methods
/// <summary>
/// If the current token is a number, this method returns the value of that number.
/// </summary>
/// <remarks>
/// If the current token is a number, this method returns the value of that number. The current token is a number when GetTokenType() returns TokenType.Number.
/// </remarks>
/// <exception cref="FormatException">Current token is not a number in a valid format.</exception>
public double GetNumericValue()
{
string number = GetStringValue();
if (GetTokenType() == TokenType.Number)
{
return double.Parse(number, Map.NumberFormatEnUs);
}
throw new Exception(String.Format(Map.NumberFormatEnUs,
"The token '{0}' is not a number at line {1} column {2}.", number,
LineNumber, Column));
}
/// <summary>
/// If the current token is a word token, this method returns a string giving the characters of the word token.
/// </summary>
public string GetStringValue()
{
return _currentToken.ToString();
}
/// <summary>
/// Gets the token type of the current token.
/// </summary>
/// <returns></returns>
public TokenType GetTokenType()
{
return _currentTokenType;
}
/// <summary>
/// Returns the next token.
/// </summary>
/// <param name="ignoreWhitespace">Determines is whitespace is ignored. True if whitespace is to be ignored.</param>
/// <returns>The TokenType of the next token.</returns>
public TokenType NextToken(bool ignoreWhitespace)
{
TokenType nextTokenType;
if (ignoreWhitespace)
{
nextTokenType = NextNonWhitespaceToken();
}
else
{
nextTokenType = NextTokenAny();
}
return nextTokenType;
}
/// <summary>
/// Returns the next token.
/// </summary>
/// <returns>The TokenType of the next token.</returns>
public TokenType NextToken()
{
return NextToken(_ignoreWhitespace);
}
private TokenType NextTokenAny()
{
TokenType nextTokenType = TokenType.Eof;
char[] chars = new char[1];
//_currentToken.Clear();
_currentToken.Length = 0;
_currentTokenType = TokenType.Eof;
int finished = _reader.Read(chars, 0, 1);
bool isNumber = false;
bool isWord = false;
Char currentCharacter;
Char nextCharacter;
while (finished != 0)
{
currentCharacter = chars[0];
nextCharacter = (char)_reader.Peek();
_currentTokenType = GetType(currentCharacter);
nextTokenType = GetType(nextCharacter);
// handling of words with _
if (isWord && currentCharacter == '_')
{
_currentTokenType = TokenType.Word;
}
                // handling of words ending in numbers
if (isWord && _currentTokenType == TokenType.Number)
{
_currentTokenType = TokenType.Word;
}
if (_currentTokenType == TokenType.Word && nextCharacter == '_')
{
                    //enable words with _ in between
nextTokenType = TokenType.Word;
isWord = true;
}
if (_currentTokenType == TokenType.Word && nextTokenType == TokenType.Number)
{
//enable words ending with numbers
nextTokenType = TokenType.Word;
isWord = true;
}
// handle negative numbers
if (currentCharacter == '-' && nextTokenType == TokenType.Number) // && isNumber == false)
{
_currentTokenType = TokenType.Number;
nextTokenType = TokenType.Number;
//isNumber = true;
}
// this handles numbers with exponential values
if (isNumber && (nextCharacter.Equals('E') || nextCharacter.Equals('e')))
{
nextTokenType = TokenType.Number;
}
if (isNumber && (currentCharacter.Equals('E') || currentCharacter.Equals('e')) && (nextTokenType == TokenType.Number || nextTokenType == TokenType.Symbol))
{
_currentTokenType = TokenType.Number;
nextTokenType = TokenType.Number;
}
// this handles numbers with a decimal point
if (isNumber && nextTokenType == TokenType.Number && currentCharacter == '.')
{
_currentTokenType = TokenType.Number;
}
if (_currentTokenType == TokenType.Number && nextCharacter == '.' && isNumber == false)
{
nextTokenType = TokenType.Number;
isNumber = true;
}
_colNumber++;
if (_currentTokenType == TokenType.Eol)
{
_lineNumber++;
_colNumber = 1;
}
_currentToken.Append(currentCharacter);
//if (_currentTokenType==TokenType.Word && nextCharacter=='_')
//{
// enable words with _ inbetween
// finished = _reader.Read(chars,0,1);
//}
if (_currentTokenType != nextTokenType)
{
finished = 0;
}
else if (_currentTokenType == TokenType.Symbol && currentCharacter != '-')
{
finished = 0;
}
else
{
finished = _reader.Read(chars, 0, 1);
}
}
return _currentTokenType;
}
        /// <summary>
        /// Determines a character's type (e.g. number, symbol, word character).
        /// </summary>
        /// <param name="character">The character to classify.</param>
        /// <returns>The TokenType of the character.</returns>
private TokenType GetType(char character)
{
if (Char.IsDigit(character))
{
return TokenType.Number;
}
else if (Char.IsLetter(character))
{
return TokenType.Word;
}
else if (character == '\n')
{
return TokenType.Eol;
}
else if (Char.IsWhiteSpace(character) || Char.IsControl(character))
{
return TokenType.Whitespace;
}
else //(Char.IsSymbol(character))
{
return TokenType.Symbol;
}
}
/// <summary>
/// Returns next token that is not whitespace.
/// </summary>
/// <returns></returns>
private TokenType NextNonWhitespaceToken()
{
TokenType tokentype = NextTokenAny();
while (tokentype == TokenType.Whitespace || tokentype == TokenType.Eol)
{
tokentype = NextTokenAny();
}
return tokentype;
}
#endregion Methods
}
}
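// Usage sketch (illustrative only, not part of SharpMap): tokenizes a short WKT-style fragment.
// It assumes the TokenType enum defined alongside StreamTokenizer elsewhere in this namespace,
// and is guarded by a compilation symbol so it never affects the real build.
#if STREAMTOKENIZER_USAGE_SKETCH
namespace SharpMap.Converters.WellKnownText.IO
{
    internal static class StreamTokenizerUsageSketch
    {
        internal static void Run()
        {
            using (var reader = new System.IO.StringReader("POINT (30.5 -10)"))
            {
                // Skip whitespace so only words, numbers and symbols are reported.
                var tokenizer = new StreamTokenizer(reader, true);
                while (tokenizer.NextToken() != TokenType.Eof)
                {
                    System.Console.WriteLine("{0}: '{1}'", tokenizer.GetTokenType(), tokenizer.GetStringValue());
                }
            }
        }
    }
}
#endif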
| |
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Threading.Tasks;
using Baseline;
using Marten.Events.Archiving;
using Marten.Events.Querying;
using Marten.Exceptions;
using Marten.Internal.Sessions;
using Marten.Linq;
using Marten.Schema.Identity;
using Marten.Storage;
using Npgsql;
using Weasel.Core;
#nullable enable
namespace Marten.Events
{
internal class EventStore: QueryEventStore, IEventStore
{
private readonly DocumentSessionBase _session;
private readonly ITenant _tenant;
private readonly DocumentStore _store;
public EventStore(DocumentSessionBase session, DocumentStore store, ITenant tenant) : base(session, store, tenant)
{
_session = session;
_store = store;
_tenant = tenant;
}
public StreamAction Append(Guid stream, IEnumerable<object> events)
{
            //TODO NRT: We're ignoring null here so as not to unintentionally change any downstream behaviour - replace with null guards in the future.
return Append(stream, events?.ToArray()!);
}
public StreamAction Append(Guid stream, params object[] events)
{
return _store.Events.Append(_session, stream, events);
}
public StreamAction Append(string stream, IEnumerable<object> events)
{
return Append(stream, events?.ToArray()!);
}
public StreamAction Append(string stream, params object[] events)
{
return _store.Events.Append(_session, stream, events);
}
public StreamAction Append(Guid stream, long expectedVersion, IEnumerable<object> events)
{
return Append(stream, expectedVersion, events?.ToArray()!);
}
public StreamAction Append(Guid stream, long expectedVersion, params object[] events)
{
var eventStream = Append(stream, events);
eventStream.ExpectedVersionOnServer = expectedVersion - eventStream.Events.Count;
return eventStream;
}
public StreamAction Append(string stream, long expectedVersion, IEnumerable<object> events)
{
return Append(stream, expectedVersion, events?.ToArray()!);
}
public StreamAction Append(string stream, long expectedVersion, params object[] events)
{
var eventStream = Append(stream, events);
eventStream.ExpectedVersionOnServer = expectedVersion - events.Length;
return eventStream;
}
public StreamAction StartStream<TAggregate>(Guid id, IEnumerable<object> events) where TAggregate : class
{
return StartStream<TAggregate>(id, events?.ToArray()!);
}
public StreamAction StartStream<T>(Guid id, params object[] events) where T : class
{
return StartStream(typeof(T), id, events);
}
public StreamAction StartStream(Type aggregateType, Guid id, IEnumerable<object> events)
{
return StartStream(aggregateType, id, events?.ToArray()!);
}
public StreamAction StartStream(Type aggregateType, Guid id, params object[] events)
{
var stream = _store.Events.StartStream(_session, id, events);
stream.AggregateType = aggregateType;
return stream;
}
public StreamAction StartStream<TAggregate>(string streamKey, IEnumerable<object> events) where TAggregate : class
{
return StartStream<TAggregate>(streamKey, events?.ToArray()!);
}
public StreamAction StartStream<TAggregate>(string streamKey, params object[] events) where TAggregate : class
{
return StartStream(typeof(TAggregate), streamKey, events);
}
public StreamAction StartStream(Type aggregateType, string streamKey, IEnumerable<object> events)
{
return StartStream(aggregateType, streamKey, events?.ToArray()!);
}
public StreamAction StartStream(Type aggregateType, string streamKey, params object[] events)
{
var stream = _store.Events.StartStream(_session, streamKey, events);
stream.AggregateType = aggregateType;
return stream;
}
public StreamAction StartStream(Guid id, IEnumerable<object> events)
{
return StartStream(id, events?.ToArray()!);
}
public StreamAction StartStream(Guid id, params object[] events)
{
return _store.Events.StartStream(_session, id, events);
}
public StreamAction StartStream(string streamKey, IEnumerable<object> events)
{
return StartStream(streamKey, events?.ToArray()!);
}
public StreamAction StartStream(string streamKey, params object[] events)
{
return _store.Events.StartStream(_session, streamKey, events);
}
public StreamAction StartStream<TAggregate>(IEnumerable<object> events) where TAggregate : class
{
return StartStream<TAggregate>(events?.ToArray()!);
}
public StreamAction StartStream<TAggregate>(params object[] events) where TAggregate : class
{
return StartStream(typeof(TAggregate), events);
}
public StreamAction StartStream(Type aggregateType, IEnumerable<object> events)
{
return StartStream(aggregateType, events?.ToArray()!);
}
public StreamAction StartStream(Type aggregateType, params object[] events)
{
return StartStream(aggregateType, CombGuidIdGeneration.NewGuid(), events);
}
public StreamAction StartStream(IEnumerable<object> events)
{
return StartStream(events?.ToArray()!);
}
public StreamAction StartStream(params object[] events)
{
return StartStream(CombGuidIdGeneration.NewGuid(), events);
}
public async Task AppendOptimistic(string streamKey, CancellationToken token, params object[] events)
{
_store.Events.EnsureAsStringStorage(_session);
var cmd = new NpgsqlCommand($"select version from {_store.Events.DatabaseSchemaName}.mt_streams where id = :id")
.With("id", streamKey);
var version = await readVersionFromExistingStream(streamKey, token, cmd).ConfigureAwait(false);
var action = Append(streamKey, events);
action.ExpectedVersionOnServer = version;
}
private async Task<long> readVersionFromExistingStream(object streamId, CancellationToken token, NpgsqlCommand cmd)
{
long version = 0;
try
{
using var reader = await _session.Database.ExecuteReaderAsync(cmd, token).ConfigureAwait(false);
if (await reader.ReadAsync(token).ConfigureAwait(false))
{
version = await reader.GetFieldValueAsync<long>(0, token).ConfigureAwait(false);
}
}
catch (Exception e)
{
if (e.Message.Contains(MartenCommandException.MaybeLockedRowsMessage))
{
throw new StreamLockedException(streamId, e.InnerException);
}
throw;
}
if (version == 0)
{
throw new NonExistentStreamException(streamId);
}
return version;
}
public Task AppendOptimistic(string streamKey, params object[] events)
{
return AppendOptimistic(streamKey, CancellationToken.None, events);
}
public async Task AppendOptimistic(Guid streamId, CancellationToken token, params object[] events)
{
_store.Events.EnsureAsGuidStorage(_session);
var cmd = new NpgsqlCommand($"select version from {_store.Events.DatabaseSchemaName}.mt_streams where id = :id")
.With("id", streamId);
var version = await readVersionFromExistingStream(streamId, token, cmd).ConfigureAwait(false);
var action = Append(streamId, events);
action.ExpectedVersionOnServer = version;
}
public Task AppendOptimistic(Guid streamId, params object[] events)
{
return AppendOptimistic(streamId, CancellationToken.None, events);
}
public async Task AppendExclusive(string streamKey, CancellationToken token, params object[] events)
{
_store.Events.EnsureAsStringStorage(_session);
var cmd = new NpgsqlCommand($"select version from {_store.Events.DatabaseSchemaName}.mt_streams where id = :id for update")
.With("id", streamKey);
await _session.Database.BeginTransactionAsync(token).ConfigureAwait(false);
var version = await readVersionFromExistingStream(streamKey, token, cmd).ConfigureAwait(false);
var action = Append(streamKey, events);
action.ExpectedVersionOnServer = version;
}
public Task AppendExclusive(string streamKey, params object[] events)
{
return AppendExclusive(streamKey, CancellationToken.None, events);
}
public async Task AppendExclusive(Guid streamId, CancellationToken token, params object[] events)
{
_store.Events.EnsureAsGuidStorage(_session);
var cmd = new NpgsqlCommand($"select version from {_store.Events.DatabaseSchemaName}.mt_streams where id = :id for update")
.With("id", streamId);
await _session.Database.BeginTransactionAsync(token).ConfigureAwait(false);
var version = await readVersionFromExistingStream(streamId, token, cmd).ConfigureAwait(false);
var action = Append(streamId, events);
action.ExpectedVersionOnServer = version;
}
public Task AppendExclusive(Guid streamId, params object[] events)
{
return AppendExclusive(streamId, CancellationToken.None, events);
}
public void ArchiveStream(Guid streamId)
{
var op = new ArchiveStreamOperation(_store.Events, streamId);
_session.QueueOperation(op);
}
public void ArchiveStream(string streamKey)
{
var op = new ArchiveStreamOperation(_store.Events, streamKey);
_session.QueueOperation(op);
}
}
}
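// Usage sketch (illustrative, not part of Marten): appends to an existing stream with an optimistic
// concurrency check via the IEventStore interface implemented above. The SomethingHappened event type
// is a placeholder assumption; the append is committed when the owning IDocumentSession saves its changes.
#if EVENTSTORE_USAGE_SKETCH
namespace Marten.Events.Examples
{
    using System;
    using System.Threading.Tasks;

    internal static class EventStoreUsageSketch
    {
        private sealed class SomethingHappened
        {
            public string Description { get; set; } = string.Empty;
        }

        internal static async Task AppendWithOptimisticCheckAsync(IEventStore events, Guid streamId)
        {
            // Reads the current stream version and queues the new event with that expected version.
            await events.AppendOptimistic(streamId, new SomethingHappened { Description = "example" });
        }
    }
}
#endif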
| |
using System.Xml;
using System.Collections.Generic;
using System.Text;
using System.IO;
namespace System.Primitives.TextPacks
{
/// <summary>
/// <root key="value">
/// <sub key2="value2">text</sub>
/// </root>
/// Key = value {attrib on root}
/// Sub:: = text {node in root}
/// Sub::Key2 = value2 {attrib on node}
/// </summary>
public class XmlTextPack : TextPackBase
{
public static XmlTextPack Instance = new XmlTextPack();
#region Class Types
/// <summary>
/// Context
/// </summary>
private class Context
{
public string Key;
public string ScopeKey;
public XmlElement XmlElement;
public XmlDocument XmlDocument;
/// <summary>
/// Gets the context.
/// </summary>
/// <param name="pack">The pack.</param>
/// <param name="contextKey">The context key.</param>
/// <param name="context">The context.</param>
            /// <returns>The context associated with the specified context key.</returns>
public static Context GetContext(string pack, string contextKey, ref object context)
{
if (contextKey == null)
throw new ArgumentNullException("contextKey");
if (context == null)
context = new Context();
var context2 = (Context)context;
if (context2.Key == contextKey)
return context2;
context2.Key = contextKey;
var xmlDocument = new XmlDocument();
if (pack.Length > 0)
xmlDocument.LoadXml(pack);
context2.XmlDocument = xmlDocument;
return context2;
}
}
#endregion
/// <summary>
/// Initializes a new instance of the <see cref="XmlTextPack"/> class.
/// </summary>
private XmlTextPack() { }
        /// <summary>
        /// Parses the key into its scope and item parts.
        /// </summary>
        /// <param name="key">The key to parse.</param>
        /// <param name="isValue">true if the key refers to element text rather than an attribute.</param>
        /// <param name="scopeKey">The scope (element path) portion of the key.</param>
        /// <param name="itemKey">The item (attribute name) portion of the key.</param>
private void ParseKey(string key, out bool isValue, out string scopeKey, out string itemKey)
{
// precalc key
if (key.IndexOf(CoreEx.Scope) == -1)
key = CoreEx.Scope + key;
// parse key
isValue = key.EndsWith(CoreEx.Scope);
if (!isValue)
{
int scopeIndex = key.LastIndexOf(CoreEx.Scope);
if (scopeIndex == 0)
{
scopeKey = string.Empty;
itemKey = key.Substring(2);
}
else
{
scopeIndex += 2;
scopeKey = key.Substring(0, scopeIndex);
itemKey = key.Substring(scopeIndex);
}
}
else
{
scopeKey = key;
itemKey = string.Empty;
}
}
/// <summary>
/// Encodes the key.
/// </summary>
/// <param name="key">The key.</param>
        /// <returns>The encoded key.</returns>
private string EncodeKey(string key)
{
if (key.IndexOf(CoreEx.Scope) == -1)
return "\x01" + key.Replace(CoreEx.Scope, "\x01") + "-";
return key.Replace(CoreEx.Scope, "\x01") + "_";
}
/// <summary>
/// Decodes the key.
/// </summary>
/// <param name="key">The key.</param>
        /// <returns>The decoded key.</returns>
private string DecodeKey(string key)
{
if (key.EndsWith("-"))
return key.Substring(1, key.Length - 2).Replace("\x01", CoreEx.Scope);
return key.Substring(0, key.Length - 1).Replace("\x01", CoreEx.Scope);
}
        /// <summary>
        /// Parses an encoded key (as produced by EncodeKey) into its scope and item parts.
        /// </summary>
        /// <param name="key">The encoded key to parse.</param>
        /// <param name="isValue">true if the key refers to element text rather than an attribute.</param>
        /// <param name="scopeKey">The scope (element path) portion of the key.</param>
        /// <param name="itemKey">The item (attribute name) portion of the key.</param>
private void ParseEncodedKey(string key, out bool isValue, out string scopeKey, out string itemKey)
{
// parse key
int keyLength = key.Length;
isValue = (key[keyLength - 2] == '\x01'); //- key.EndsWith("\x01_");
if (!isValue)
{
int scopeIndex = key.LastIndexOf("\x01", keyLength - 2);
if (scopeIndex == 0)
{
scopeKey = string.Empty;
itemKey = key.Substring(1, keyLength - 2);
}
else
{
scopeIndex += 1;
scopeKey = key.Substring(0, scopeIndex);
itemKey = key.Substring(scopeIndex, keyLength - scopeIndex - 1);
}
}
else
{
scopeKey = key.Substring(0, keyLength - 1);
itemKey = string.Empty;
}
}
/// <summary>
/// Gets the value associated with the specified key out of the packed representation provided.
/// </summary>
/// <param name="pack">The packed representation of the data</param>
/// <param name="key">The key to use</param>
/// <param name="contextKey">The context key to use in looking for a preparsed collection.</param>
/// <param name="context">The context object to use in looking for a preparsed collection.</param>
        /// <returns>The value associated with the key, or an empty string if it cannot be found.</returns>
public override string GetValue(string pack, string key, string contextKey, ref object context)
{
if (string.IsNullOrEmpty(pack))
return string.Empty;
// context
var context2 = Context.GetContext(pack, contextKey, ref context);
var xmlDocument = context2.XmlDocument;
// parse key
bool isValue;
string scopeKey;
string itemKey;
ParseKey(key, out isValue, out scopeKey, out itemKey);
// xmlelement
XmlElement xmlElement;
if (context2.ScopeKey != scopeKey)
{
context2.ScopeKey = scopeKey;
// find element
xmlElement = xmlDocument.DocumentElement;
if (xmlElement == null)
{
context2.XmlElement = null;
return string.Empty;
}
string xpath = "/" + xmlElement.Name;
if (scopeKey.Length > 0)
// singhj: scopeKey doesn't include trailing "::"
xpath += "/" + scopeKey.Replace(CoreEx.Scope, "/"); // was: xpath += "/" + scopeKey.Substring(0, scopeKey.Length - 2).Replace(KernelText.Scope, "/");
xmlElement = (xmlDocument.SelectSingleNode(xpath) as XmlElement);
context2.XmlElement = xmlElement;
}
else
xmlElement = context2.XmlElement;
// get value
return (xmlElement != null ? (!isValue ? xmlElement.GetAttribute(itemKey) : xmlElement.InnerText) : string.Empty);
}
/// <summary>
/// Sets the value within the packed string specified that is associated with the key provided.
/// </summary>
/// <param name="pack">The packed string to inspect.</param>
/// <param name="key">The key to use</param>
/// <param name="value">The value to set</param>
/// <param name="contextKey">The context key to use in storing the result.</param>
/// <param name="context">The context object to use in storing the result.</param>
/// <returns>The packed format of the value</returns>
public override string SetValue(string pack, string key, string value, string contextKey, ref object context)
{
// context
var context2 = Context.GetContext(pack, contextKey, ref context);
var xmlDocument = context2.XmlDocument;
// parse key
bool isValue;
string scopeKey;
string itemKey;
ParseKey(key, out isValue, out scopeKey, out itemKey);
// xmlelement
XmlElement xmlElement;
if (context2.ScopeKey != scopeKey)
{
context2.ScopeKey = scopeKey;
// find element
xmlElement = xmlDocument.DocumentElement;
if (xmlElement == null)
{
xmlElement = xmlDocument.CreateElement("root");
xmlDocument.AppendChild(xmlElement);
}
if (scopeKey.Length > 0)
{
// singhj: scopeKey doesn't include trailing "::"
string[] scopeKeyArray = scopeKey.Split(new string[] { CoreEx.Scope }, StringSplitOptions.None); //was: string[] scopeKeyArray = scopeKey.Substring(0, scopeKey.Length - 2).Split(new string[] { KernelText.Scope }, StringSplitOptions.None);
foreach (string scopeKey2 in scopeKeyArray)
{
var xmlElement2 = xmlElement[scopeKey2];
if (xmlElement2 == null)
{
xmlElement2 = xmlDocument.CreateElement(scopeKey2);
xmlElement.AppendChild(xmlElement2);
}
xmlElement = xmlElement2;
}
}
context2.XmlElement = xmlElement;
}
else
xmlElement = context2.XmlElement;
// set value
if (!isValue)
xmlElement.SetAttribute(itemKey, value);
            else
                xmlElement.InnerText = value; // XmlElement.Value cannot be set; InnerText mirrors what GetValue reads back
return xmlDocument.InnerXml;
}
#region CODEC
        /// <summary>
        /// Decodes the contents of the pack provided into the set instance provided, based on the logic
        /// provided by <see cref="M:PackDecodeRecurse">PackDecodeRecurse</see>. The result is contained in the set provided.
        /// </summary>
        /// <param name="pack">The packed string to process.</param>
        /// <param name="namespaceKey">The namespace key used to qualify keys.</param>
        /// <param name="set">The dictionary that receives the decoded key/value pairs.</param>
        /// <param name="validKeyIndex">Collection of valid keys used to filter the packed items.</param>
public override void PackDecode(string pack, string namespaceKey, IDictionary<string, string> set, IDictionary<string, string> validKeyIndex)
{
if (string.IsNullOrEmpty(pack))
return;
var r = XmlReader.Create(new StringReader(pack));
if (r.IsStartElement())
PackDecodeRecurse(string.Empty, r, (namespaceKey ?? string.Empty), set, validKeyIndex);
r.Close();
}
        /// <summary>
        /// Recursively decodes the XML content under the current element into the set.
        /// </summary>
        /// <param name="scope">The scope (element path) accumulated so far.</param>
        /// <param name="r">The XML reader.</param>
        /// <param name="namespaceKey">The namespace key.</param>
        /// <param name="set">The dictionary that receives the decoded key/value pairs.</param>
        /// <param name="validKeyIndex">Index of the valid keys.</param>
private static void PackDecodeRecurse(string scope, XmlReader r, string namespaceKey, IDictionary<string, string> set, IDictionary<string, string> validKeyIndex)
{
bool isInNamespace = scope.StartsWith(namespaceKey, StringComparison.OrdinalIgnoreCase);
if (isInNamespace)
{
// parse attributes
if (r.HasAttributes)
{
while (r.MoveToNextAttribute())
{
string key = scope + r.Name;
// check validkeyindex and commit
if ((validKeyIndex == null) || (validKeyIndex.ContainsKey(key)))
set[scope + r.Name] = r.Value;
}
// move the reader back to the element node.
r.MoveToElement();
}
}
if (!r.IsEmptyElement)
{
// read the start tag.
r.Read();
bool isRead = true;
while (isRead)
switch (r.MoveToContent())
{
case System.Xml.XmlNodeType.CDATA:
case System.Xml.XmlNodeType.Text:
if (isInNamespace)
{
string key = (scope.Length > 0 ? scope : CoreEx.Scope);
// check validkeyindex and commit
if ((validKeyIndex == null) || (validKeyIndex.ContainsKey(key)))
set[key] = r.Value;
}
r.Read();
break;
case System.Xml.XmlNodeType.Element:
// handle nested elements.
if (r.IsStartElement())
{
PackDecodeRecurse(scope + r.Name + CoreEx.Scope, r, namespaceKey, set, validKeyIndex);
r.Read();
}
break;
default:
isRead = false;
break;
}
}
}
        /// <summary>
        /// Packs the contents of the set provided into an XML string representation.
        /// The result is written to the provided StringBuilder instance.
        /// </summary>
        /// <param name="set">The dictionary whose contents are packed.</param>
        /// <param name="namespaceKey">The namespace key used to qualify keys.</param>
        /// <param name="b">The StringBuilder that receives the packed XML.</param>
        /// <param name="validKeyIndex">Index of the valid keys.</param>
public override void PackEncode(IDictionary<string, string> set, string namespaceKey, StringBuilder b, IDictionary<string, string> validKeyIndex)
{
if ((set == null) || (set.Count == 0))
return;
// precalc keys
            // pull keys from the existing set and validate against the provided IDictionary, then encode into the tree key structure
            // each key is tagged with an identifier for decoding at the end so the original key can be recovered
var keyList = new List<string>((ICollection<string>)set.Keys);
for (int keyListIndex = keyList.Count - 1; keyListIndex >= 0; keyListIndex--)
{
string key = keyList[keyListIndex];
// check for validkeyindex
if ((validKeyIndex != null) && (!validKeyIndex.ContainsKey(key)))
{
keyList.RemoveAt(keyListIndex);
continue;
}
// encode key
keyList[keyListIndex] = EncodeKey(key);
}
keyList.Sort(0, keyList.Count, StringComparer.OrdinalIgnoreCase);
//
var xmlWriter = XmlTextWriter.Create(b);
xmlWriter.WriteStartElement("root");
//
string lastScopeKey = string.Empty;
string elementValue = null;
foreach (string key in keyList)
{
// parse encoded key
bool isValue;
string scopeKey;
string itemKey;
ParseEncodedKey(key, out isValue, out scopeKey, out itemKey);
// process element
if ((scopeKey.Length > 1) && (lastScopeKey != scopeKey))
{
// write latched value
if (elementValue != null)
{
xmlWriter.WriteString(elementValue);
elementValue = null;
}
// element
if (scopeKey.StartsWith(lastScopeKey))
{
// start elements
int lastScopeKeyLength = lastScopeKey.Length;
string[] createScopeKeyArray = scopeKey.Substring(lastScopeKeyLength, scopeKey.Length - lastScopeKeyLength - 1).Split('\x01');
foreach (string createScopeKey in createScopeKeyArray)
xmlWriter.WriteStartElement(createScopeKey);
}
else
{
// end and start elements
string[] lastScopeKeyArray = lastScopeKey.Substring(0, lastScopeKey.Length - 1).Split('\x01');
string[] scopeKeyArray = scopeKey.Substring(0, scopeKey.Length - 1).Split('\x01');
int scopeKeyArrayLength = scopeKeyArray.Length;
// skip existing elements
int index;
for (index = 0; index < lastScopeKeyArray.Length; index++)
if ((index >= scopeKeyArrayLength) || (scopeKeyArray[index] != lastScopeKeyArray[index]))
break;
// end elements
for (int lastScopeKeyIndex = lastScopeKeyArray.Length - 1; lastScopeKeyIndex >= index; lastScopeKeyIndex--)
xmlWriter.WriteEndElement(); //-lastScopeKeyArray[lastScopeKeyIndex]
// start elements
for (int scopeKeyIndex = index; scopeKeyIndex < scopeKeyArray.Length; scopeKeyIndex++)
xmlWriter.WriteStartElement(scopeKeyArray[scopeKeyIndex]);
}
lastScopeKey = scopeKey;
}
// decode key and set value
string value = set[DecodeKey(key)];
if (!isValue)
xmlWriter.WriteAttributeString(itemKey, (value ?? string.Empty));
else
if (!string.IsNullOrEmpty(value))
elementValue = value;
}
// overflow close
// write latched value
if (elementValue != null)
xmlWriter.WriteString(elementValue);
xmlWriter.WriteEndDocument();
xmlWriter.Close();
}
#endregion
}
}
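// Usage sketch (illustrative, not part of this library): sets and reads back two attributes on the
// root element using the key format described in the class summary. It assumes CoreEx.Scope is the
// "::" delimiter shown there, and the "demo" context key is an arbitrary caching key for this example.
#if XMLTEXTPACK_USAGE_SKETCH
namespace System.Primitives.TextPacks.Examples
{
    internal static class XmlTextPackUsageSketch
    {
        internal static void Run()
        {
            object context = null;
            var pack = XmlTextPack.Instance;
            // Build up the packed XML one key at a time; each call returns the updated pack string.
            string xml = pack.SetValue(string.Empty, "Key", "value", "demo", ref context);
            xml = pack.SetValue(xml, "Other", "value2", "demo", ref context);
            // Read the values back out of the packed representation.
            string first = pack.GetValue(xml, "Key", "demo", ref context);    // "value"
            string second = pack.GetValue(xml, "Other", "demo", ref context); // "value2"
            Console.WriteLine("{0} / {1} / {2}", first, second, xml);
        }
    }
}
#endif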
| |
// Copyright (c) Microsoft. All rights reserved.
// Licensed under the MIT license. See LICENSE file in the project root for full license information.
using System;
using System.Collections.Generic;
using System.Collections;
using System.Globalization;
using Xunit;
using SortedList_SortedListUtils;
namespace SortedListCtorIntIKeyComp
{
public class Driver<KeyType, ValueType>
{
private Test m_test;
public Driver(Test test)
{
m_test = test;
}
private CultureInfo _english = new CultureInfo("en");
private CultureInfo _german = new CultureInfo("de");
private CultureInfo _danish = new CultureInfo("da");
private CultureInfo _turkish = new CultureInfo("tr");
//CompareString lcid_en-US, EMPTY_FLAGS, "AE", 0, 2, "\u00C4", 0, 1, 1, NULL_STRING
//CompareString 0x10407, EMPTY_FLAGS, "AE", 0, 2, "\u00C4", 0, 1, 0, NULL_STRING
//CompareString lcid_da-DK, "NORM_IGNORECASE", "aA", 0, 2, "Aa", 0, 2, -1, NULL_STRING
//CompareString lcid_en-US, "NORM_IGNORECASE", "aA", 0, 2, "Aa", 0, 2, 0, NULL_STRING
//CompareString lcid_tr-TR, "NORM_IGNORECASE", "\u0131", 0, 1, "\u0049", 0, 1, 0, NULL_STRING
private const String strAE = "AE";
private const String strUC4 = "\u00C4";
private const String straA = "aA";
private const String strAa = "Aa";
private const String strI = "I";
private const String strTurkishUpperI = "\u0131";
private const String strBB = "BB";
private const String strbb = "bb";
private const String value = "Default_Value";
public void TestVanilla(int capacity)
{
SortedList<String, String> _dic;
IComparer<String> comparer;
IComparer<String>[] predefinedComparers = new IComparer<String>[] {
StringComparer.CurrentCulture,
StringComparer.CurrentCultureIgnoreCase,
StringComparer.OrdinalIgnoreCase,
StringComparer.Ordinal};
foreach (IComparer<String> predefinedComparer in predefinedComparers)
{
_dic = new SortedList<String, String>(capacity, predefinedComparer);
m_test.Eval(_dic.Comparer == predefinedComparer, String.Format("Err_4568aijueud! Comparer differ expected: {0} actual: {1}", predefinedComparer, _dic.Comparer));
m_test.Eval(_dic.Count == 0, String.Format("Err_23497sg! Count different: {0}", _dic.Count));
m_test.Eval(((IDictionary<KeyType, ValueType>)_dic).IsReadOnly == false, String.Format("Err_435wsdg! Count different: {0}", ((IDictionary<KeyType, ValueType>)_dic).IsReadOnly));
m_test.Eval(_dic.Keys.Count == 0, String.Format("Err_25ag! Count different: {0}", _dic.Keys.Count));
m_test.Eval(_dic.Values.Count == 0, String.Format("Err_23agd! Count different: {0}", _dic.Values.Count));
}
//Current culture
CultureInfo.DefaultThreadCurrentCulture = _english;
comparer = StringComparer.CurrentCulture;
_dic = new SortedList<String, String>(capacity, comparer);
m_test.Eval(_dic.Comparer == comparer, String.Format("Err_848652ahued! Comparer differ expected: {0} actual: {1}", comparer, _dic.Comparer));
_dic.Add(strAE, value);
m_test.Eval(!_dic.ContainsKey(strUC4), String.Format("Err_235rdag! Wrong result returned: {0}", _dic.ContainsKey(strUC4)));
//bug #11263 in NDPWhidbey
CultureInfo.DefaultThreadCurrentCulture = _german;
comparer = StringComparer.CurrentCulture;
_dic = new SortedList<String, String>(capacity, comparer);
m_test.Eval(_dic.Comparer == comparer, String.Format("Err_54848ahuede! Comparer differ expected: {0} actual: {1}", comparer, _dic.Comparer));
_dic.Add(strAE, value);
m_test.Eval(!_dic.ContainsKey(strUC4), String.Format("Err_23r7ag! Wrong result returned: {0}", _dic.ContainsKey(strUC4)));
//CurrentCultureIgnoreCase
CultureInfo.DefaultThreadCurrentCulture = _english;
comparer = StringComparer.CurrentCultureIgnoreCase;
_dic = new SortedList<String, String>(capacity, comparer);
m_test.Eval(_dic.Comparer == comparer, String.Format("Err_788989ajeude! Comparer differ expected: {0} actual: {1}", comparer, _dic.Comparer));
_dic.Add(straA, value);
m_test.Eval(_dic.ContainsKey(strAa), String.Format("Err_237g! Wrong result returned: {0}", _dic.ContainsKey(strAa)));
CultureInfo.DefaultThreadCurrentCulture = _danish;
comparer = StringComparer.CurrentCultureIgnoreCase;
_dic = new SortedList<String, String>(capacity, comparer);
m_test.Eval(_dic.Comparer == comparer, String.Format("Err_54878aheuid! Comparer differ expected: {0} actual: {1}", comparer, _dic.Comparer));
_dic.Add(straA, value);
m_test.Eval(!_dic.ContainsKey(strAa), String.Format("Err_0723f! Wrong result returned: {0}", _dic.ContainsKey(strAa)));
//OrdinalIgnoreCase
CultureInfo.DefaultThreadCurrentCulture = _english;
comparer = StringComparer.OrdinalIgnoreCase;
_dic = new SortedList<String, String>(capacity, comparer);
m_test.Eval(_dic.Comparer == comparer, String.Format("Err_5588ahied! Comparer differ expected: {0} actual: {1}", comparer, _dic.Comparer));
_dic.Add(strI, value);
m_test.Eval(!_dic.ContainsKey(strTurkishUpperI), String.Format("Err_234qf! Wrong result returned: {0}", _dic.ContainsKey(strTurkishUpperI)));
CultureInfo.DefaultThreadCurrentCulture = _turkish;
comparer = StringComparer.OrdinalIgnoreCase;
_dic = new SortedList<String, String>(capacity, comparer);
m_test.Eval(_dic.Comparer == comparer, String.Format("Err_8488ahiued! Comparer differ expected: {0} actual: {1}", comparer, _dic.Comparer));
_dic.Add(strI, value);
m_test.Eval(!_dic.ContainsKey(strTurkishUpperI), String.Format("Err_234ra7g! Wrong result returned: {0}", _dic.ContainsKey(strTurkishUpperI)));
//Ordinal - not that many meaningful test
CultureInfo.DefaultThreadCurrentCulture = _english;
comparer = StringComparer.Ordinal;
_dic = new SortedList<String, String>(capacity, comparer);
m_test.Eval(_dic.Comparer == comparer, String.Format("Err_488ahede! Comparer differ expected: {0} actual: {1}", comparer, _dic.Comparer));
_dic.Add(strBB, value);
m_test.Eval(!_dic.ContainsKey(strbb), String.Format("Err_1244sd! Wrong result returned: {0}", _dic.ContainsKey(strbb)));
CultureInfo.DefaultThreadCurrentCulture = _danish;
comparer = StringComparer.Ordinal;
_dic = new SortedList<String, String>(capacity, comparer);
m_test.Eval(_dic.Comparer == comparer, String.Format("Err_05848ahied! Comparer differ expected: {0} actual: {1}", comparer, _dic.Comparer));
_dic.Add(strBB, value);
m_test.Eval(!_dic.ContainsKey(strbb), String.Format("Err_235aeg! Wrong result returned: {0}", _dic.ContainsKey(strbb)));
}
public void TestParm()
{
//passing null will revert to the default comparison mechanism
SortedList<String, String> _dic;
IComparer<String> comparer = null;
try
{
CultureInfo.DefaultThreadCurrentCulture = _english;
_dic = new SortedList<String, String>(0, comparer);
_dic.Add(straA, value);
m_test.Eval(!_dic.ContainsKey(strAa), String.Format("Err_9237g! Wrong result returned: {0}", _dic.ContainsKey(strAa)));
CultureInfo.DefaultThreadCurrentCulture = _danish;
_dic = new SortedList<String, String>(comparer);
_dic.Add(straA, value);
m_test.Eval(!_dic.ContainsKey(strAa), String.Format("Err_90723f! Wrong result returned: {0}", _dic.ContainsKey(strAa)));
}
catch (Exception ex)
{
m_test.Eval(false, String.Format("Err_387tsg! Wrong exception thrown: {0}", ex));
}
int[] negativeValues = { -1, -2, -5, Int32.MinValue };
comparer = StringComparer.CurrentCulture;
for (int i = 0; i < negativeValues.Length; i++)
{
try
{
_dic = new SortedList<String, String>(negativeValues[i], comparer);
m_test.Eval(false, String.Format("Err_387tsg! No exception thrown"));
}
catch (ArgumentOutOfRangeException)
{
}
catch (Exception ex)
{
m_test.Eval(false, String.Format("Err_387tsg! Wrong exception thrown: {0}", ex));
}
}
}
public void IkeyComparerOwnImplementation(int capacity)
{
            //This just ensures that we can call our own implementation
SortedList<String, String> _dic;
IComparer<String> comparer = new MyOwnIKeyImplementation<String>();
try
{
CultureInfo.DefaultThreadCurrentCulture = _english;
_dic = new SortedList<String, String>(capacity, comparer);
_dic.Add(straA, value);
m_test.Eval(!_dic.ContainsKey(strAa), String.Format("Err_0237g! Wrong result returned: {0}", _dic.ContainsKey(strAa)));
CultureInfo.DefaultThreadCurrentCulture = _danish;
_dic = new SortedList<String, String>(comparer);
_dic.Add(straA, value);
m_test.Eval(!_dic.ContainsKey(strAa), String.Format("Err_00723f! Wrong result returned: {0}", _dic.ContainsKey(strAa)));
}
catch (Exception ex)
{
m_test.Eval(false, String.Format("Err_387tsg! Wrong exception thrown: {0}", ex));
}
}
}
public class Constructor_int_IKeyComparer
{
[Fact]
public static void RunTests()
{
//This mostly follows the format established by the original author of these tests
            //These tests mostly use the scenarios that were used in the individual constructors
Test test = new Test();
Driver<String, String> driver1 = new Driver<String, String>(test);
            //Scenario 1: Pass a range of valid capacity values and ensure that the behavior is correct
int[] validCapacityValues = { 0, 1, 2, 5, 10, 16, 32, 50, 500, 5000, 10000 };
for (int i = 0; i < validCapacityValues.Length; i++)
driver1.TestVanilla(validCapacityValues[i]);
//Scenario 2: Parm validation: null for IKeyComparer and negative for capacity
driver1.TestParm();
//Scenario 3: Implement our own IKeyComparer and check
for (int i = 0; i < validCapacityValues.Length; i++)
driver1.IkeyComparerOwnImplementation(validCapacityValues[i]);
Assert.True(test.result);
}
}
//[Serializable]
internal class MyOwnIKeyImplementation<KeyType> : IComparer<KeyType>
{
public int GetHashCode(KeyType key)
{
            //We cannot get the hash code that is culture aware here since TextInfo doesn't expose this functionality publicly
return key.GetHashCode();
}
public int Compare(KeyType key1, KeyType key2)
{
            //We cannot get the hash code that is culture aware here since TextInfo doesn't expose this functionality publicly
return key1.GetHashCode();
}
public bool Equals(KeyType key1, KeyType key2)
{
return key1.Equals(key2);
}
}
}
| |
//---------------------------------------------------------------------
// <copyright file="Utils.cs" company="Microsoft">
// Copyright (c) Microsoft Corporation. All rights reserved.
// </copyright>
//
// @owner [....]
// @backupOwner [....]
//---------------------------------------------------------------------
namespace System.Data.EntityModel.SchemaObjectModel
{
using System;
using System.Data.Metadata.Edm;
using System.Diagnostics;
using System.Globalization;
using System.Text.RegularExpressions;
using System.Xml;
/// <summary>
/// Summary description for Utils.
/// </summary>
// make class internal when friend assemblies are available
internal static class Utils
{
#region Static Fields
// this is what we should be doing for CDM schemas
// the RegEx for valid identifiers are taken from the C# Language Specification (2.4.2 Identifiers)
// (except that we exclude _ as a valid starting character).
        // This results in a somewhat smaller set of identifiers than what System.CodeDom.Compiler.CodeGenerator.IsValidLanguageIndependentIdentifier
        // allows. Not all identifiers allowed by IsValidLanguageIndependentIdentifier are valid in C#. IsValidLanguageIndependentIdentifier allows:
// Mn, Mc, and Pc as a leading character (which the spec and C# (at least for some Mn and Mc characters) do not allow)
// characters that Char.GetUnicodeCategory says are in Nl and Cf but which the RegEx does not accept (and which C# does allow).
//
// we could create the StartCharacterExp and OtherCharacterExp dynamically to force inclusion of the missing Nl and Cf characters...
private const string StartCharacterExp = @"[\p{Ll}\p{Lu}\p{Lt}\p{Lo}\p{Lm}\p{Nl}]";
private const string OtherCharacterExp = @"[\p{Ll}\p{Lu}\p{Lt}\p{Lo}\p{Lm}\p{Nl}\p{Mn}\p{Mc}\p{Nd}\p{Pc}\p{Cf}]";
private const string NameExp = StartCharacterExp+OtherCharacterExp+"{0,}";
//private static Regex ValidDottedName=new Regex(@"^"+NameExp+@"(\."+NameExp+@"){0,}$",RegexOptions.Singleline);
private static Regex UndottedNameValidator = new Regex(@"^"+NameExp+@"$",RegexOptions.Singleline | RegexOptions.Compiled );
#endregion
#region Static Methods
internal static void ExtractNamespaceAndName(SchemaDataModelOption dataModel, string qualifiedTypeName, out string namespaceName, out string name)
{
Debug.Assert(!string.IsNullOrEmpty(qualifiedTypeName), "qualifiedTypeName parameter is null");
GetBeforeAndAfterLastPeriod(qualifiedTypeName, out namespaceName, out name);
}
internal static string ExtractTypeName(SchemaDataModelOption dataModel, string qualifiedTypeName)
{
Debug.Assert(!string.IsNullOrEmpty(qualifiedTypeName), "qualifiedTypeName parameter is null or empty");
return GetEverythingAfterLastPeriod(qualifiedTypeName);
}
private static void GetBeforeAndAfterLastPeriod(string qualifiedTypeName, out string before, out string after)
{
int lastDot = qualifiedTypeName.LastIndexOf('.');
if (lastDot < 0)
{
before = null;
after = qualifiedTypeName;
}
else
{
before = qualifiedTypeName.Substring(0, lastDot);
after = qualifiedTypeName.Substring(lastDot + 1);
}
}
internal static string GetEverythingBeforeLastPeriod(string qualifiedTypeName)
{
int lastDot = qualifiedTypeName.LastIndexOf('.');
if (lastDot < 0)
return null;
return qualifiedTypeName.Substring(0, lastDot);
}
private static string GetEverythingAfterLastPeriod(string qualifiedTypeName)
{
int lastDot = qualifiedTypeName.LastIndexOf('.');
if (lastDot < 0)
return qualifiedTypeName;
return qualifiedTypeName.Substring(lastDot + 1);
}
        /// <summary>
        /// Reads the current reader value as a non-empty string, adding a schema error if it is empty or failed xsd validation.
        /// </summary>
        /// <param name="schema">The schema used for error reporting.</param>
        /// <param name="reader">The reader positioned on the value to read.</param>
        /// <param name="value">The string value that was read.</param>
        /// <returns>true if a non-empty, valid value was read; otherwise false.</returns>
public static bool GetString(Schema schema, XmlReader reader, out string value)
{
Debug.Assert(schema != null, "schema parameter is null");
Debug.Assert(reader != null, "reader parameter is null");
if (reader.SchemaInfo.Validity == System.Xml.Schema.XmlSchemaValidity.Invalid)
{
// an error has already been issued by the xsd validation
value = null;
return false;
}
value = reader.Value;
if ( string.IsNullOrEmpty(value) )
{
schema.AddError( ErrorCode.InvalidName, EdmSchemaErrorSeverity.Error, reader,
System.Data.Entity.Strings.InvalidName(value, reader.Name));
return false;
}
return true;
}
        /// <summary>
        /// Reads a dotted (namespace-qualified) name from the reader and validates each of its parts.
        /// </summary>
        /// <param name="schema">The schema used for error reporting.</param>
        /// <param name="reader">The reader positioned on the name to read.</param>
        /// <param name="name">The dotted name that was read.</param>
        /// <returns>true if the name was read and is valid; otherwise false.</returns>
public static bool GetDottedName(Schema schema, XmlReader reader,out string name)
{
if (!GetString(schema, reader, out name))
{
return false;
}
return ValidateDottedName(schema, reader, name);
}
internal static bool ValidateDottedName(Schema schema, XmlReader reader, string name)
{
Debug.Assert(schema != null, "schema parameter is null");
Debug.Assert(reader != null, "reader parameter is null");
Debug.Assert(!string.IsNullOrEmpty(name), "name parameter is null or empty");
Debug.Assert(reader.SchemaInfo.Validity != System.Xml.Schema.XmlSchemaValidity.Invalid, "This method should not be called when the schema is invalid");
if (schema.DataModel == SchemaDataModelOption.EntityDataModel)
{
// each part of the dotted name needs to be a valid name
foreach (string namePart in name.Split('.'))
{
if (!ValidUndottedName(namePart))
{
schema.AddError(ErrorCode.InvalidName, EdmSchemaErrorSeverity.Error, reader,
System.Data.Entity.Strings.InvalidName(name, reader.Name));
return false;
}
}
}
return true;
}
        /// <summary>
        /// Reads a simple (undotted) name from the reader, adding a schema error if it is empty or not a valid identifier.
        /// </summary>
        /// <param name="schema">The schema used for error reporting.</param>
        /// <param name="reader">The reader positioned on the name to read.</param>
        /// <param name="name">The undotted name that was read.</param>
        /// <returns>true if the name was read and is valid; otherwise false.</returns>
public static bool GetUndottedName(Schema schema,XmlReader reader,out string name)
{
Debug.Assert(schema != null, "schema parameter is null");
Debug.Assert(reader != null, "reader parameter is null");
if (reader.SchemaInfo.Validity == System.Xml.Schema.XmlSchemaValidity.Invalid)
{
// the xsd already put in an error
name = null;
return false;
}
name = reader.Value;
if (string.IsNullOrEmpty(name))
{
schema.AddError( ErrorCode.InvalidName, EdmSchemaErrorSeverity.Error, reader,
System.Data.Entity.Strings.EmptyName(reader.Name));
return false;
}
if (schema.DataModel == SchemaDataModelOption.EntityDataModel &&
!ValidUndottedName(name) )
{
schema.AddError( ErrorCode.InvalidName, EdmSchemaErrorSeverity.Error, reader,
System.Data.Entity.Strings.InvalidName(name,reader.Name));
return false;
}
Debug.Assert(!(schema.DataModel == SchemaDataModelOption.EntityDataModel && name.IndexOf('.') >= 0),
string.Format(CultureInfo.CurrentCulture, "{1} ({0}) is not valid. {1} cannot be qualified.", name, reader.Name));
return true;
}
        /// <summary>
        /// Determines whether the given name is a valid simple (undotted) identifier.
        /// </summary>
        /// <param name="name">The name to validate.</param>
        /// <returns>true if the name is a valid identifier; otherwise false.</returns>
internal static bool ValidUndottedName(string name)
{
// CodeGenerator.IsValidLanguageIndependentIdentifier does demand a FullTrust Link
// but this is safe since the function only walks over the string no risk is introduced
return !string.IsNullOrEmpty(name) && UndottedNameValidator.IsMatch(name)
&& IsValidLanguageIndependentIdentifier(name);
}
[System.Diagnostics.CodeAnalysis.SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands")]
[System.Security.SecuritySafeCritical]
private static bool IsValidLanguageIndependentIdentifier(string name)
{
return System.CodeDom.Compiler.CodeGenerator.IsValidLanguageIndependentIdentifier(name);
}
        /// <summary>
        /// Reads the current reader value as a boolean, adding a schema error if the content is not a valid xsd:boolean.
        /// </summary>
        /// <param name="schema">The schema used for error reporting.</param>
        /// <param name="reader">The reader positioned on the value to read.</param>
        /// <param name="value">The boolean value that was read; set to true when the read fails.</param>
        /// <returns>true if a boolean value was read; otherwise false.</returns>
public static bool GetBool(Schema schema, XmlReader reader, out bool value)
{
Debug.Assert(schema != null, "schema parameter is null");
Debug.Assert(reader != null, "reader parameter is null");
if ( reader.SchemaInfo.Validity == System.Xml.Schema.XmlSchemaValidity.Invalid )
{
value = true; // we have to set the value to something before returning.
return false;
}
// do this in a try catch, just in case the attribute wasn't validated against an xsd:boolean
try
{
value = reader.ReadContentAsBoolean();
return true;
}
catch (System.Xml.XmlException)
{
// we already handled the valid and invalid cases, so it must be NotKnown now.
Debug.Assert(reader.SchemaInfo.Validity == Xml.Schema.XmlSchemaValidity.NotKnown, "The schema validity must be NotKnown at this point");
schema.AddError(ErrorCode.BoolValueExpected, EdmSchemaErrorSeverity.Error, reader,
System.Data.Entity.Strings.ValueNotUnderstood(reader.Value, reader.Name));
}
value = true; // we have to set the value to something before returning.
return false;
}
public static bool GetInt(Schema schema,XmlReader reader,out int value)
{
Debug.Assert(schema != null, "schema parameter is null");
Debug.Assert(reader != null, "reader parameter is null");
if (reader.SchemaInfo.Validity == System.Xml.Schema.XmlSchemaValidity.Invalid)
{
// an error has already been issued by the xsd validation
                value = 0;
return false;
}
string text = reader.Value;
value = int.MinValue;
if ( int.TryParse(text,NumberStyles.Integer,System.Globalization.CultureInfo.InvariantCulture,out value) )
return true;
schema.AddError( ErrorCode.IntegerExpected, EdmSchemaErrorSeverity.Error, reader,
System.Data.Entity.Strings.ValueNotUnderstood(reader.Value,reader.Name));
return false;
}
public static bool GetByte(Schema schema,XmlReader reader,out byte value)
{
Debug.Assert(schema != null, "schema parameter is null");
Debug.Assert(reader != null, "reader parameter is null");
if (reader.SchemaInfo.Validity == System.Xml.Schema.XmlSchemaValidity.Invalid)
{
// an error has already been issued by the xsd validation
                value = 0;
return false;
}
string text = reader.Value;
value = byte.MinValue;
if (byte.TryParse(text, NumberStyles.Integer, System.Globalization.CultureInfo.InvariantCulture, out value))
{
return true;
}
schema.AddError( ErrorCode.ByteValueExpected, EdmSchemaErrorSeverity.Error, reader,
System.Data.Entity.Strings.ValueNotUnderstood(reader.Value, reader.Name));
return false;
}
        /// <summary>
        /// Compares two names using ordinal (case-sensitive, culture-invariant) comparison.
        /// </summary>
        /// <param name="lhsName">The first name to compare.</param>
        /// <param name="rhsName">The second name to compare.</param>
        /// <returns>A negative number, zero, or a positive number, as defined by string.Compare.</returns>
public static int CompareNames(string lhsName, string rhsName)
{
return string.Compare(lhsName,rhsName,StringComparison.Ordinal);
}
#endregion
}
}
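// Illustrative sketch (not part of the schema object model): shows what the identifier pattern above
// accepts and rejects, using the same character classes as StartCharacterExp/OtherCharacterExp.
// It is standalone on purpose so it does not depend on the internal members of Utils.
#if UTILS_IDENTIFIER_SKETCH
namespace System.Data.EntityModel.SchemaObjectModel.Examples
{
    using System.Text.RegularExpressions;

    internal static class UndottedNameSketch
    {
        private static readonly Regex Validator = new Regex(
            @"^[\p{Ll}\p{Lu}\p{Lt}\p{Lo}\p{Lm}\p{Nl}][\p{Ll}\p{Lu}\p{Lt}\p{Lo}\p{Lm}\p{Nl}\p{Mn}\p{Mc}\p{Nd}\p{Pc}\p{Cf}]*$",
            RegexOptions.Singleline);

        internal static void Run()
        {
            Console.WriteLine(Validator.IsMatch("Customer"));  // True: starts with a letter
            Console.WriteLine(Validator.IsMatch("_Customer")); // False: a leading underscore is excluded
            Console.WriteLine(Validator.IsMatch("Order2"));    // True: digits are allowed after the first character
        }
    }
}
#endif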
| |
// Python Tools for Visual Studio
// Copyright(c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the License); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
// OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
// IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABILITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing
// permissions and limitations under the License.
using System;
using System.IO;
using System.Net;
using System.Net.WebSockets;
using System.Threading;
using System.Threading.Tasks;
using System.Windows.Forms;
using Microsoft.PythonTools.Infrastructure;
using Microsoft.PythonTools.Ipc.Json;
using Microsoft.VisualStudio.Debugger.Interop;
using LDP = Microsoft.PythonTools.Debugger.LegacyDebuggerProtocol;
namespace Microsoft.PythonTools.Debugger.Remote {
internal class PythonRemoteDebugProcess : IDebugProcess2, IDebugProcessSecurity2 {
private readonly PythonRemoteDebugPort _port;
private readonly int _pid;
private readonly string _exe;
private readonly string _username;
private readonly string _version;
public PythonRemoteDebugProcess(PythonRemoteDebugPort port, int pid, string exe, string username, string version) {
this._port = port;
this._pid = (pid == 0) ? 1 : pid; // attach dialog won't show processes with pid==0
this._username = username;
this._exe = string.IsNullOrEmpty(exe) ? "<python>" : exe;
this._version = version;
}
public PythonRemoteDebugPort DebugPort {
get { return _port; }
}
public int Attach(IDebugEventCallback2 pCallback, Guid[] rgguidSpecificEngines, uint celtSpecificEngines, int[] rghrEngineAttach) {
throw new NotImplementedException();
}
public int CanDetach() {
return 0; // S_OK = true, S_FALSE = false
}
public int CauseBreak() {
return 0; // S_OK = true, S_FALSE = false
}
public int Detach() {
throw new NotImplementedException();
}
public int EnumPrograms(out IEnumDebugPrograms2 ppEnum) {
ppEnum = new PythonRemoteEnumDebugPrograms(this);
return 0;
}
public int EnumThreads(out IEnumDebugThreads2 ppEnum) {
throw new NotImplementedException();
}
public int GetAttachedSessionName(out string pbstrSessionName) {
throw new NotImplementedException();
}
public int GetInfo(enum_PROCESS_INFO_FIELDS Fields, PROCESS_INFO[] pProcessInfo) {
// The various string fields should match the strings returned by GetName - keep them in sync when making any changes here.
var pi = new PROCESS_INFO();
pi.Fields = Fields;
pi.bstrFileName = _exe;
pi.bstrBaseName = BaseName;
pi.bstrTitle = Title;
pi.ProcessId.dwProcessId = (uint)_pid;
pProcessInfo[0] = pi;
return 0;
}
public int GetName(enum_GETNAME_TYPE gnType, out string pbstrName) {
// The return value should match the corresponding string field returned from GetInfo - keep them in sync when making any changes here.
switch (gnType) {
case enum_GETNAME_TYPE.GN_FILENAME:
pbstrName = _exe;
break;
case enum_GETNAME_TYPE.GN_BASENAME:
pbstrName = BaseName;
break;
case enum_GETNAME_TYPE.GN_NAME:
case enum_GETNAME_TYPE.GN_TITLE:
pbstrName = _version;
break;
default:
pbstrName = null;
break;
}
return 0;
}
public int GetPhysicalProcessId(AD_PROCESS_ID[] pProcessId) {
var pidStruct = new AD_PROCESS_ID();
pidStruct.dwProcessId = (uint)_pid;
pProcessId[0] = pidStruct;
return 0;
}
public int GetPort(out IDebugPort2 ppPort) {
ppPort = _port;
return 0;
}
public int GetProcessId(out Guid pguidProcessId) {
pguidProcessId = Guid.Empty;
return 0;
}
public int GetServer(out IDebugCoreServer2 ppServer) {
throw new NotImplementedException();
}
public int Terminate() {
throw new NotImplementedException();
}
public int GetUserName(out string pbstrUserName) {
pbstrUserName = _username;
return 0;
}
public int QueryCanSafelyAttach() {
return 0; // S_OK = true, S_FALSE = false
}
// AzureExplorerAttachDebuggerCommand looks up remote processes by name, and has to be updated if the format of this property changes.
private string BaseName {
get {
string fileName = PathUtils.GetFileOrDirectoryName(_exe);
if (string.IsNullOrEmpty(fileName)) {
fileName = _exe;
}
// Strip out the secret to avoid showing it in the process list.
return fileName + " @ " + new UriBuilder(_port.Uri) { UserName = null };
}
}
private string Title {
get { return _version; }
}
public static async Task<PythonRemoteDebugProcess> ConnectAsync(PythonRemoteDebugPort port, TextWriter debugLog, CancellationToken ct) {
PythonRemoteDebugProcess process = null;
// Connect to the remote debugging server and obtain process information. If any errors occur, display an error dialog, and keep
            // trying for as long as the user clicks "Retry".
while (true) {
DebugConnection debugConn = null;
ConnectionException connEx = null;
try {
// Process information is not sensitive, so ignore any SSL certificate errors, rather than bugging the user with warning dialogs.
debugConn = await PythonRemoteProcess.ConnectAsync(port.Uri, false, debugLog, ct);
} catch (ConnectionException ex) {
connEx = ex;
}
using (debugConn) {
if (debugConn != null) {
try {
var response = await debugConn.SendRequestAsync(new LDP.RemoteDebuggerInfoRequest(), ct);
process = new PythonRemoteDebugProcess(port, response.processId, response.executable, response.user, response.pythonVersion);
break;
} catch (IOException ex) {
connEx = new ConnectionException(ConnErrorMessages.RemoteNetworkError, ex);
} catch (FailedRequestException ex) {
connEx = new ConnectionException(ConnErrorMessages.RemoteNetworkError, ex);
}
}
if (connEx != null) {
string errText;
switch (connEx.Error) {
case ConnErrorMessages.RemoteUnsupportedServer:
errText = Strings.RemoteUnsupportedServer_Host.FormatUI(port.Uri);
break;
case ConnErrorMessages.RemoteSecretMismatch:
errText = Strings.RemoteSecretMismatch_Host.FormatUI(new UriBuilder(port.Uri) { UserName = null, Password = null }.Uri);
break;
case ConnErrorMessages.RemoteSslError:
// User has already got a warning dialog and clicked "Cancel" on that, so no further prompts are needed.
return null;
default:
{
// Azure uses HTTP 503 (Service Unavailable) to indicate that websocket connections are not supported. Show a special error message for that.
var wsEx = connEx.InnerException as WebSocketException;
if (wsEx != null) {
var webEx = wsEx.InnerException as WebException;
if (webEx != null) {
var httpResponse = webEx.Response as HttpWebResponse;
if (httpResponse != null && httpResponse.StatusCode == HttpStatusCode.ServiceUnavailable) {
errText = Strings.RemoteAzureServiceUnavailable_Host.FormatUI(port.Uri);
break;
}
}
}
errText = Strings.RemoteServiceUnavailable_Host.FormatUI(port.Uri);
for (var ex = connEx.InnerException; ex != null; ex = ex.InnerException) {
if (ex.InnerException == null) {
errText += "\r\n\r\n{0}\r\n{1}".FormatUI(Strings.AdditionalInformation, ex.Message);
}
}
break;
}
}
DialogResult dlgRes = MessageBox.Show(errText, Strings.ProductTitle, MessageBoxButtons.RetryCancel, MessageBoxIcon.Error);
if (dlgRes != DialogResult.Retry) {
break;
}
}
}
}
return process;
}
}
}
| |
// Python Tools for Visual Studio
// Copyright(c) Microsoft Corporation
// All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the License); you may not use
// this file except in compliance with the License. You may obtain a copy of the
// License at http://www.apache.org/licenses/LICENSE-2.0
//
// THIS CODE IS PROVIDED ON AN *AS IS* BASIS, WITHOUT WARRANTIES OR CONDITIONS
// OF ANY KIND, EITHER EXPRESS OR IMPLIED, INCLUDING WITHOUT LIMITATION ANY
// IMPLIED WARRANTIES OR CONDITIONS OF TITLE, FITNESS FOR A PARTICULAR PURPOSE,
// MERCHANTABILITY OR NON-INFRINGEMENT.
//
// See the Apache Version 2.0 License for specific language governing
// permissions and limitations under the License.
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Net;
using System.Runtime.InteropServices;
using System.Text;
using System.Text.RegularExpressions;
using System.Windows.Forms;
using System.Xml;
using System.Xml.XPath;
using Microsoft.Win32;
namespace AzureSetup {
class Program {
static readonly string[] SystemPackages = new[] {
"IIS-WebServerRole",
"IIS-WebServer",
"IIS-CommonHttpFeatures",
"IIS-StaticContent",
"IIS-DefaultDocument",
"IIS-DirectoryBrowsing",
"IIS-HttpErrors",
"IIS-HealthAndDiagnostics",
"IIS-HttpLogging",
"IIS-LoggingLibraries",
"IIS-RequestMonitor",
"IIS-Security",
"IIS-RequestFiltering",
"IIS-HttpCompressionStatic",
"IIS-WebServerManagementTools",
"IIS-ManagementConsole",
"WAS-WindowsActivationService",
"WAS-ProcessModel",
"WAS-NetFxEnvironment",
"WAS-ConfigurationAPI",
"IIS-CGI"
};
[STAThread]
static void Main(string[] args) {
try {
ConfigureFastCgi();
} catch (Exception ex) {
var logMessage = string.Format("Failed to initialize role.\r\n\r\nException:\r\n{0}", ex);
bool wroteLog = false;
try {
File.AppendAllText("AzureSetup.log", logMessage);
wroteLog = true;
} catch {
Console.Error.WriteLine("Unable to write to log file.");
}
if (IsEmulated()) {
// In the emulator, so display some UI.
MessageBox.Show(
string.Format("Initialization failed due to a {0}:\r\n {1}.", ex.GetType().Name, ex.Message),
"Python Tools for Visual Studio"
);
}
if (!wroteLog) {
// Failed to write to the log, so rethrow the exception so
// it will be shown on the console, will be added to the
// event log, and also sent via WER.
throw;
}
}
}
public static void ConfigureFastCgi() {
var psi = new ProcessStartInfo();
// pkgmgr.exe is deprecated for Windows 7/Server 2008R2 and later in
// favor of dism.exe, and displays UI when invoked on these
// platforms.
// If dism.exe exists, we will use that; otherwise, we will fall
// back on pkgmgr.exe. The feature names are identical between the
// two programs, though the command lines differ slightly.
var dismPath = Path.Combine(Environment.GetEnvironmentVariable("windir"), "System32", "dism.exe");
var pkgMgrPath = Path.Combine(Environment.GetEnvironmentVariable("windir"), "System32", "PkgMgr.exe");
// enable FastCGI in IIS
if (File.Exists(dismPath)) {
psi.FileName = dismPath;
psi.Arguments = "/Online /NoRestart /Enable-Feature /FeatureName:" + string.Join(" /FeatureName:", SystemPackages);
} else if (File.Exists(pkgMgrPath)) {
psi.FileName = pkgMgrPath;
psi.Arguments = "/quiet /iu:" + string.Join(";", SystemPackages);
} else {
Console.Error.WriteLine("Unable to install IIS FastCGI features.");
psi = null;
}
if (psi != null) {
psi.CreateNoWindow = true;
psi.WindowStyle = ProcessWindowStyle.Hidden;
using (var proc = Process.Start(psi)) {
proc.WaitForExit();
}
}
// Crack RoleModel.xml to figure out where our site lives...
var roleRoot = Environment.GetEnvironmentVariable("RoleRoot");
// In the cloud the RoleRoot is "E:" instead of "E:\", so we
// manually add the backslash to ensure it is not interpreted as a
// relative path.
if (!roleRoot.EndsWith("\\")) {
roleRoot = roleRoot + "\\";
}
var doc = new XPathDocument(Path.Combine(roleRoot, "RoleModel.xml"));
var navigator = doc.CreateNavigator();
XmlNamespaceManager mngr = new XmlNamespaceManager(new NameTable());
mngr.AddNamespace("sd", "http://schemas.microsoft.com/ServiceHosting/2008/10/ServiceDefinition");
var nodes = navigator.Select("/sd:RoleModel/sd:Sites/sd:Site", mngr);
string physicalDir = null;
foreach (XPathNavigator node in nodes) {
// TODO: Multiple sites?
physicalDir = node.GetAttribute("physicalDirectory", "");
break;
}
if (!Path.IsPathRooted(physicalDir)) {
physicalDir = Path.Combine(roleRoot, physicalDir);
}
nodes = navigator.Select("/sd:RoleModel/sd:Properties/sd:Property[@name='Configuration']", mngr);
bool isDebug = false;
foreach (XPathNavigator node in nodes) {
isDebug |= node.GetAttribute("value", "") == "Debug";
}
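// Illustrative (hypothetical) RoleModel.xml fragment matching the XPath
// queries above; the real file is produced by the Azure packaging tools and
// contains additional elements:
//
// <RoleModel xmlns="http://schemas.microsoft.com/ServiceHosting/2008/10/ServiceDefinition">
//   <Sites>
//     <Site name="Web" physicalDirectory="approot" />
//   </Sites>
//   <Properties>
//     <Property name="Configuration" value="Debug" />
//   </Properties>
// </RoleModel>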
string interpreter = null, interpreterEmulated = null;
if (physicalDir != null) {
string fastCgiPath = "\"" + Path.Combine(physicalDir, "bin", "wfastcgi.py") + "\"";
string settingsName = null, pythonPath = null;
string setupCfg = Path.Combine(physicalDir, "bin", "AzureSetup.cfg");
List<string> webpiInstalls = new List<string>();
if (File.Exists(setupCfg)) {
try {
var allLines = File.ReadAllLines(setupCfg);
foreach (var line in allLines) {
var curOptions = line.Split(new[] { '=' }, 2);
if (curOptions.Length == 2) {
switch (curOptions[0]) {
case "settings_module":
settingsName = curOptions[1];
break;
case "python_path":
pythonPath = Environment.ExpandEnvironmentVariables(
Regex.Replace(
curOptions[1],
Regex.Escape("%RootDir%"),
physicalDir,
RegexOptions.IgnoreCase
)
);
break;
case "interpreter_path":
interpreter = Environment.ExpandEnvironmentVariables(
Regex.Replace(
curOptions[1],
Regex.Escape("%RootDir%"),
physicalDir,
RegexOptions.IgnoreCase
)
);
break;
case "interpreter_path_emulated":
interpreterEmulated = Environment.ExpandEnvironmentVariables(
Regex.Replace(
curOptions[1],
Regex.Escape("%RootDir%"),
physicalDir,
RegexOptions.IgnoreCase
)
);
break;
case "webpi_install":
webpiInstalls.Add(
curOptions[1]
);
break;
}
}
}
} catch (IOException) {
}
}
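// Illustrative (hypothetical) AzureSetup.cfg; only the keys handled in the
// switch above are read, %RootDir% is replaced with the site's physical
// directory, and environment variables are expanded:
//
// interpreter_path=%SystemDrive%\Python27\python.exe
// interpreter_path_emulated=%RootDir%\env\Scripts\python.exe
// settings_module=MyDjangoApp.settings
// python_path=%RootDir%\..
// webpi_install=<feed url>;<product id>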
InstallWebPiProducts(physicalDir, webpiInstalls);
if (interpreterEmulated != null && IsEmulated()) {
interpreter = interpreterEmulated;
}
if (String.IsNullOrEmpty(interpreter)) {
// TODO: Better discovery....
interpreter = Path.Combine(
Environment.GetEnvironmentVariable("SystemDrive") + "\\",
"Python27",
"python.exe"
);
}
if (settingsName == null) {
if (physicalDir.Length > 0 && physicalDir[physicalDir.Length - 1] == Path.DirectorySeparatorChar) {
settingsName = Path.GetFileName(physicalDir.Substring(0, physicalDir.Length - 1));
} else {
settingsName = Path.GetFileName(physicalDir);
}
settingsName += ".settings";
}
if (pythonPath == null) {
pythonPath = Path.Combine(physicalDir, "..");
}
UpdateIISAppCmd(interpreter, physicalDir, isDebug, fastCgiPath);
UpdateWebConfig(interpreter, physicalDir, fastCgiPath);
}
}
private static string GetWebPICommand(string physicalDir) {
string webpiSetup, webpiCmdName, webpiMsiName;
if (Environment.Is64BitOperatingSystem) {
webpiSetup = "https://go.microsoft.com/fwlink/?linkid=226239";
webpiCmdName = "webpicmd-x64.exe";
webpiMsiName = "WebPlatformInstaller_amd64_en-US.msi";
} else {
webpiSetup = "https://go.microsoft.com/fwlink/?linkid=226238";
webpiCmdName = "webpicmd.exe";
webpiMsiName = "WebPlatformInstaller_x86_en-US.msi";
}
string path = Path.Combine(physicalDir, "bin", "WebPICmdLine.exe");
if (File.Exists(path)) {
return path;
}
path = Path.Combine(
Environment.GetFolderPath(Environment.SpecialFolder.ProgramFiles),
"Microsoft",
"Web Platform Installer",
webpiCmdName
);
if (File.Exists(path)) {
return path;
}
var webpiMsi = Path.Combine(physicalDir, webpiMsiName);
if (!File.Exists(webpiMsi)) {
webpiMsi = Path.Combine(physicalDir, "bin", webpiMsiName);
if (!File.Exists(webpiMsi)) {
var req = WebRequest.Create(webpiSetup);
// Dispose the response and stream once the download completes.
using (var resp = req.GetResponse())
using (var stream = resp.GetResponseStream())
using (var msi = File.OpenWrite(webpiMsi)) {
var buffer = new byte[4096];
int bytesRead;
while ((bytesRead = stream.Read(buffer, 0, buffer.Length)) > 0) {
msi.Write(buffer, 0, bytesRead);
}
}
}
}
using (var p = Process.Start("msiexec", "/quiet /i \"" + webpiMsi + "\" ADDLOCAL=ALL")) {
p.WaitForExit();
}
return File.Exists(path) ? path : null;
}
private static void UpdateIISAppCmd(string interpreter, string physicalDir, bool isDebug, string fastCgiPath) {
var appCmd = Environment.GetEnvironmentVariable("APPCMD");
if (String.IsNullOrEmpty(appCmd)) {
appCmd = Path.Combine(Environment.GetEnvironmentVariable("WINDIR"), "System32", "inetsrv", "appcmd.exe");
}
interpreter = Escape(interpreter);
fastCgiPath = Escape(fastCgiPath);
RunAppCmd(appCmd,
"set config /section:system.webServer/fastCGI \"/+[fullPath='{0}', arguments='{1}', instanceMaxRequests='{2}', signalBeforeTerminateSeconds='30']\"",
interpreter,
fastCgiPath,
isDebug ? "1" : "10000"
);
}
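// For illustration (hypothetical paths), a release build produces a command
// line roughly equivalent to:
//
// appcmd.exe set config /section:system.webServer/fastCGI
//   "/+[fullPath='C:\Python27\python.exe', arguments='\"C:\app\bin\wfastcgi.py\"',
//   instanceMaxRequests='10000', signalBeforeTerminateSeconds='30']"
//
// RunAppCmd below may also append extra arguments carried by the APPCMD
// environment variable.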
private static void AppCmdSetProperty(string interpreter, string fastCgiPath, string appCmd, string propertyName, string value) {
RunAppCmd(appCmd,
"set config -section:system.webServer/fastCgi \"^/[fullPath='{0}', arguments='{1}'].{2}:{3}\"",
interpreter,
fastCgiPath,
propertyName,
value
);
}
private static void AppCmdSetEnv(string interpreter, string fastCgiPath, string appCmd, string varName, string value) {
RunAppCmd(appCmd,
"set config -section:system.webServer/fastCgi /+\"[fullPath='{0}', arguments='{1}'].environmentVariables.[name='{2}',value='{3}']\"",
interpreter,
fastCgiPath,
varName,
Escape(value)
);
}
private static string Escape(string interpreter) {
// http://msdn.microsoft.com/en-us/library/bb776391(VS.85).aspx
// 2n backslashes followed by a quotation mark produce n backslashes followed by a quotation mark.
// (2n) + 1 backslashes followed by a quotation mark again produce n backslashes followed by a quotation mark.
// n backslashes not followed by a quotation mark simply produce n backslashes.
StringBuilder res = new StringBuilder();
int backslashCount = 0;
for (int i = 0; i < interpreter.Length; i++) {
if (interpreter[i] == '"') {
for (int j = 0; j < backslashCount; j++) {
res.Append('\\');
}
res.Append("\\\"");
backslashCount = 0;
} else if (interpreter[i] == '\\') {
backslashCount++;
} else {
for (int j = 0; j < backslashCount; j++) {
res.Append('\\');
}
res.Append(interpreter[i]);
backslashCount = 0;
}
}
// Flush any trailing backslashes: per the rule above, n backslashes not
// followed by a quotation mark simply produce n backslashes, so they must
// not be dropped when the input ends with backslashes.
for (int j = 0; j < backslashCount; j++) {
res.Append('\\');
}
return res.ToString();
}
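// Illustrative results of Escape (inputs written as C# literals; paths are
// hypothetical):
// Escape(@"C:\Python27\python.exe") -> C:\Python27\python.exe (nothing to escape)
// Escape("\"C:\\app\\wfastcgi.py\"") -> \"C:\app\wfastcgi.py\" (embedded quotes escaped)
// Escape(@"C:\app\bin\") -> C:\app\bin\ (trailing backslashes preserved)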
private static void RunAppCmd(string appCmd, string argStr, params string[] args) {
string fullArgs = String.Format(argStr, args);
// The APPCMD environment variable may contain a (possibly quoted) path to
// appcmd.exe followed by extra default arguments. Split it at "appcmd.exe",
// keep the quoted executable path, and append any remainder after the
// formatted arguments.
var appCmdEnd = appCmd.IndexOf("appcmd.exe", StringComparison.OrdinalIgnoreCase);
if (appCmdEnd != -1) {
if (appCmd[0] == '\"') {
// "D:\Program Files\IIS Express\appcmd.exe"
var closeQuote = appCmdEnd + "appcmd.exe".Length;
if (closeQuote < appCmd.Length &&
appCmd[closeQuote] == '"') {
appCmdEnd++;
}
}
var appCmdCmd = appCmd.Substring(0, appCmdEnd + "appcmd.exe".Length);
fullArgs = fullArgs + appCmd.Substring(appCmdCmd.Length);
var psi = new ProcessStartInfo(appCmdCmd, fullArgs);
psi.UseShellExecute = false;
var proc = Process.Start(psi);
proc.WaitForExit();
}
}
private static void UpdateWebConfig(string interpreter, string physicalDir, string fastCgiPath) {
// patch web.config w/ the correct path to our fast cgi script
var webCloudConfig = Path.Combine(physicalDir, "web.cloud.config");
var webConfig = Path.Combine(physicalDir, "web.config");
string readFrom;
if (!IsEmulated() && File.Exists(webCloudConfig)) {
readFrom = webCloudConfig;
} else {
readFrom = webConfig;
}
var text = File.ReadAllText(readFrom);
text = Regex.Replace(text, "%WFASTCGIPATH%", fastCgiPath.Replace("\"", """), RegexOptions.IgnoreCase);
text = Regex.Replace(text, "%INTERPRETERPATH%", interpreter, RegexOptions.IgnoreCase);
text = Regex.Replace(text, "%ROOTDIR%", physicalDir, RegexOptions.IgnoreCase);
File.WriteAllText(webConfig, text);
}
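// Illustrative (hypothetical) handler mapping that such a web.config template
// might contain before substitution; the real template ships with the project
// and may differ:
//
// <system.webServer>
//   <handlers>
//     <add name="PythonHandler" path="*" verb="*" modules="FastCgiModule"
//          scriptProcessor="%INTERPRETERPATH%|%WFASTCGIPATH%"
//          resourceType="Unspecified" />
//   </handlers>
// </system.webServer>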
private static void InstallWebPiProducts(string physicalDir, List<string> webpiInstalls) {
if (IsEmulated() || webpiInstalls.Count == 0) {
// Don't run installs in the emulator
return;
}
// Get WebPI. This may download and install it if it is not already
// available.
string webpiCmdLinePath = GetWebPICommand(physicalDir);
if (!File.Exists(webpiCmdLinePath)) {
return;
}
// Deal w/ 32-bit vs 64-bit folder redirection of SYSTEM account...
// http://blog.smarx.com/posts/windows-azure-startup-tasks-tips-tricks-and-gotchas
// http://www.davidaiken.com/2011/01/19/running-azure-startup-tasks-as-a-real-user/
// We will create a new directory and set our local app data to be there.
var name = "AppData" + Guid.NewGuid();
string dir;
for (; ; ) {
dir = Path.Combine(
Environment.GetEnvironmentVariable("SystemDrive") + "\\",
"SystemAppData" + Path.GetRandomFileName()
);
if (Directory.Exists(dir)) {
continue;
}
Directory.CreateDirectory(dir);
break;
}
const string userShellFolders = ".DEFAULT\\Software\\Microsoft\\Windows\\CurrentVersion\\Explorer\\User Shell Folders";
const string localAppData = "Local AppData";
using (var key = Registry.Users.OpenSubKey(userShellFolders, true)) {
var oldValue = key.GetValue(localAppData, null, RegistryValueOptions.DoNotExpandEnvironmentNames);
key.SetValue(localAppData, dir);
try {
// setup any installed products via WebPI...
foreach (var install in webpiInstalls) {
var paths = install.Split(new[] { ';' }, 2);
if (paths.Length == 2) {
using (var p = Process.Start(
webpiCmdLinePath,
"/Install " +
"/AcceptEula " +
"/Feeds:\"" + paths[0] + "\" " +
"/Products:" + paths[1]
)) {
p.WaitForExit();
}
}
}
} finally {
key.SetValue(localAppData, oldValue, RegistryValueKind.ExpandString);
}
}
}
private static bool IsEmulated() {
string emulated = Environment.GetEnvironmentVariable("EMULATED");
bool res;
if (String.IsNullOrEmpty(emulated) || !Boolean.TryParse(emulated, out res)) {
return false;
}
return res;
}
}
}
| |
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
// specific language governing permissions and limitations under the License.
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by google-apis-code-generator 1.5.1
// C# generator version: 1.38.0
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
/**
* \brief
* Indexing API Version v3
*
* \section ApiInfo API Version Information
* <table>
* <tr><th>API
* <td><a href='https://developers.google.com/search/apis/indexing-api/'>Indexing API</a>
* <tr><th>API Version<td>v3
* <tr><th>API Rev<td>20190131 (1491)
* <tr><th>API Docs
* <td><a href='https://developers.google.com/search/apis/indexing-api/'>
* https://developers.google.com/search/apis/indexing-api/</a>
* <tr><th>Discovery Name<td>indexing
* </table>
*
* \section ForMoreInfo For More Information
*
* The complete API documentation for using Indexing API can be found at
* <a href='https://developers.google.com/search/apis/indexing-api/'>https://developers.google.com/search/apis/indexing-api/</a>.
*
* For more information about the Google APIs Client Library for .NET, see
* <a href='https://developers.google.com/api-client-library/dotnet/get_started'>
* https://developers.google.com/api-client-library/dotnet/get_started</a>
*/
namespace Google.Apis.Indexing.v3
{
/// <summary>The Indexing Service.</summary>
public class IndexingService : Google.Apis.Services.BaseClientService
{
/// <summary>The API version.</summary>
public const string Version = "v3";
/// <summary>The discovery version used to generate this service.</summary>
public static Google.Apis.Discovery.DiscoveryVersion DiscoveryVersionUsed =
Google.Apis.Discovery.DiscoveryVersion.Version_1_0;
/// <summary>Constructs a new service.</summary>
public IndexingService() :
this(new Google.Apis.Services.BaseClientService.Initializer()) {}
/// <summary>Constructs a new service.</summary>
/// <param name="initializer">The service initializer.</param>
public IndexingService(Google.Apis.Services.BaseClientService.Initializer initializer)
: base(initializer)
{
urlNotifications = new UrlNotificationsResource(this);
}
/// <summary>Gets the service supported features.</summary>
public override System.Collections.Generic.IList<string> Features
{
get { return new string[0]; }
}
/// <summary>Gets the service name.</summary>
public override string Name
{
get { return "indexing"; }
}
/// <summary>Gets the service base URI.</summary>
public override string BaseUri
{
get { return "https://indexing.googleapis.com/"; }
}
/// <summary>Gets the service base path.</summary>
public override string BasePath
{
get { return ""; }
}
#if !NET40
/// <summary>Gets the batch base URI; <c>null</c> if unspecified.</summary>
public override string BatchUri
{
get { return "https://indexing.googleapis.com/batch"; }
}
/// <summary>Gets the batch base path; <c>null</c> if unspecified.</summary>
public override string BatchPath
{
get { return "batch"; }
}
#endif
/// <summary>Available OAuth 2.0 scopes for use with the Indexing API.</summary>
public class Scope
{
/// <summary>Submit data to Google for indexing</summary>
public static string Indexing = "https://www.googleapis.com/auth/indexing";
}
/// <summary>Available OAuth 2.0 scope constants for use with the Indexing API.</summary>
public static class ScopeConstants
{
/// <summary>Submit data to Google for indexing</summary>
public const string Indexing = "https://www.googleapis.com/auth/indexing";
}
private readonly UrlNotificationsResource urlNotifications;
/// <summary>Gets the UrlNotifications resource.</summary>
public virtual UrlNotificationsResource UrlNotifications
{
get { return urlNotifications; }
}
}
///<summary>A base abstract class for Indexing requests.</summary>
public abstract class IndexingBaseServiceRequest<TResponse> : Google.Apis.Requests.ClientServiceRequest<TResponse>
{
///<summary>Constructs a new IndexingBaseServiceRequest instance.</summary>
protected IndexingBaseServiceRequest(Google.Apis.Services.IClientService service)
: base(service)
{
}
/// <summary>V1 error format.</summary>
[Google.Apis.Util.RequestParameterAttribute("$.xgafv", Google.Apis.Util.RequestParameterType.Query)]
public virtual System.Nullable<XgafvEnum> Xgafv { get; set; }
/// <summary>V1 error format.</summary>
public enum XgafvEnum
{
/// <summary>v1 error format</summary>
[Google.Apis.Util.StringValueAttribute("1")]
Value1,
/// <summary>v2 error format</summary>
[Google.Apis.Util.StringValueAttribute("2")]
Value2,
}
/// <summary>OAuth access token.</summary>
[Google.Apis.Util.RequestParameterAttribute("access_token", Google.Apis.Util.RequestParameterType.Query)]
public virtual string AccessToken { get; set; }
/// <summary>Data format for response.</summary>
/// [default: json]
[Google.Apis.Util.RequestParameterAttribute("alt", Google.Apis.Util.RequestParameterType.Query)]
public virtual System.Nullable<AltEnum> Alt { get; set; }
/// <summary>Data format for response.</summary>
public enum AltEnum
{
/// <summary>Responses with Content-Type of application/json</summary>
[Google.Apis.Util.StringValueAttribute("json")]
Json,
/// <summary>Media download with context-dependent Content-Type</summary>
[Google.Apis.Util.StringValueAttribute("media")]
Media,
/// <summary>Responses with Content-Type of application/x-protobuf</summary>
[Google.Apis.Util.StringValueAttribute("proto")]
Proto,
}
/// <summary>JSONP</summary>
[Google.Apis.Util.RequestParameterAttribute("callback", Google.Apis.Util.RequestParameterType.Query)]
public virtual string Callback { get; set; }
/// <summary>Selector specifying which fields to include in a partial response.</summary>
[Google.Apis.Util.RequestParameterAttribute("fields", Google.Apis.Util.RequestParameterType.Query)]
public virtual string Fields { get; set; }
/// <summary>API key. Your API key identifies your project and provides you with API access, quota, and reports.
/// Required unless you provide an OAuth 2.0 token.</summary>
[Google.Apis.Util.RequestParameterAttribute("key", Google.Apis.Util.RequestParameterType.Query)]
public virtual string Key { get; set; }
/// <summary>OAuth 2.0 token for the current user.</summary>
[Google.Apis.Util.RequestParameterAttribute("oauth_token", Google.Apis.Util.RequestParameterType.Query)]
public virtual string OauthToken { get; set; }
/// <summary>Returns response with indentations and line breaks.</summary>
/// [default: true]
[Google.Apis.Util.RequestParameterAttribute("prettyPrint", Google.Apis.Util.RequestParameterType.Query)]
public virtual System.Nullable<bool> PrettyPrint { get; set; }
/// <summary>Available to use for quota purposes for server-side applications. Can be any arbitrary string
/// assigned to a user, but should not exceed 40 characters.</summary>
[Google.Apis.Util.RequestParameterAttribute("quotaUser", Google.Apis.Util.RequestParameterType.Query)]
public virtual string QuotaUser { get; set; }
/// <summary>Legacy upload protocol for media (e.g. "media", "multipart").</summary>
[Google.Apis.Util.RequestParameterAttribute("uploadType", Google.Apis.Util.RequestParameterType.Query)]
public virtual string UploadType { get; set; }
/// <summary>Upload protocol for media (e.g. "raw", "multipart").</summary>
[Google.Apis.Util.RequestParameterAttribute("upload_protocol", Google.Apis.Util.RequestParameterType.Query)]
public virtual string UploadProtocol { get; set; }
/// <summary>Initializes Indexing parameter list.</summary>
protected override void InitParameters()
{
base.InitParameters();
RequestParameters.Add(
"$.xgafv", new Google.Apis.Discovery.Parameter
{
Name = "$.xgafv",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add(
"access_token", new Google.Apis.Discovery.Parameter
{
Name = "access_token",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add(
"alt", new Google.Apis.Discovery.Parameter
{
Name = "alt",
IsRequired = false,
ParameterType = "query",
DefaultValue = "json",
Pattern = null,
});
RequestParameters.Add(
"callback", new Google.Apis.Discovery.Parameter
{
Name = "callback",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add(
"fields", new Google.Apis.Discovery.Parameter
{
Name = "fields",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add(
"key", new Google.Apis.Discovery.Parameter
{
Name = "key",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add(
"oauth_token", new Google.Apis.Discovery.Parameter
{
Name = "oauth_token",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add(
"prettyPrint", new Google.Apis.Discovery.Parameter
{
Name = "prettyPrint",
IsRequired = false,
ParameterType = "query",
DefaultValue = "true",
Pattern = null,
});
RequestParameters.Add(
"quotaUser", new Google.Apis.Discovery.Parameter
{
Name = "quotaUser",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add(
"uploadType", new Google.Apis.Discovery.Parameter
{
Name = "uploadType",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
RequestParameters.Add(
"upload_protocol", new Google.Apis.Discovery.Parameter
{
Name = "upload_protocol",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
}
}
/// <summary>The "urlNotifications" collection of methods.</summary>
public class UrlNotificationsResource
{
private const string Resource = "urlNotifications";
/// <summary>The service which this resource belongs to.</summary>
private readonly Google.Apis.Services.IClientService service;
/// <summary>Constructs a new resource.</summary>
public UrlNotificationsResource(Google.Apis.Services.IClientService service)
{
this.service = service;
}
/// <summary>Gets metadata about a Web Document. This method can _only_ be used to query URLs that were
/// previously seen in successful Indexing API notifications. Includes the latest `UrlNotification` received via
/// this API.</summary>
public virtual GetMetadataRequest GetMetadata()
{
return new GetMetadataRequest(service);
}
/// <summary>Gets metadata about a Web Document. This method can _only_ be used to query URLs that were
/// previously seen in successful Indexing API notifications. Includes the latest `UrlNotification` received via
/// this API.</summary>
public class GetMetadataRequest : IndexingBaseServiceRequest<Google.Apis.Indexing.v3.Data.UrlNotificationMetadata>
{
/// <summary>Constructs a new GetMetadata request.</summary>
public GetMetadataRequest(Google.Apis.Services.IClientService service)
: base(service)
{
InitParameters();
}
/// <summary>URL that is being queried.</summary>
[Google.Apis.Util.RequestParameterAttribute("url", Google.Apis.Util.RequestParameterType.Query)]
public virtual string Url { get; set; }
///<summary>Gets the method name.</summary>
public override string MethodName
{
get { return "getMetadata"; }
}
///<summary>Gets the HTTP method.</summary>
public override string HttpMethod
{
get { return "GET"; }
}
///<summary>Gets the REST path.</summary>
public override string RestPath
{
get { return "v3/urlNotifications/metadata"; }
}
/// <summary>Initializes GetMetadata parameter list.</summary>
protected override void InitParameters()
{
base.InitParameters();
RequestParameters.Add(
"url", new Google.Apis.Discovery.Parameter
{
Name = "url",
IsRequired = false,
ParameterType = "query",
DefaultValue = null,
Pattern = null,
});
}
}
/// <summary>Notifies that a URL has been updated or deleted.</summary>
/// <param name="body">The body of the request.</param>
public virtual PublishRequest Publish(Google.Apis.Indexing.v3.Data.UrlNotification body)
{
return new PublishRequest(service, body);
}
/// <summary>Notifies that a URL has been updated or deleted.</summary>
public class PublishRequest : IndexingBaseServiceRequest<Google.Apis.Indexing.v3.Data.PublishUrlNotificationResponse>
{
/// <summary>Constructs a new Publish request.</summary>
public PublishRequest(Google.Apis.Services.IClientService service, Google.Apis.Indexing.v3.Data.UrlNotification body)
: base(service)
{
Body = body;
InitParameters();
}
/// <summary>Gets or sets the body of this request.</summary>
Google.Apis.Indexing.v3.Data.UrlNotification Body { get; set; }
///<summary>Returns the body of the request.</summary>
protected override object GetBody() { return Body; }
///<summary>Gets the method name.</summary>
public override string MethodName
{
get { return "publish"; }
}
///<summary>Gets the HTTP method.</summary>
public override string HttpMethod
{
get { return "POST"; }
}
///<summary>Gets the REST path.</summary>
public override string RestPath
{
get { return "v3/urlNotifications:publish"; }
}
/// <summary>Initializes Publish parameter list.</summary>
protected override void InitParameters()
{
base.InitParameters();
}
}
}
}
namespace Google.Apis.Indexing.v3.Data
{
/// <summary>Output for PublishUrlNotification</summary>
public class PublishUrlNotificationResponse : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>Description of the notification events received for this URL.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("urlNotificationMetadata")]
public virtual UrlNotificationMetadata UrlNotificationMetadata { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>`UrlNotification` is the resource used in all Indexing API calls. It describes one event in the life
/// cycle of a Web Document.</summary>
public class UrlNotification : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>Creation timestamp for this notification. Users should _not_ specify it, the field is ignored at
/// the request time.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("notifyTime")]
public virtual object NotifyTime { get; set; }
/// <summary>The URL life cycle event that Google is being notified about.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("type")]
public virtual string Type { get; set; }
/// <summary>The object of this notification. The URL must be owned by the publisher of this notification and,
/// in case of `URL_UPDATED` notifications, it _must_ be crawlable by Google.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("url")]
public virtual string Url { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
/// <summary>Summary of the most recent Indexing API notifications successfully received, for a given URL.</summary>
public class UrlNotificationMetadata : Google.Apis.Requests.IDirectResponseSchema
{
/// <summary>Latest notification received with type `URL_REMOVED`.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("latestRemove")]
public virtual UrlNotification LatestRemove { get; set; }
/// <summary>Latest notification received with type `URL_UPDATED`.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("latestUpdate")]
public virtual UrlNotification LatestUpdate { get; set; }
/// <summary>URL to which this metadata refers.</summary>
[Newtonsoft.Json.JsonPropertyAttribute("url")]
public virtual string Url { get; set; }
/// <summary>The ETag of the item.</summary>
public virtual string ETag { get; set; }
}
}
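// --- Illustrative usage sketch (not part of the generated client) ---
// A minimal sketch of calling the generated client above. Credential setup is
// omitted: `credential` is assumed to be an authorized OAuth 2.0 initializer
// granted the IndexingService.ScopeConstants.Indexing scope, and the URL is
// hypothetical.
namespace Google.Apis.Indexing.v3.Samples
{
internal static class IndexingUsageSketch
{
internal static void NotifyUrlUpdated(Google.Apis.Http.IConfigurableHttpClientInitializer credential)
{
var service = new IndexingService(new Google.Apis.Services.BaseClientService.Initializer
{
HttpClientInitializer = credential,
ApplicationName = "indexing-sample"
});
// Notify Google that a URL was updated.
var notification = new Google.Apis.Indexing.v3.Data.UrlNotification
{
Url = "https://example.com/some-page",
Type = "URL_UPDATED"
};
var publishResponse = service.UrlNotifications.Publish(notification).Execute();
// Query the latest notification metadata previously received for the URL.
var getMetadata = service.UrlNotifications.GetMetadata();
getMetadata.Url = "https://example.com/some-page";
var metadata = getMetadata.Execute();
}
}
}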
| |
#region Copyright (c) 2004 Ian Davis and James Carlyle
/*------------------------------------------------------------------------------
COPYRIGHT AND PERMISSION NOTICE
Copyright (c) 2004 Ian Davis and James Carlyle
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
------------------------------------------------------------------------------*/
#endregion
namespace SemPlan.Spiral.Utility {
using SemPlan.Spiral.Core;
using System;
using System.Collections;
using System.Xml;
/// <summary>
/// Represents a writer that outputs triples in the RDF/XML format
/// </summary>
/// <remarks>
/// $Id: RdfXmlWriter.cs,v 1.2 2005/05/26 14:24:31 ian Exp $
///</remarks>
public class RdfXmlWriter : RdfWriter {
private const string RDF_NAMESPACE_URI = "http://www.w3.org/1999/02/22-rdf-syntax-ns#";
private XmlWriter itsXmlWriter;
private QualifiedName itsCurrentPredicateQualifiedName;
private Hashtable itsNamespacePrefixes;
private Hashtable itsUsedNamespaces;
private int itsNextNamespaceIndex;
private Hashtable itsMappedNodeIds;
private int itsNextNodeIdIndex;
private Hashtable itsBufferedSubjects;
private Subject itsCurrentSubject;
private enum WriterState {
Waiting, InSubject, InPredicate, InObject
}
private WriterState itsState;
public RdfXmlWriter(XmlWriter writer) {
itsXmlWriter = writer;
itsState = WriterState.Waiting;
itsNamespacePrefixes = new Hashtable();
itsNamespacePrefixes[RDF_NAMESPACE_URI] = "rdf";
itsNamespacePrefixes["http://xmlns.com/foaf/0.1/"] = "foaf";
itsNamespacePrefixes["http://xmlns.com/wot/0.1/"] = "wot";
itsNamespacePrefixes["http://www.w3.org/2000/01/rdf-schema#"] = "rdfs";
itsNamespacePrefixes["http://www.w3.org/2002/07/owl#"] = "owl";
itsNamespacePrefixes["http://purl.org/vocab/bio/0.1/"] = "bio";
itsNamespacePrefixes["http://purl.org/dc/elements/1.1/"] = "dc";
itsNamespacePrefixes["http://purl.org/dc/terms/"] = "dct";
itsNamespacePrefixes["http://web.resource.org/cc/"] = "cc";
itsNamespacePrefixes["http://purl.org/vocab/relationship/"] = "rel";
itsNamespacePrefixes["http://www.w3.org/2003/01/geo/wgs84_pos#"] = "geo";
itsNamespacePrefixes["http://purl.org/rss/1.0/"] = "rss";
itsUsedNamespaces = new Hashtable();
itsNextNamespaceIndex = 1;
itsMappedNodeIds = new Hashtable();
itsNextNodeIdIndex = 1;
itsBufferedSubjects = new Hashtable();
itsCurrentSubject = null;
itsCurrentPredicateQualifiedName = null;
}
public void StartOutput() {
itsXmlWriter.WriteStartDocument(true);
}
public void EndOutput() {
// Subjects are buffered as triples arrive and only serialized here, so that
// every namespace collected by ParseQualifiedName can be declared on the
// rdf:RDF root element.
itsXmlWriter.WriteStartElement( (string)itsNamespacePrefixes[RDF_NAMESPACE_URI], "RDF", RDF_NAMESPACE_URI);
foreach ( string ns in itsUsedNamespaces.Keys) {
itsXmlWriter.WriteAttributeString("xmlns",(string)itsNamespacePrefixes[ns], null, ns);
}
foreach (Subject subject in itsBufferedSubjects.Values) {
subject.Write(this, itsXmlWriter);
}
itsXmlWriter.WriteEndElement(); // rdf:RDF
itsXmlWriter.WriteEndDocument();
}
public void StartSubject() {
itsState = WriterState.InSubject;
}
public void EndSubject() {
itsState = WriterState.Waiting;
}
public void StartPredicate() {
itsState = WriterState.InPredicate;
}
public void EndPredicate() {
itsState = WriterState.InSubject;
}
public void StartObject() {
itsState = WriterState.InObject;
}
public void EndObject() {
itsState = WriterState.InPredicate;
}
public void WriteUriRef(string uriRef) {
if ( itsState == WriterState.InSubject ) {
if ( itsBufferedSubjects.Contains( "uriref:" + uriRef ) ) {
itsCurrentSubject =(Subject) itsBufferedSubjects["uriref:" + uriRef];
}
else {
UriRefSubject subject = new UriRefSubject(uriRef);
itsBufferedSubjects["uriref:" + uriRef] = subject;
itsCurrentSubject = subject;
}
}
else if (itsState == WriterState.InPredicate) {
itsCurrentPredicateQualifiedName = ParseQualifiedName(uriRef);
}
else if ( itsState == WriterState.InObject ) {
UriRefProperty property = new UriRefProperty(itsCurrentPredicateQualifiedName, uriRef);
itsCurrentSubject.Add(property);
}
}
public void WritePlainLiteral(string lexicalValue) {
LiteralProperty property = new LiteralProperty(itsCurrentPredicateQualifiedName, lexicalValue, null, null);
itsCurrentSubject.Add(property);
}
public void WritePlainLiteral(string lexicalValue, string language) {
LiteralProperty property = new LiteralProperty(itsCurrentPredicateQualifiedName, lexicalValue, language, null);
itsCurrentSubject.Add(property);
}
public void WriteTypedLiteral(string lexicalValue, string uriRef) {
LiteralProperty property = new LiteralProperty(itsCurrentPredicateQualifiedName, lexicalValue, null, uriRef);
itsCurrentSubject.Add(property);
}
public void WriteBlankNode(string nodeId) {
string mappedNodeId;
if ( itsMappedNodeIds.Contains( nodeId )) {
mappedNodeId = (string)itsMappedNodeIds[ nodeId ];
}
else {
mappedNodeId = "genid" + itsNextNodeIdIndex++;
itsMappedNodeIds[ nodeId ] = mappedNodeId;
}
if ( itsState == WriterState.InSubject ) {
if ( itsBufferedSubjects.Contains( "blank:" + mappedNodeId ) ) {
itsCurrentSubject = (Subject)itsBufferedSubjects["blank:" + mappedNodeId];
}
else {
BlankNodeSubject subject = new BlankNodeSubject(mappedNodeId);
itsBufferedSubjects["blank:" + mappedNodeId] = subject;
itsCurrentSubject = subject;
}
}
else if ( itsState == WriterState.InObject ) {
BlankNodeProperty property = new BlankNodeProperty( itsCurrentPredicateQualifiedName, mappedNodeId);
itsCurrentSubject.Add(property);
}
}
public void RegisterNamespacePrefix( string ns, string prefix) {
itsNamespacePrefixes[ns] = prefix;
}
public QualifiedName ParseQualifiedName(string uriRef) {
int namespaceDelimiterIndex = uriRef.LastIndexOf('#');
if (namespaceDelimiterIndex == -1) {
namespaceDelimiterIndex = uriRef.LastIndexOf('/');
if (namespaceDelimiterIndex == -1) {
throw new ArgumentException("Predicate uriref must contain / or #");
}
}
string localName = uriRef.Substring(namespaceDelimiterIndex + 1);
string ns = uriRef.Substring(0, namespaceDelimiterIndex + 1);
if (! itsNamespacePrefixes.Contains( ns ) ) {
string prefix = "ns" + itsNextNamespaceIndex++;
RegisterNamespacePrefix( ns, prefix);
}
itsUsedNamespaces[ns] = 1;
return new QualifiedName( localName, ns );
}
public class QualifiedName {
private string itsLocalName;
private string itsNamespace;
public QualifiedName( string localName, string ns ) {
itsLocalName = localName;
itsNamespace = ns;
}
public string GetNamespace() {
return itsNamespace;
}
public string GetLocalName() {
return itsLocalName;
}
}
private abstract class Subject {
private ArrayList itsProperties;
private string itsTypeUriRef;
public Subject() {
itsProperties = new ArrayList();
itsTypeUriRef = null;
}
public void Write(RdfXmlWriter rdfWriter, XmlWriter xmlWriter) {
if (itsTypeUriRef == null) {
xmlWriter.WriteStartElement( (string)rdfWriter.itsNamespacePrefixes[RDF_NAMESPACE_URI], "Description", RDF_NAMESPACE_URI);
}
else {
QualifiedName typeQualifiedName = rdfWriter.ParseQualifiedName(itsTypeUriRef);
xmlWriter.WriteStartElement((string)rdfWriter.itsNamespacePrefixes[typeQualifiedName.GetNamespace()], typeQualifiedName.GetLocalName(), typeQualifiedName.GetNamespace());
}
WriteIdentifingAttribute( rdfWriter, xmlWriter);
foreach (Property property in itsProperties) {
property.Write(rdfWriter, xmlWriter);
}
xmlWriter.WriteEndElement();
}
public abstract void WriteIdentifingAttribute(RdfXmlWriter rdfWriter, XmlWriter xmlWriter);
public void Add(Property property) {
itsProperties.Add(property);
}
public void Add(UriRefProperty property) {
// Special case: the first rdf:type property becomes the element name of the
// subject (see Write above) instead of being emitted as a property element.
if (itsTypeUriRef == null && property.GetPredicateQualifiedName().GetNamespace().Equals(RDF_NAMESPACE_URI) && property.GetPredicateQualifiedName().GetLocalName().Equals("type") ) {
itsTypeUriRef = property.GetUriRef();
}
else {
itsProperties.Add(property);
}
}
}
private interface Property {
void Write(RdfXmlWriter rdfWriter, XmlWriter xmlWriter);
}
private class UriRefSubject : Subject {
private string itsUriRef;
public UriRefSubject(string uriRef) {
itsUriRef = uriRef;
}
public override void WriteIdentifingAttribute(RdfXmlWriter rdfWriter, XmlWriter xmlWriter) {
xmlWriter.WriteAttributeString( (string)rdfWriter.itsNamespacePrefixes[RDF_NAMESPACE_URI], "about", RDF_NAMESPACE_URI, itsUriRef);
}
}
private class BlankNodeSubject : Subject {
private string itsNodeId;
public BlankNodeSubject(string nodeId) {
itsNodeId = nodeId;
}
public override void WriteIdentifingAttribute(RdfXmlWriter rdfWriter, XmlWriter xmlWriter) {
xmlWriter.WriteAttributeString( (string)rdfWriter.itsNamespacePrefixes[RDF_NAMESPACE_URI], "nodeID", RDF_NAMESPACE_URI, itsNodeId);
}
}
private class UriRefProperty : Property {
private QualifiedName itsPredicateQualifiedName;
private string itsUriRef;
public UriRefProperty(QualifiedName predicateQualifiedName, string uriRef) {
itsPredicateQualifiedName = predicateQualifiedName;
itsUriRef = uriRef;
}
public void Write(RdfXmlWriter rdfWriter, XmlWriter xmlWriter) {
xmlWriter.WriteStartElement((string)rdfWriter.itsNamespacePrefixes[itsPredicateQualifiedName.GetNamespace()], itsPredicateQualifiedName.GetLocalName(), itsPredicateQualifiedName.GetNamespace());
xmlWriter.WriteAttributeString( (string)rdfWriter.itsNamespacePrefixes[RDF_NAMESPACE_URI], "resource", RDF_NAMESPACE_URI, itsUriRef);
xmlWriter.WriteEndElement();
}
public QualifiedName GetPredicateQualifiedName() {
return itsPredicateQualifiedName;
}
public string GetUriRef() {
return itsUriRef;
}
}
private class BlankNodeProperty : Property {
private QualifiedName itsPredicateQualifiedName;
private string itsNodeId;
public BlankNodeProperty(QualifiedName predicateQualifiedName, string nodeId) {
itsPredicateQualifiedName = predicateQualifiedName;
itsNodeId = nodeId;
}
public void Write(RdfXmlWriter rdfWriter, XmlWriter xmlWriter) {
xmlWriter.WriteStartElement((string)rdfWriter.itsNamespacePrefixes[itsPredicateQualifiedName.GetNamespace()], itsPredicateQualifiedName.GetLocalName(), itsPredicateQualifiedName.GetNamespace());
xmlWriter.WriteAttributeString( (string)rdfWriter.itsNamespacePrefixes[RDF_NAMESPACE_URI], "nodeID", RDF_NAMESPACE_URI, itsNodeId);
xmlWriter.WriteEndElement();
}
}
private class LiteralProperty : Property{
private QualifiedName itsPredicateQualifiedName;
private string itsLexicalValue;
private string itsLanguage;
private string itsDataType;
public LiteralProperty(QualifiedName predicateQualifiedName, string lexicalValue, string language, string dataType) {
itsPredicateQualifiedName = predicateQualifiedName;
itsLexicalValue = lexicalValue;
itsLanguage = language;
itsDataType = dataType;
}
public void Write(RdfXmlWriter rdfWriter, XmlWriter xmlWriter) {
xmlWriter.WriteStartElement((string)rdfWriter.itsNamespacePrefixes[itsPredicateQualifiedName.GetNamespace()], itsPredicateQualifiedName.GetLocalName(), itsPredicateQualifiedName.GetNamespace());
if (itsLanguage != null) {
xmlWriter.WriteAttributeString( "xml", "lang", null, itsLanguage);
}
else if (itsDataType != null) {
xmlWriter.WriteAttributeString( (string)rdfWriter.itsNamespacePrefixes[RDF_NAMESPACE_URI], "datatype", RDF_NAMESPACE_URI, itsDataType);
}
xmlWriter.WriteString( itsLexicalValue );
xmlWriter.WriteEndElement();
}
}
}
}
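// --- Illustrative usage sketch (not part of the original source) ---
// A minimal sketch, based on the state machine above: triples are buffered via
// the Start*/Write*/End* calls and serialized when EndOutput() is called.
namespace SemPlan.Spiral.Utility.Examples {
using System;
using System.Xml;
public class RdfXmlWriterUsageSketch {
public static void EmitSingleTriple() {
XmlTextWriter xmlWriter = new XmlTextWriter(Console.Out);
xmlWriter.Formatting = Formatting.Indented;
RdfXmlWriter writer = new RdfXmlWriter(xmlWriter);
writer.StartOutput();
writer.StartSubject();
writer.WriteUriRef("http://example.com/doc");
writer.StartPredicate();
writer.WriteUriRef("http://purl.org/dc/elements/1.1/title");
writer.StartObject();
writer.WritePlainLiteral("An example document");
writer.EndObject();
writer.EndPredicate();
writer.EndSubject();
writer.EndOutput();
xmlWriter.Flush();
}
}
}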
| |
// -----------------------------------------------------------
//
// This file was generated, please do not modify.
//
// -----------------------------------------------------------
namespace EmptyKeys.UserInterface.Generated {
using System;
using System.CodeDom.Compiler;
using System.Collections.ObjectModel;
using EmptyKeys.UserInterface;
using EmptyKeys.UserInterface.Charts;
using EmptyKeys.UserInterface.Data;
using EmptyKeys.UserInterface.Controls;
using EmptyKeys.UserInterface.Controls.Primitives;
using EmptyKeys.UserInterface.Input;
using EmptyKeys.UserInterface.Media;
using EmptyKeys.UserInterface.Media.Animation;
using EmptyKeys.UserInterface.Media.Imaging;
using EmptyKeys.UserInterface.Shapes;
using EmptyKeys.UserInterface.Renderers;
using EmptyKeys.UserInterface.Themes;
[GeneratedCodeAttribute("Empty Keys UI Generator", "1.11.0.0")]
public partial class MapMakerMainMenu : UIRoot {
private Grid e_0;
private StackPanel e_1;
private TextBlock e_2;
private Button LoadMapBtn;
private Button SaveMapBtn;
private StackPanel e_3;
private TextBlock e_4;
private ScrollViewer e_5;
private StackPanel ModsListPanel;
private Button LoadModBtn;
private StackPanel e_6;
private CheckBox LockToGridChkBox;
private StackPanel e_7;
private TextBlock e_8;
private TextBox SearchBox;
private ScrollViewer e_9;
private StackPanel MapObjectSelectorPanel;
private Button MoreSettingsBtn;
public MapMakerMainMenu() :
base() {
this.Initialize();
}
public MapMakerMainMenu(int width, int height) :
base(width, height) {
this.Initialize();
}
private void Initialize() {
Style style = RootStyle.CreateRootStyle();
style.TargetType = this.GetType();
this.Style = style;
this.InitializeComponent();
}
private void InitializeComponent() {
this.Background = new SolidColorBrush(new ColorW(255, 255, 255, 0));
InitializeElementResources(this);
// e_0 element
this.e_0 = new Grid();
this.Content = this.e_0;
this.e_0.Name = "e_0";
// e_1 element
this.e_1 = new StackPanel();
this.e_0.Children.Add(this.e_1);
this.e_1.Name = "e_1";
this.e_1.Width = 200F;
this.e_1.HorizontalAlignment = HorizontalAlignment.Left;
this.e_1.VerticalAlignment = VerticalAlignment.Stretch;
this.e_1.Background = new SolidColorBrush(new ColorW(0, 0, 0, 79));
// e_2 element
this.e_2 = new TextBlock();
this.e_1.Children.Add(this.e_2);
this.e_2.Name = "e_2";
this.e_2.HorizontalAlignment = HorizontalAlignment.Center;
this.e_2.Foreground = new SolidColorBrush(new ColorW(255, 255, 255, 255));
this.e_2.Text = "Press F8 to hide/show this menu";
this.e_2.FontFamily = new FontFamily("JHUF");
this.e_2.FontSize = 12F;
// LoadMapBtn element
this.LoadMapBtn = new Button();
this.e_1.Children.Add(this.LoadMapBtn);
this.LoadMapBtn.Name = "LoadMapBtn";
this.LoadMapBtn.Foreground = new SolidColorBrush(new ColorW(255, 255, 255, 255));
this.LoadMapBtn.FontFamily = new FontFamily("JHUF");
this.LoadMapBtn.FontSize = 12F;
this.LoadMapBtn.Content = "Load map";
// SaveMapBtn element
this.SaveMapBtn = new Button();
this.e_1.Children.Add(this.SaveMapBtn);
this.SaveMapBtn.Name = "SaveMapBtn";
this.SaveMapBtn.Foreground = new SolidColorBrush(new ColorW(255, 255, 255, 255));
this.SaveMapBtn.FontFamily = new FontFamily("JHUF");
this.SaveMapBtn.FontSize = 12F;
this.SaveMapBtn.Content = "Save map";
// e_3 element
this.e_3 = new StackPanel();
this.e_1.Children.Add(this.e_3);
this.e_3.Name = "e_3";
// e_4 element
this.e_4 = new TextBlock();
this.e_3.Children.Add(this.e_4);
this.e_4.Name = "e_4";
this.e_4.HorizontalAlignment = HorizontalAlignment.Center;
this.e_4.Foreground = new SolidColorBrush(new ColorW(255, 255, 255, 255));
this.e_4.Text = "Mods this map depends on:";
this.e_4.Padding = new Thickness(5F, 5F, 5F, 5F);
this.e_4.FontFamily = new FontFamily("JHUF");
this.e_4.FontSize = 12F;
// e_5 element
this.e_5 = new ScrollViewer();
this.e_3.Children.Add(this.e_5);
this.e_5.Name = "e_5";
this.e_5.Height = 90F;
this.e_5.MaxHeight = 90F;
// ModsListPanel element
this.ModsListPanel = new StackPanel();
this.e_5.Content = this.ModsListPanel;
this.ModsListPanel.Name = "ModsListPanel";
// LoadModBtn element
this.LoadModBtn = new Button();
this.e_3.Children.Add(this.LoadModBtn);
this.LoadModBtn.Name = "LoadModBtn";
this.LoadModBtn.Margin = new Thickness(5F, 5F, 5F, 5F);
this.LoadModBtn.Foreground = new SolidColorBrush(new ColorW(255, 255, 255, 255));
this.LoadModBtn.FontFamily = new FontFamily("JHUF");
this.LoadModBtn.FontSize = 12F;
this.LoadModBtn.Content = "Load another mod";
// e_6 element
this.e_6 = new StackPanel();
this.e_1.Children.Add(this.e_6);
this.e_6.Name = "e_6";
this.e_6.Margin = new Thickness(5F, 5F, 0F, 5F);
this.e_6.HorizontalAlignment = HorizontalAlignment.Left;
// LockToGridChkBox element
this.LockToGridChkBox = new CheckBox();
this.e_6.Children.Add(this.LockToGridChkBox);
this.LockToGridChkBox.Name = "LockToGridChkBox";
this.LockToGridChkBox.Foreground = new SolidColorBrush(new ColorW(255, 255, 255, 255));
this.LockToGridChkBox.FontFamily = new FontFamily("JHUF");
this.LockToGridChkBox.FontSize = 12F;
this.LockToGridChkBox.Content = "Lock to grid";
// e_7 element
this.e_7 = new StackPanel();
this.e_1.Children.Add(this.e_7);
this.e_7.Name = "e_7";
this.e_7.Margin = new Thickness(0F, 0F, 0F, 10F);
this.e_7.Orientation = Orientation.Horizontal;
// e_8 element
this.e_8 = new TextBlock();
this.e_7.Children.Add(this.e_8);
this.e_8.Name = "e_8";
this.e_8.HorizontalAlignment = HorizontalAlignment.Center;
this.e_8.Foreground = new SolidColorBrush(new ColorW(255, 255, 255, 255));
this.e_8.Text = "Search:";
this.e_8.Padding = new Thickness(0F, 8F, 8F, 8F);
this.e_8.FontFamily = new FontFamily("JHUF");
this.e_8.FontSize = 12F;
// SearchBox element
this.SearchBox = new TextBox();
this.e_7.Children.Add(this.SearchBox);
this.SearchBox.Name = "SearchBox";
this.SearchBox.Width = 150F;
// e_9 element
this.e_9 = new ScrollViewer();
this.e_1.Children.Add(this.e_9);
this.e_9.Name = "e_9";
this.e_9.Height = 180F;
this.e_9.MaxHeight = 180F;
this.e_9.HorizontalScrollBarVisibility = ScrollBarVisibility.Disabled;
this.e_9.VerticalScrollBarVisibility = ScrollBarVisibility.Visible;
// MapObjectSelectorPanel element
this.MapObjectSelectorPanel = new StackPanel();
this.e_9.Content = this.MapObjectSelectorPanel;
this.MapObjectSelectorPanel.Name = "MapObjectSelectorPanel";
this.MapObjectSelectorPanel.Width = 175F;
this.MapObjectSelectorPanel.Background = new SolidColorBrush(new ColorW(0, 0, 0, 144));
// MoreSettingsBtn element
this.MoreSettingsBtn = new Button();
this.e_1.Children.Add(this.MoreSettingsBtn);
this.MoreSettingsBtn.Name = "MoreSettingsBtn";
this.MoreSettingsBtn.Margin = new Thickness(5F, 5F, 5F, 5F);
this.MoreSettingsBtn.Foreground = new SolidColorBrush(new ColorW(255, 255, 255, 255));
this.MoreSettingsBtn.FontFamily = new FontFamily("JHUF");
this.MoreSettingsBtn.FontSize = 12F;
this.MoreSettingsBtn.Content = "Map Settings";
FontManager.Instance.AddFont("JHUF", 36F, FontStyle.Regular, "JHUF_27_Regular");
FontManager.Instance.AddFont("JHUF", 18F, FontStyle.Regular, "JHUF_13.5_Regular");
FontManager.Instance.AddFont("JHUF", 24F, FontStyle.Regular, "JHUF_18_Regular");
FontManager.Instance.AddFont("JHUF", 72F, FontStyle.Regular, "JHUF_54_Regular");
FontManager.Instance.AddFont("JHUF", 96F, FontStyle.Regular, "JHUF_72_Regular");
FontManager.Instance.AddFont("JHUF", 48F, FontStyle.Regular, "JHUF_36_Regular");
FontManager.Instance.AddFont("JHUF", 20F, FontStyle.Regular, "JHUF_15_Regular");
FontManager.Instance.AddFont("JHUF", 40F, FontStyle.Regular, "JHUF_30_Regular");
FontManager.Instance.AddFont("JHUF", 32F, FontStyle.Regular, "JHUF_24_Regular");
FontManager.Instance.AddFont("JHUF", 30F, FontStyle.Regular, "JHUF_22.5_Regular");
FontManager.Instance.AddFont("JHUF", 16F, FontStyle.Regular, "JHUF_12_Regular");
FontManager.Instance.AddFont("JHUF", 12F, FontStyle.Regular, "JHUF_9_Regular");
}
private static void InitializeElementResources(UIElement elem) {
elem.Resources.MergedDictionaries.Add(UITemplateDictionary.Instance);
}
}
}
| |
/*
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the MIT License. See License.txt in the project root for license information.
*/
namespace Adxstudio.Xrm.Cms
{
using System;
using System.Web;
using Adxstudio.Xrm.Security;
using Adxstudio.Xrm.Web.Security;
using Microsoft.Xrm.Client.Security;
using Microsoft.Xrm.Sdk;
using Microsoft.Xrm.Sdk.Client;
using Adxstudio.Xrm.Cms.Security;
using Adxstudio.Xrm.Services;
using Adxstudio.Xrm.Services.Query;
using Adxstudio.Xrm.Web;
using Microsoft.Xrm.Sdk.Query;
internal class PublishingStateAccessProvider : ContentMapAccessProvider
{
public PublishingStateAccessProvider(HttpContext context)
: base(context)
{
}
public PublishingStateAccessProvider(HttpContextBase context)
: base(context.GetContentMapProvider())
{
}
public PublishingStateAccessProvider(IContentMapProvider contentMapProvider)
: base(contentMapProvider)
{
}
private static bool UserCanPreview(OrganizationServiceContext context, Entity entity)
{
var website = context.GetWebsite(entity);
if (website == null)
{
return false;
}
var preview = new PreviewPermission(context, website);
return preview.IsEnabledAndPermitted;
}
protected override bool TryAssert(OrganizationServiceContext context, Entity entity, CrmEntityRight right, CrmEntityCacheDependencyTrace dependencies, ContentMap map)
{
switch (entity.LogicalName)
{
case "adx_weblink":
WebLinkNode link;
if (map.TryGetValue(entity, out link))
{
return TryAssert(link) || UserCanPreview(context, entity);
}
break;
case "adx_webpage":
WebPageNode page;
if (map.TryGetValue(entity, out page))
{
return TryAssert(page.PublishingState) || UserCanPreview(context, entity);
}
break;
case "adx_weblinkset":
WebLinkSetNode linkset;
if (map.TryGetValue(entity, out linkset))
{
return TryAssert(linkset.PublishingState) || UserCanPreview(context, entity);
}
break;
case "adx_webfile":
WebFileNode file;
if (map.TryGetValue(entity, out file))
{
return TryAssert(file.PublishingState) || UserCanPreview(context, entity);
}
break;
case "adx_shortcut":
ShortcutNode shortcut;
if (map.TryGetValue(entity, out shortcut))
{
return TryAssert(shortcut) || UserCanPreview(context, entity);
}
break;
case "adx_communityforum":
ForumNode forum;
if (map.TryGetValue(entity, out forum))
{
return TryAssert(forum.PublishingState) || UserCanPreview(context, entity);
}
break;
}
return this.TryAssert(context, entity, right, dependencies);
}
private static bool TryAssert(ShortcutNode shortcut)
{
if (!shortcut.DisableTargetValidation.GetValueOrDefault())
{
if (shortcut.WebPage != null && !shortcut.WebPage.IsReference)
{
return TryAssert(shortcut.WebPage.PublishingState);
}
if (shortcut.WebFile != null && !shortcut.WebFile.IsReference)
{
return TryAssert(shortcut.WebFile.PublishingState);
}
}
if (shortcut.Parent != null && !shortcut.Parent.IsReference)
{
return TryAssert(shortcut.Parent.PublishingState);
}
return false;
}
private static bool TryAssert(WebLinkNode link)
{
if (link.PublishingState != null && link.PublishingState.IsReference)
{
ADXTrace.Instance.TraceInfo(TraceCategory.Exception,
string.Format(@"InvalidOperationException: IsReference cannot be set to true. WebLinkNode: Id:{0},
WebLinkNode.PublishingState.Website.Id: {1}, WebLinkNode.PublishingState.Website.Name: {2},
WebLinkNode.PublishingState.Website.WebsiteLanguage: {3}, WebLinkNode.WebPage.Website.Id: {4},
WebLinkNode.WebPage.Website.Name: {5}, WebLinkNode.WebPage.Website.WebsiteLanguage: {6}",
link.Id, link.PublishingState.Website.Id, link.PublishingState.Website.Name, link.PublishingState.Website.WebsiteLanguage,
link.WebPage.Website.Id, link.WebPage.Website.Name, link.WebPage.Website.WebsiteLanguage));
throw new InvalidOperationException();
}
if (!TryAssert(link.PublishingState))
{
// the link is in a non-visible state, check for preview access
return false;
}
// the link is in a visible state, check the related web page
if (!link.DisablePageValidation.GetValueOrDefault() && link.WebPage != null && !link.WebPage.IsReference)
{
// validate the link's webpage
return TryAssert(link.WebPage.PublishingState);
}
// the link is visible and the page is valid (or the link is an external URL)
return true;
}
private static bool TryAssert(PublishingStateNode state)
{
if (state != null && state.IsReference)
{
throw new InvalidOperationException();
}
var isLanguagePublished = true;
// For Multi-Language portal, if the selected language is in draft state, we need to return false.
var contextLanguageInfo = HttpContext.Current.GetContextLanguageInfo();
if (contextLanguageInfo.IsCrmMultiLanguageEnabled)
{
isLanguagePublished = contextLanguageInfo.ContextLanguage.IsPublished;
}
if (state == null || (state.IsVisible.GetValueOrDefault() && isLanguagePublished))
{
return true;
}
return false;
}
public override bool TryAssert(OrganizationServiceContext context, Entity entity, CrmEntityRight right, CrmEntityCacheDependencyTrace dependencies)
{
if (entity == null || right == CrmEntityRight.Change)
{
return false;
}
dependencies.AddEntityDependency(entity);
dependencies.AddEntitySetDependency("adx_webrole");
dependencies.AddEntitySetDependency("adx_webrole_contact");
dependencies.AddEntitySetDependency("adx_webrole_account");
dependencies.AddEntitySetDependency("adx_websiteaccess");
var entityName = entity.LogicalName;
if (entityName == "adx_idea")
{
return entity.GetAttributeValue<bool?>("adx_approved").GetValueOrDefault(false);
}
if (entityName == "adx_ideacomment")
{
return entity.GetAttributeValue<bool?>("adx_approved").GetValueOrDefault(false);
}
EntityReference publishingStateReference = null;
Entity entityPublishingState = null;
switch (entityName)
{
case "adx_communityforumpost":
publishingStateReference = entity.GetAttributeValue<EntityReference>("adx_publishingstateid");
break;
case "adx_ad":
publishingStateReference = entity.GetAttributeValue<EntityReference>("adx_publishingstateid");
break;
// legacy entities
case "adx_event":
entityPublishingState = context.RetrieveRelatedEntity(entity, "adx_publishingstate_event");
break;
case "adx_eventschedule":
entityPublishingState = context.RetrieveRelatedEntity(entity, "adx_publishingstate_eventschedule");
break;
case "adx_eventspeaker":
entityPublishingState = context.RetrieveRelatedEntity(entity, "adx_publishingstate_eventspeaker");
break;
case "adx_eventsponsor":
entityPublishingState = context.RetrieveRelatedEntity(entity, "adx_publishingstate_eventsponsor");
break;
case "adx_survey":
entityPublishingState = context.RetrieveRelatedEntity(entity, "adx_publishingstate_survey");
break;
}
if (publishingStateReference != null)
{
entityPublishingState = context.RetrieveSingle(
"adx_publishingstate",
new[] { "adx_isvisible" },
new Condition("adx_publishingstateid", ConditionOperator.Equal, publishingStateReference.Id));
}
if (entityPublishingState == null)
{
return true;
}
dependencies.AddEntityDependency(entityPublishingState);
if (entityPublishingState.GetAttributeValue<bool?>("adx_isvisible").GetValueOrDefault())
{
return true;
}
return UserCanPreview(context, entityPublishingState);
}
/// <summary>
/// Test whether or not an Entity's publishing state is visible in the current context.
/// </summary>
public virtual bool TryAssert(OrganizationServiceContext context, Entity entity)
{
var securityContextKey = GetType().FullName;
ICacheSupportingCrmEntitySecurityProvider underlyingProvider = new ApplicationCachingCrmEntitySecurityProvider(new UncachedProvider(), new VaryByPreviewCrmEntitySecurityCacheInfoFactory(securityContextKey));
if (HttpContext.Current != null)
{
underlyingProvider = new RequestCachingCrmEntitySecurityProvider(underlyingProvider, new CrmEntitySecurityCacheInfoFactory(securityContextKey));
}
return underlyingProvider.TryAssert(context, entity, CrmEntityRight.Read);
}
internal class UncachedProvider : CacheSupportingCrmEntitySecurityProvider
{
public override bool TryAssert(OrganizationServiceContext context, Entity entity, CrmEntityRight right, CrmEntityCacheDependencyTrace dependencies)
{
if (entity == null || right == CrmEntityRight.Change)
{
return false;
}
dependencies.AddEntityDependency(entity);
dependencies.AddEntitySetDependency("adx_webrole");
dependencies.AddEntitySetDependency("adx_webrole_contact");
dependencies.AddEntitySetDependency("adx_webrole_account");
dependencies.AddEntitySetDependency("adx_websiteaccess");
var entityName = entity.LogicalName;
// Weblinks require some special handling.
if (entityName == "adx_weblink")
{
var weblinkPublishingState = context.RetrieveRelatedEntity(entity, "adx_publishingstate_weblink");
// If a weblink has a publishing state, and that state is not visible, state access is
// denied (unless the user can preview).
if (weblinkPublishingState != null && !weblinkPublishingState.GetAttributeValue<bool?>("adx_isvisible").GetValueOrDefault())
{
dependencies.AddEntityDependency(weblinkPublishingState);
return UserCanPreview(context, entity);
}
var weblinkPage = context.RetrieveRelatedEntity(entity, "adx_webpage_weblink");
// If a weblink has an associated page, and page validation is not disabled, return the
// result of assertion on that page.
if (weblinkPage != null && !entity.GetAttributeValue<bool?>("adx_disablepagevalidation").GetValueOrDefault(false))
{
return TryAssert(context, weblinkPage, right, dependencies);
}
}
if (entityName == "adx_idea")
{
return entity.GetAttributeValue<bool?>("adx_approved").GetValueOrDefault(false);
}
if (entityName == "adx_ideacomment")
{
return entity.GetAttributeValue<bool?>("adx_approved").GetValueOrDefault(false);
}
EntityReference publishingStateReference = null;
Entity entityPublishingState = null;
switch (entityName)
{
case "adx_webpage":
publishingStateReference = entity.GetAttributeValue<EntityReference>("adx_publishingstateid");
break;
case "adx_weblinkset":
publishingStateReference = entity.GetAttributeValue<EntityReference>("adx_publishingstateid");
break;
case "adx_webfile":
publishingStateReference = entity.GetAttributeValue<EntityReference>("adx_publishingstateid");
break;
case "adx_communityforum":
publishingStateReference = entity.GetAttributeValue<EntityReference>("adx_publishingstateid");
break;
case "adx_communityforumpost":
publishingStateReference = entity.GetAttributeValue<EntityReference>("adx_publishingstateid");
break;
case "adx_ad":
publishingStateReference = entity.GetAttributeValue<EntityReference>("adx_publishingstateid");
break;
// legacy entities
case "adx_event":
entityPublishingState = context.RetrieveRelatedEntity(entity, "adx_publishingstate_event");
break;
case "adx_eventschedule":
entityPublishingState = context.RetrieveRelatedEntity(entity, "adx_publishingstate_eventschedule");
break;
case "adx_eventspeaker":
entityPublishingState = context.RetrieveRelatedEntity(entity, "adx_publishingstate_eventspeaker");
break;
case "adx_eventsponsor":
entityPublishingState = context.RetrieveRelatedEntity(entity, "adx_publishingstate_eventsponsor");
break;
case "adx_survey":
entityPublishingState = context.RetrieveRelatedEntity(entity, "adx_publishingstate_survey");
break;
}
if (publishingStateReference != null)
{
entityPublishingState = context.RetrieveSingle(
"adx_publishingstate",
new[] { "adx_isvisible" },
new Condition("adx_publishingstateid", ConditionOperator.Equal, publishingStateReference.Id));
}
if (entityPublishingState == null)
{
return true;
}
dependencies.AddEntityDependency(entityPublishingState);
if (entityPublishingState.GetAttributeValue<bool?>("adx_isvisible").GetValueOrDefault())
{
return true;
}
return UserCanPreview(context, entityPublishingState);
}
private static bool UserCanPreview(OrganizationServiceContext context, Entity entity)
{
var website = context.GetWebsite(entity);
if (website == null)
{
return false;
}
var preview = new PreviewPermission(context, website);
return preview.IsEnabledAndPermitted;
}
}
}
}
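// --- Illustrative usage sketch (not part of the original source) ---
// A minimal sketch, assuming a portal request is in flight: `serviceContext`
// and `webPage` (an adx_webpage record) are assumed to come from the
// surrounding framework.
namespace Adxstudio.Xrm.Cms
{
using System.Web;
using Microsoft.Xrm.Sdk;
using Microsoft.Xrm.Sdk.Client;
internal static class PublishingStateAccessProviderUsageSketch
{
internal static bool IsVisibleToCurrentUser(OrganizationServiceContext serviceContext, Entity webPage)
{
var provider = new PublishingStateAccessProvider(HttpContext.Current);
// True when the record's publishing state is visible to the current user
// (or the user holds preview permission); false otherwise.
return provider.TryAssert(serviceContext, webPage);
}
}
}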
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System;
internal class TestApp
{
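// Each test_N method below exercises a different unsafe access pattern against the B/AA/A
// helper types defined elsewhere in this test (address-of on locals and parameters, fixed
// statements over fields and arrays, pointer arithmetic, and casts from integer values).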
private static unsafe long test_5(B b)
{
return (&b)->m_bval;
}
private static unsafe long test_12()
{
fixed (B* pb = &AA.s_x.m_b)
{
return pb->m_bval;
}
}
private static unsafe long test_19(B* pb, long i)
{
return (&pb[i * 2])->m_bval;
}
private static unsafe long test_26(B* pb1, B* pb2)
{
return (pb1 >= pb2 ? pb1 : null)->m_bval;
}
private static unsafe long test_33(long pb)
{
return ((B*)pb)->m_bval;
}
private static unsafe long test_40(B* pb)
{
return pb[0].m_bval;
}
private static unsafe long test_47(B[] ab, long i)
{
fixed (B* pb = &ab[i])
{
return pb[0].m_bval;
}
}
private static unsafe long test_54(B* pb)
{
return (pb += 6)[0].m_bval;
}
private static unsafe long test_61(B* pb, long[,,] i, long ii)
{
return (&pb[++i[--ii, 0, 0]])[0].m_bval;
}
private static unsafe long test_68(AA* px)
{
return ((B*)AA.get_pb_i(px))[0].m_bval;
}
private static unsafe long test_75(byte diff, A* pa)
{
return ((B*)(((byte*)pa) + diff))[0].m_bval;
}
private static unsafe long test_82()
{
AA loc_x = new AA(0, 100);
return AA.get_bv1((&loc_x.m_b));
}
private static unsafe long test_89(B[][] ab, long i, long j)
{
fixed (B* pb = &ab[i][j])
{
return AA.get_bv1(pb);
}
}
private static unsafe long test_96(B* pb1, long i)
{
B* pb;
return AA.get_bv1((pb = (B*)(((byte*)pb1) + i * sizeof(B))));
}
private static unsafe long test_103(B* pb, long[,,] i, long ii, byte jj)
{
return AA.get_bv1((&pb[i[ii - jj, 0, ii - jj] = ii - 1]));
}
private static unsafe long test_110(ulong ub, byte lb)
{
return AA.get_bv1(((B*)(ub | lb)));
}
private static unsafe long test_117(long p, long s)
{
return AA.get_bv1(((B*)((p >> 4) | s)));
}
private static unsafe long test_124(B[] ab)
{
fixed (B* pb = &ab[0])
{
return AA.get_bv2(*pb);
}
}
private static unsafe long test_131(B* pb)
{
return AA.get_bv2(*(++pb));
}
private static unsafe long test_138(B* pb, long[] i, long ii)
{
return AA.get_bv2(*(&pb[i[ii]]));
}
private static unsafe long test_145(AA* px)
{
return AA.get_bv2(*(AA.get_pb_1(px) + 1));
}
private static unsafe long test_152(long pb)
{
return AA.get_bv2(*((B*)checked(((long)pb) + 1)));
}
private static unsafe long test_159(B* pb)
{
return AA.get_bv3(ref *(pb--));
}
private static unsafe long test_166(AA[,] ab, long i)
{
long j = 0;
fixed (B* pb = &ab[--i, ++j].m_b)
{
return AA.get_bv3(ref *pb);
}
}
private static unsafe long test_173(B* pb1, long i)
{
B* pb;
return AA.get_bv3(ref *(pb = pb1 + i));
}
private static unsafe long test_180(B* pb1, B* pb2)
{
return AA.get_bv3(ref *(pb1 > pb2 ? pb2 : null));
}
private static unsafe long test_187(long pb)
{
return AA.get_bv3(ref *((B*)pb));
}
private static unsafe long test_194(double* pb, long i)
{
return AA.get_bv3(ref *((B*)(pb + i)));
}
private static unsafe long test_201(ref B b)
{
fixed (B* pb = &b)
{
return pb->m_bval == 100 ? 100 : 101;
}
}
private static unsafe long test_208(B* pb)
{
return (--pb)->m_bval == 100 ? 100 : 101;
}
private static unsafe long test_215(B* pb, long i)
{
return (&pb[-(i << (int)i)])->m_bval == 100 ? 100 : 101;
}
private static unsafe long test_222(AA* px)
{
return AA.get_pb(px)->m_bval == 100 ? 100 : 101;
}
private static unsafe long test_229(long pb)
{
return ((B*)checked((long)pb))->m_bval == 100 ? 100 : 101;
}
private static unsafe long test_236(B* pb)
{
return AA.get_i1(&(pb++)->m_bval);
}
private static unsafe long test_243(B[,] ab, long i, long j)
{
fixed (B* pb = &ab[i, j])
{
return AA.get_i1(&pb->m_bval);
}
}
private static unsafe long test_250(B* pb1)
{
B* pb;
return AA.get_i1(&(pb = pb1 - 8)->m_bval);
}
private static unsafe long test_257(B* pb, B* pb1, B* pb2)
{
return AA.get_i1(&(pb = pb + (pb2 - pb1))->m_bval);
}
private static unsafe long test_264(B* pb1, bool trig)
{
fixed (B* pb = &AA.s_x.m_b)
{
return AA.get_i1(&(trig ? pb : pb1)->m_bval);
}
}
private static unsafe long test_271(byte* pb)
{
return AA.get_i1(&((B*)(pb + 7))->m_bval);
}
private static unsafe long test_278(B b)
{
return AA.get_i2((&b)->m_bval);
}
private static unsafe long test_285()
{
fixed (B* pb = &AA.s_x.m_b)
{
return AA.get_i2(pb->m_bval);
}
}
private static unsafe long test_292(B* pb, long i)
{
return AA.get_i2((&pb[i * 2])->m_bval);
}
private static unsafe long test_299(B* pb1, B* pb2)
{
return AA.get_i2((pb1 >= pb2 ? pb1 : null)->m_bval);
}
private static unsafe long test_306(long pb)
{
return AA.get_i2(((B*)pb)->m_bval);
}
private static unsafe long test_313(B* pb)
{
return AA.get_i3(ref pb->m_bval);
}
private static unsafe long test_320(B[] ab, long i)
{
fixed (B* pb = &ab[i])
{
return AA.get_i3(ref pb->m_bval);
}
}
private static unsafe long test_327(B* pb)
{
return AA.get_i3(ref (pb += 6)->m_bval);
}
private static unsafe long test_334(B* pb, long[,,] i, long ii)
{
return AA.get_i3(ref (&pb[++i[--ii, 0, 0]])->m_bval);
}
private static unsafe long test_341(AA* px)
{
return AA.get_i3(ref ((B*)AA.get_pb_i(px))->m_bval);
}
private static unsafe long test_348(byte diff, A* pa)
{
return AA.get_i3(ref ((B*)(((byte*)pa) + diff))->m_bval);
}
private static unsafe long test_355()
{
AA loc_x = new AA(0, 100);
return AA.get_bv1((&loc_x.m_b)) != 100 ? 99 : 100;
}
private static unsafe long test_362(B[][] ab, long i, long j)
{
fixed (B* pb = &ab[i][j])
{
return AA.get_bv1(pb) != 100 ? 99 : 100;
}
}
private static unsafe long test_369(B* pb1, long i)
{
B* pb;
return AA.get_bv1((pb = (B*)(((byte*)pb1) + i * sizeof(B)))) != 100 ? 99 : 100;
}
private static unsafe long test_376(B* pb, long[,,] i, long ii, byte jj)
{
return AA.get_bv1((&pb[i[ii - jj, 0, ii - jj] = ii - 1])) != 100 ? 99 : 100;
}
private static unsafe long test_383(ulong ub, byte lb)
{
return AA.get_bv1(((B*)(ub | lb))) != 100 ? 99 : 100;
}
private static unsafe long test_390(long p, long s)
{
return AA.get_bv1(((B*)((p >> 4) | s))) != 100 ? 99 : 100;
}
private static unsafe int Main()
{
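// Each check resets shared state via AA.init_all(0) and a fresh AA(0, 100) local; on failure
// the method returns 100 plus the failing test's number, so the exit code identifies the test.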
AA loc_x = new AA(0, 100);
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_5(loc_x.m_b) != 100)
{
Console.WriteLine("test_5() failed.");
return 105;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_12() != 100)
{
Console.WriteLine("test_12() failed.");
return 112;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_19(&loc_x.m_b - 2, 1) != 100)
{
Console.WriteLine("test_19() failed.");
return 119;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_26(&loc_x.m_b, &loc_x.m_b) != 100)
{
Console.WriteLine("test_26() failed.");
return 126;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_33((long)&loc_x.m_b) != 100)
{
Console.WriteLine("test_33() failed.");
return 133;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_40(&loc_x.m_b) != 100)
{
Console.WriteLine("test_40() failed.");
return 140;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_47(new B[] { new B(), new B(), loc_x.m_b }, 2) != 100)
{
Console.WriteLine("test_47() failed.");
return 147;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_54(&loc_x.m_b - 6) != 100)
{
Console.WriteLine("test_54() failed.");
return 154;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_61(&loc_x.m_b - 1, new long[,,] { { { 0 } }, { { 0 } } }, 2) != 100)
{
Console.WriteLine("test_61() failed.");
return 161;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_68(&loc_x) != 100)
{
Console.WriteLine("test_68() failed.");
return 168;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_75((byte)(((long)&loc_x.m_b) - ((long)&loc_x.m_a)), &loc_x.m_a) != 100)
{
Console.WriteLine("test_75() failed.");
return 175;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_82() != 100)
{
Console.WriteLine("test_82() failed.");
return 182;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_89(new B[][] { new B[] { new B(), new B() }, new B[] { new B(), loc_x.m_b } }, 1, 1) != 100)
{
Console.WriteLine("test_89() failed.");
return 189;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_96(&loc_x.m_b - 8, 8) != 100)
{
Console.WriteLine("test_96() failed.");
return 196;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_103(&loc_x.m_b - 1, new long[,,] { { { 0 } }, { { 0 } } }, 2, 2) != 100)
{
Console.WriteLine("test_103() failed.");
return 203;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_110(((ulong)&loc_x.m_b) & (~(ulong)0xff), unchecked((byte)&loc_x.m_b)) != 100)
{
Console.WriteLine("test_110() failed.");
return 210;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_117(((long)(&loc_x.m_b)) << 4, ((long)(&loc_x.m_b)) & 0xff000000) != 100)
{
Console.WriteLine("test_117() failed.");
return 217;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_124(new B[] { loc_x.m_b }) != 100)
{
Console.WriteLine("test_124() failed.");
return 224;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_131(&loc_x.m_b - 1) != 100)
{
Console.WriteLine("test_131() failed.");
return 231;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_138(&loc_x.m_b - 1, new long[] { 0, 1 }, 1) != 100)
{
Console.WriteLine("test_138() failed.");
return 238;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_145(&loc_x) != 100)
{
Console.WriteLine("test_145() failed.");
return 245;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_152((long)(((long)&loc_x.m_b) - 1)) != 100)
{
Console.WriteLine("test_152() failed.");
return 252;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_159(&loc_x.m_b) != 100)
{
Console.WriteLine("test_159() failed.");
return 259;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_166(new AA[,] { { new AA(), new AA() }, { new AA(), loc_x } }, 2) != 100)
{
Console.WriteLine("test_166() failed.");
return 266;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_173(&loc_x.m_b - 8, 8) != 100)
{
Console.WriteLine("test_173() failed.");
return 273;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_180(&loc_x.m_b + 1, &loc_x.m_b) != 100)
{
Console.WriteLine("test_180() failed.");
return 280;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_187((long)&loc_x.m_b) != 100)
{
Console.WriteLine("test_187() failed.");
return 287;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_194(((double*)(&loc_x.m_b)) - 4, 4) != 100)
{
Console.WriteLine("test_194() failed.");
return 294;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_201(ref loc_x.m_b) != 100)
{
Console.WriteLine("test_201() failed.");
return 301;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_208(&loc_x.m_b + 1) != 100)
{
Console.WriteLine("test_208() failed.");
return 308;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_215(&loc_x.m_b + 2, 1) != 100)
{
Console.WriteLine("test_215() failed.");
return 315;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_222(&loc_x) != 100)
{
Console.WriteLine("test_222() failed.");
return 322;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_229((long)(long)&loc_x.m_b) != 100)
{
Console.WriteLine("test_229() failed.");
return 329;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_236(&loc_x.m_b) != 100)
{
Console.WriteLine("test_236() failed.");
return 336;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_243(new B[,] { { new B(), new B() }, { new B(), loc_x.m_b } }, 1, 1) != 100)
{
Console.WriteLine("test_243() failed.");
return 343;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_250(&loc_x.m_b + 8) != 100)
{
Console.WriteLine("test_250() failed.");
return 350;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_257(&loc_x.m_b - 2, &loc_x.m_b - 1, &loc_x.m_b + 1) != 100)
{
Console.WriteLine("test_257() failed.");
return 357;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_264(&loc_x.m_b, true) != 100)
{
Console.WriteLine("test_264() failed.");
return 364;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_271(((byte*)(&loc_x.m_b)) - 7) != 100)
{
Console.WriteLine("test_271() failed.");
return 371;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_278(loc_x.m_b) != 100)
{
Console.WriteLine("test_278() failed.");
return 378;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_285() != 100)
{
Console.WriteLine("test_285() failed.");
return 385;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_292(&loc_x.m_b - 2, 1) != 100)
{
Console.WriteLine("test_292() failed.");
return 392;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_299(&loc_x.m_b, &loc_x.m_b) != 100)
{
Console.WriteLine("test_299() failed.");
return 399;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_306((long)&loc_x.m_b) != 100)
{
Console.WriteLine("test_306() failed.");
return 406;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_313(&loc_x.m_b) != 100)
{
Console.WriteLine("test_313() failed.");
return 413;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_320(new B[] { new B(), new B(), loc_x.m_b }, 2) != 100)
{
Console.WriteLine("test_320() failed.");
return 420;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_327(&loc_x.m_b - 6) != 100)
{
Console.WriteLine("test_327() failed.");
return 427;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_334(&loc_x.m_b - 1, new long[,,] { { { 0 } }, { { 0 } } }, 2) != 100)
{
Console.WriteLine("test_334() failed.");
return 434;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_341(&loc_x) != 100)
{
Console.WriteLine("test_341() failed.");
return 441;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_348((byte)(((long)&loc_x.m_b) - ((long)&loc_x.m_a)), &loc_x.m_a) != 100)
{
Console.WriteLine("test_348() failed.");
return 448;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_355() != 100)
{
Console.WriteLine("test_355() failed.");
return 455;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_362(new B[][] { new B[] { new B(), new B() }, new B[] { new B(), loc_x.m_b } }, 1, 1) != 100)
{
Console.WriteLine("test_362() failed.");
return 462;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_369(&loc_x.m_b - 8, 8) != 100)
{
Console.WriteLine("test_369() failed.");
return 469;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_376(&loc_x.m_b - 1, new long[,,] { { { 0 } }, { { 0 } } }, 2, 2) != 100)
{
Console.WriteLine("test_376() failed.");
return 476;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_383(((ulong)&loc_x.m_b) & (~(ulong)0xff), unchecked((byte)&loc_x.m_b)) != 100)
{
Console.WriteLine("test_383() failed.");
return 483;
}
AA.init_all(0);
loc_x = new AA(0, 100);
if (test_390(((long)(&loc_x.m_b)) << 4, ((long)(&loc_x.m_b)) & 0xff000000) != 100)
{
Console.WriteLine("test_390() failed.");
return 490;
}
Console.WriteLine("All tests passed.");
return 100;
}
}
| |
using System;
using System.Collections.Generic;
using System.Collections.ObjectModel;
using System.ComponentModel;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.IO;
using System.Linq;
using System.Net.Http;
using System.Net.Http.Formatting;
using System.Net.Http.Headers;
using System.Web.Http.Description;
using System.Xml.Linq;
using Newtonsoft.Json;
namespace Associations.Areas.HelpPage
{
/// <summary>
/// This class will generate the samples for the help page.
/// </summary>
public class HelpPageSampleGenerator
{
/// <summary>
/// Initializes a new instance of the <see cref="HelpPageSampleGenerator"/> class.
/// </summary>
public HelpPageSampleGenerator()
{
ActualHttpMessageTypes = new Dictionary<HelpPageSampleKey, Type>();
ActionSamples = new Dictionary<HelpPageSampleKey, object>();
SampleObjects = new Dictionary<Type, object>();
SampleObjectFactories = new List<Func<HelpPageSampleGenerator, Type, object>>
{
DefaultSampleObjectFactory,
};
}
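// Illustrative usage sketch (hypothetical wiring; the registration helpers that normally seed
// these dictionaries, such as a HelpPageConfig class, are not part of this file, and
// apiDescription is assumed to come from the ApiExplorer):
//   var generator = new HelpPageSampleGenerator();
//   generator.SampleObjects.Add(typeof(string), "sample string");
//   generator.ActualHttpMessageTypes.Add(
//       new HelpPageSampleKey(SampleDirection.Response, "Values", "Get", new[] { "*" }),
//       typeof(string));
//   IDictionary<MediaTypeHeaderValue, object> responses = generator.GetSampleResponses(apiDescription);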
/// <summary>
/// Gets CLR types that are used as the content of <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/>.
/// </summary>
public IDictionary<HelpPageSampleKey, Type> ActualHttpMessageTypes { get; internal set; }
/// <summary>
/// Gets the objects that are used directly as samples for certain actions.
/// </summary>
public IDictionary<HelpPageSampleKey, object> ActionSamples { get; internal set; }
/// <summary>
/// Gets the objects that are serialized as samples by the supported formatters.
/// </summary>
public IDictionary<Type, object> SampleObjects { get; internal set; }
/// <summary>
/// Gets factories for the objects that the supported formatters will serialize as samples. Processed in order,
/// stopping when the factory successfully returns a non-<see langword="null"/> object.
/// </summary>
/// <remarks>
/// Collection includes just <see cref="ObjectGenerator.GenerateObject(Type)"/> initially. Use
/// <code>SampleObjectFactories.Insert(0, func)</code> to provide an override and
/// <code>SampleObjectFactories.Add(func)</code> to provide a fallback.</remarks>
[SuppressMessage("Microsoft.Design", "CA1006:DoNotNestGenericTypesInMemberSignatures",
Justification = "This is an appropriate nesting of generic types")]
public IList<Func<HelpPageSampleGenerator, Type, object>> SampleObjectFactories { get; private set; }
/// <summary>
/// Gets the request body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleRequests(ApiDescription api)
{
return GetSample(api, SampleDirection.Request);
}
/// <summary>
/// Gets the response body samples for a given <see cref="ApiDescription"/>.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The samples keyed by media type.</returns>
public IDictionary<MediaTypeHeaderValue, object> GetSampleResponses(ApiDescription api)
{
return GetSample(api, SampleDirection.Response);
}
/// <summary>
/// Gets the request or response body samples.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The samples keyed by media type.</returns>
public virtual IDictionary<MediaTypeHeaderValue, object> GetSample(ApiDescription api, SampleDirection sampleDirection)
{
if (api == null)
{
throw new ArgumentNullException("api");
}
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
Type type = ResolveType(api, controllerName, actionName, parameterNames, sampleDirection, out formatters);
var samples = new Dictionary<MediaTypeHeaderValue, object>();
// Use the samples provided directly for actions
var actionSamples = GetAllActionSamples(controllerName, actionName, parameterNames, sampleDirection);
foreach (var actionSample in actionSamples)
{
samples.Add(actionSample.Key.MediaType, WrapSampleIfString(actionSample.Value));
}
// Generate samples based on formatters only if the action doesn't return an HttpResponseMessage.
// We cannot rely on formatters for an HttpResponseMessage: we don't know what's inside it, and it might not even use formatters.
if (type != null && !typeof(HttpResponseMessage).IsAssignableFrom(type))
{
object sampleObject = GetSampleObject(type);
foreach (var formatter in formatters)
{
foreach (MediaTypeHeaderValue mediaType in formatter.SupportedMediaTypes)
{
if (!samples.ContainsKey(mediaType))
{
object sample = GetActionSample(controllerName, actionName, parameterNames, type, formatter, mediaType, sampleDirection);
// If no sample was found, try to generate one using the formatter and the sample object
if (sample == null && sampleObject != null)
{
sample = WriteSampleObjectUsingFormatter(formatter, sampleObject, type, mediaType);
}
samples.Add(mediaType, WrapSampleIfString(sample));
}
}
}
}
return samples;
}
/// <summary>
/// Search for samples that are provided directly through <see cref="ActionSamples"/>.
/// </summary>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="type">The CLR type.</param>
/// <param name="formatter">The formatter.</param>
/// <param name="mediaType">The media type.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or for a response.</param>
/// <returns>The sample that matches the parameters.</returns>
public virtual object GetActionSample(string controllerName, string actionName, IEnumerable<string> parameterNames, Type type, MediaTypeFormatter formatter, MediaTypeHeaderValue mediaType, SampleDirection sampleDirection)
{
object sample;
// First, try to get the sample provided for the specified mediaType, sampleDirection, controllerName, actionName and parameterNames.
// If not found, try to get the sample provided for the specified mediaType, sampleDirection, controllerName and actionName regardless of the parameterNames.
// If still not found, try to get the sample provided for the specified mediaType and type.
// Finally, try to get the sample provided for the specified mediaType.
if (ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, parameterNames), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, sampleDirection, controllerName, actionName, new[] { "*" }), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType, type), out sample) ||
ActionSamples.TryGetValue(new HelpPageSampleKey(mediaType), out sample))
{
return sample;
}
return null;
}
/// <summary>
/// Gets the sample object that will be serialized by the formatters.
/// First, it will look at the <see cref="SampleObjects"/>. If no sample object is found, it will try to create
/// one using <see cref="DefaultSampleObjectFactory"/> (which wraps an <see cref="ObjectGenerator"/>) and other
/// factories in <see cref="SampleObjectFactories"/>.
/// </summary>
/// <param name="type">The type.</param>
/// <returns>The sample object.</returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes",
Justification = "Even if all items in SampleObjectFactories throw, problem will be visible as missing sample.")]
public virtual object GetSampleObject(Type type)
{
object sampleObject;
if (!SampleObjects.TryGetValue(type, out sampleObject))
{
// No specific object available, try our factories.
foreach (Func<HelpPageSampleGenerator, Type, object> factory in SampleObjectFactories)
{
if (factory == null)
{
continue;
}
try
{
sampleObject = factory(this, type);
if (sampleObject != null)
{
break;
}
}
catch
{
// Ignore any problems encountered in the factory; go on to the next one (if any).
}
}
}
return sampleObject;
}
/// <summary>
/// Resolves the actual type of <see cref="System.Net.Http.ObjectContent{T}"/> passed to the <see cref="System.Net.Http.HttpRequestMessage"/> in an action.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <returns>The type.</returns>
public virtual Type ResolveHttpRequestMessageType(ApiDescription api)
{
string controllerName = api.ActionDescriptor.ControllerDescriptor.ControllerName;
string actionName = api.ActionDescriptor.ActionName;
IEnumerable<string> parameterNames = api.ParameterDescriptions.Select(p => p.Name);
Collection<MediaTypeFormatter> formatters;
return ResolveType(api, controllerName, actionName, parameterNames, SampleDirection.Request, out formatters);
}
/// <summary>
/// Resolves the type of the action parameter or return value when <see cref="HttpRequestMessage"/> or <see cref="HttpResponseMessage"/> is used.
/// </summary>
/// <param name="api">The <see cref="ApiDescription"/>.</param>
/// <param name="controllerName">Name of the controller.</param>
/// <param name="actionName">Name of the action.</param>
/// <param name="parameterNames">The parameter names.</param>
/// <param name="sampleDirection">The value indicating whether the sample is for a request or a response.</param>
/// <param name="formatters">The formatters.</param>
[SuppressMessage("Microsoft.Design", "CA1021:AvoidOutParameters", Justification = "This is only used in advanced scenarios.")]
public virtual Type ResolveType(ApiDescription api, string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection, out Collection<MediaTypeFormatter> formatters)
{
if (!Enum.IsDefined(typeof(SampleDirection), sampleDirection))
{
throw new InvalidEnumArgumentException("sampleDirection", (int)sampleDirection, typeof(SampleDirection));
}
if (api == null)
{
throw new ArgumentNullException("api");
}
Type type;
if (ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, parameterNames), out type) ||
ActualHttpMessageTypes.TryGetValue(new HelpPageSampleKey(sampleDirection, controllerName, actionName, new[] { "*" }), out type))
{
// Re-compute the supported formatters based on type
Collection<MediaTypeFormatter> newFormatters = new Collection<MediaTypeFormatter>();
foreach (var formatter in api.ActionDescriptor.Configuration.Formatters)
{
if (IsFormatSupported(sampleDirection, formatter, type))
{
newFormatters.Add(formatter);
}
}
formatters = newFormatters;
}
else
{
switch (sampleDirection)
{
case SampleDirection.Request:
ApiParameterDescription requestBodyParameter = api.ParameterDescriptions.FirstOrDefault(p => p.Source == ApiParameterSource.FromBody);
type = requestBodyParameter == null ? null : requestBodyParameter.ParameterDescriptor.ParameterType;
formatters = api.SupportedRequestBodyFormatters;
break;
case SampleDirection.Response:
default:
type = api.ResponseDescription.ResponseType ?? api.ResponseDescription.DeclaredType;
formatters = api.SupportedResponseFormatters;
break;
}
}
return type;
}
/// <summary>
/// Writes the sample object using formatter.
/// </summary>
/// <param name="formatter">The formatter.</param>
/// <param name="value">The value.</param>
/// <param name="type">The type.</param>
/// <param name="mediaType">Type of the media.</param>
/// <returns></returns>
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "The exception is recorded as InvalidSample.")]
public virtual object WriteSampleObjectUsingFormatter(MediaTypeFormatter formatter, object value, Type type, MediaTypeHeaderValue mediaType)
{
if (formatter == null)
{
throw new ArgumentNullException("formatter");
}
if (mediaType == null)
{
throw new ArgumentNullException("mediaType");
}
object sample = String.Empty;
MemoryStream ms = null;
HttpContent content = null;
try
{
if (formatter.CanWriteType(type))
{
ms = new MemoryStream();
content = new ObjectContent(type, value, formatter, mediaType);
formatter.WriteToStreamAsync(type, value, ms, content, null).Wait();
ms.Position = 0;
StreamReader reader = new StreamReader(ms);
string serializedSampleString = reader.ReadToEnd();
if (mediaType.MediaType.ToUpperInvariant().Contains("XML"))
{
serializedSampleString = TryFormatXml(serializedSampleString);
}
else if (mediaType.MediaType.ToUpperInvariant().Contains("JSON"))
{
serializedSampleString = TryFormatJson(serializedSampleString);
}
sample = new TextSample(serializedSampleString);
}
else
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"Failed to generate the sample for media type '{0}'. Cannot use formatter '{1}' to write type '{2}'.",
mediaType,
formatter.GetType().Name,
type.Name));
}
}
catch (Exception e)
{
sample = new InvalidSample(String.Format(
CultureInfo.CurrentCulture,
"An exception has occurred while using the formatter '{0}' to generate sample for media type '{1}'. Exception message: {2}",
formatter.GetType().Name,
mediaType.MediaType,
UnwrapException(e).Message));
}
finally
{
if (ms != null)
{
ms.Dispose();
}
if (content != null)
{
content.Dispose();
}
}
return sample;
}
internal static Exception UnwrapException(Exception exception)
{
AggregateException aggregateException = exception as AggregateException;
if (aggregateException != null)
{
return aggregateException.Flatten().InnerException;
}
return exception;
}
// Default factory for sample objects
private static object DefaultSampleObjectFactory(HelpPageSampleGenerator sampleGenerator, Type type)
{
// Try to create a default sample object
ObjectGenerator objectGenerator = new ObjectGenerator();
return objectGenerator.GenerateObject(type);
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatJson(string str)
{
try
{
object parsedJson = JsonConvert.DeserializeObject(str);
return JsonConvert.SerializeObject(parsedJson, Formatting.Indented);
}
catch
{
// can't parse JSON, return the original string
return str;
}
}
[SuppressMessage("Microsoft.Design", "CA1031:DoNotCatchGeneralExceptionTypes", Justification = "Handling the failure by returning the original string.")]
private static string TryFormatXml(string str)
{
try
{
XDocument xml = XDocument.Parse(str);
return xml.ToString();
}
catch
{
// can't parse XML, return the original string
return str;
}
}
private static bool IsFormatSupported(SampleDirection sampleDirection, MediaTypeFormatter formatter, Type type)
{
switch (sampleDirection)
{
case SampleDirection.Request:
return formatter.CanReadType(type);
case SampleDirection.Response:
return formatter.CanWriteType(type);
}
return false;
}
private IEnumerable<KeyValuePair<HelpPageSampleKey, object>> GetAllActionSamples(string controllerName, string actionName, IEnumerable<string> parameterNames, SampleDirection sampleDirection)
{
HashSet<string> parameterNamesSet = new HashSet<string>(parameterNames, StringComparer.OrdinalIgnoreCase);
foreach (var sample in ActionSamples)
{
HelpPageSampleKey sampleKey = sample.Key;
if (String.Equals(controllerName, sampleKey.ControllerName, StringComparison.OrdinalIgnoreCase) &&
String.Equals(actionName, sampleKey.ActionName, StringComparison.OrdinalIgnoreCase) &&
(sampleKey.ParameterNames.SetEquals(new[] { "*" }) || parameterNamesSet.SetEquals(sampleKey.ParameterNames)) &&
sampleDirection == sampleKey.SampleDirection)
{
yield return sample;
}
}
}
private static object WrapSampleIfString(object sample)
{
string stringSample = sample as string;
if (stringSample != null)
{
return new TextSample(stringSample);
}
return sample;
}
}
}
| |
// Copyright (c) 2015, Outercurve Foundation.
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without modification,
// are permitted provided that the following conditions are met:
//
// - Redistributions of source code must retain the above copyright notice, this
// list of conditions and the following disclaimer.
//
// - Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
// and/or other materials provided with the distribution.
//
// - Neither the name of the Outercurve Foundation nor the names of its
// contributors may be used to endorse or promote products derived from this
// software without specific prior written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
// ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
using System;
using System.Linq;
using System.Collections.Generic;
using System.Web.UI;
using System.Web.UI.WebControls;
using WebsitePanel.Providers.HostedSolution;
namespace WebsitePanel.Portal.ExchangeServer.UserControls
{
public partial class AccountsList : WebsitePanelControlBase
{
private enum SelectedState
{
All,
Selected,
Unselected
}
public bool Disabled
{
get { return ViewState["Disabled"] != null ? (bool)ViewState["Disabled"] : false; }
set { ViewState["Disabled"] = value; }
}
public bool EnableMailboxOnly
{
get { return ViewState["EnableMailboxOnly"] != null ? (bool)ViewState["EnableMailboxOnly"] : false; }
set { ViewState["EnableMailboxOnly"] = value; }
}
public bool MailboxesEnabled
{
get { return ViewState["MailboxesEnabled"] != null ? (bool)ViewState["MailboxesEnabled"] : false; }
set { ViewState["MailboxesEnabled"] = value; }
}
public bool ContactsEnabled
{
get { return ViewState["ContactsEnabled"] != null ? (bool)ViewState["ContactsEnabled"] : false; }
set { ViewState["ContactsEnabled"] = value; }
}
public bool DistributionListsEnabled
{
get { return ViewState["DistributionListsEnabled"] != null ? (bool)ViewState["DistributionListsEnabled"] : false; }
set { ViewState["DistributionListsEnabled"] = value; }
}
public bool SecurityGroupsEnabled
{
get { return ViewState["SecurityGroupsEnabled"] != null ? (bool)ViewState["SecurityGroupsEnabled"] : false; }
set { ViewState["SecurityGroupsEnabled"] = value; }
}
public bool SharedMailboxEnabled
{
get { return ViewState["SharedMailboxEnabled"] != null ? (bool)ViewState["SharedMailboxEnabled"] : false; }
set { ViewState["SharedMailboxEnabled"] = value; }
}
public int ExcludeAccountId
{
get { return PanelRequest.AccountID; }
}
public void SetAccounts(ExchangeAccount[] accounts)
{
BindAccounts(accounts, false);
}
public string[] GetAccounts()
{
// get all accounts currently bound to the grid
List<ExchangeAccount> selectedAccounts = GetGridViewAccounts(gvAccounts, SelectedState.All);
List<string> accountNames = new List<string>();
foreach (ExchangeAccount account in selectedAccounts)
accountNames.Add(account.AccountName);
return accountNames.ToArray();
}
public IDictionary<string, ExchangeAccountType> GetFullAccounts()
{
// get all accounts currently bound to the grid
List<ExchangeAccount> selectedAccounts = GetGridViewAccounts(gvAccounts, SelectedState.All);
IDictionary<string, ExchangeAccountType> accounts = new Dictionary<string, ExchangeAccountType>();
foreach (ExchangeAccount account in selectedAccounts)
{
accounts.Add(account.AccountName, account.AccountType);
}
return accounts;
}
protected void Page_Load(object sender, EventArgs e)
{
// toggle controls
if (!IsPostBack)
{
if (Disabled)
{
btnAdd.Visible = btnDelete.Visible = gvAccounts.Columns[0].Visible = false;
}
chkIncludeMailboxes.Visible = chkIncludeRooms.Visible = chkIncludeEquipment.Visible = MailboxesEnabled;
chkIncludeMailboxes.Checked = chkIncludeRooms.Checked = chkIncludeEquipment.Checked = MailboxesEnabled;
if (EnableMailboxOnly)
{
chkIncludeRooms.Checked = false;
chkIncludeRooms.Visible = false;
chkIncludeEquipment.Checked = false;
chkIncludeEquipment.Visible = false;
}
chkIncludeContacts.Visible = ContactsEnabled;
chkIncludeContacts.Checked = ContactsEnabled;
chkIncludeLists.Visible = DistributionListsEnabled;
chkIncludeLists.Checked = DistributionListsEnabled;
chkIncludeGroups.Visible = SecurityGroupsEnabled;
chkIncludeGroups.Checked = SecurityGroupsEnabled;
chkIncludeSharedMailbox.Visible = SharedMailboxEnabled;
chkIncludeSharedMailbox.Checked = SharedMailboxEnabled;
gvAccounts.Columns[3].Visible = gvPopupAccounts.Columns[3].Visible = SecurityGroupsEnabled;
}
// register javascript
if (!Page.ClientScript.IsClientScriptBlockRegistered("SelectAllCheckboxes"))
{
string script = @" function SelectAllCheckboxes(box)
{
var state = box.checked;
var elm = box.parentElement.parentElement.parentElement.parentElement.getElementsByTagName(""INPUT"");
for(i = 0; i < elm.length; i++)
if(elm[i].type == ""checkbox"" && elm[i].id != box.id && elm[i].checked != state && !elm[i].disabled)
elm[i].checked = state;
}";
Page.ClientScript.RegisterClientScriptBlock(typeof(AccountsList), "SelectAllCheckboxes",
script, true);
}
}
public string GetAccountImage(int accountTypeId)
{
ExchangeAccountType accountType = (ExchangeAccountType)accountTypeId;
string imgName = "mailbox_16.gif";
if (accountType == ExchangeAccountType.Contact)
imgName = "contact_16.gif";
else if (accountType == ExchangeAccountType.DistributionList
|| accountType == ExchangeAccountType.SecurityGroup
|| accountType == ExchangeAccountType.DefaultSecurityGroup)
imgName = "dlist_16.gif";
else if (accountType == ExchangeAccountType.Room)
imgName = "room_16.gif";
else if (accountType == ExchangeAccountType.Equipment)
imgName = "equipment_16.gif";
else if (accountType == ExchangeAccountType.SharedMailbox)
imgName = "shared_16.gif";
return GetThemedImage("Exchange/" + imgName);
}
public string GetType(int accountTypeId)
{
ExchangeAccountType accountType = (ExchangeAccountType)accountTypeId;
switch(accountType)
{
case ExchangeAccountType.DistributionList:
return "Distribution";
case ExchangeAccountType.SecurityGroup:
return "Security";
case ExchangeAccountType.DefaultSecurityGroup:
return "Default";
default:
return string.Empty;
}
}
protected void btnAdd_Click(object sender, EventArgs e)
{
// bind all accounts
BindPopupAccounts();
// show modal
AddAccountsModal.Show();
}
protected void btnDelete_Click(object sender, EventArgs e)
{
// keep only the accounts that were left unchecked; checked rows are being deleted
List<ExchangeAccount> remainingAccounts = GetGridViewAccounts(gvAccounts, SelectedState.Unselected);
// rebind the main list with the remaining accounts
BindAccounts(remainingAccounts.ToArray(), false);
}
protected void btnAddSelected_Click(object sender, EventArgs e)
{
// get selected accounts
List<ExchangeAccount> selectedAccounts = GetGridViewAccounts(gvPopupAccounts, SelectedState.Selected);
// add to the main list
BindAccounts(selectedAccounts.ToArray(), true);
}
private void BindPopupAccounts()
{
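// Build the set of account types to search for from the "include" checkboxes, then filter out
// accounts already bound to the grid (when security groups are enabled) and the account being
// edited (ExcludeAccountId) before binding the popup grid.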
List<ExchangeAccountType> types = new List<ExchangeAccountType>();
if (chkIncludeMailboxes.Checked) types.Add(ExchangeAccountType.Mailbox);
if (chkIncludeContacts.Checked) types.Add(ExchangeAccountType.Contact);
if (chkIncludeLists.Checked) types.Add(ExchangeAccountType.DistributionList);
if (chkIncludeRooms.Checked) types.Add(ExchangeAccountType.Room);
if (chkIncludeEquipment.Checked) types.Add(ExchangeAccountType.Equipment);
if (chkIncludeGroups.Checked) types.Add(ExchangeAccountType.SecurityGroup);
if (chkIncludeSharedMailbox.Checked) types.Add(ExchangeAccountType.SharedMailbox);
ExchangeAccount[] accounts = ES.Services.ExchangeServer.SearchAccountsByTypes(PanelRequest.ItemID,
types.ToArray(),
ddlSearchColumn.SelectedValue, txtSearchValue.Text + "%", "");
if (SecurityGroupsEnabled)
{
accounts = accounts.Where(x => !GetAccounts().Contains(x.AccountName)).ToArray();
}
if (ExcludeAccountId > 0)
{
List<ExchangeAccount> updatedAccounts = new List<ExchangeAccount>();
foreach (ExchangeAccount account in accounts)
if (account.AccountId != ExcludeAccountId)
updatedAccounts.Add(account);
accounts = updatedAccounts.ToArray();
}
gvPopupAccounts.DataSource = accounts;
gvPopupAccounts.DataBind();
if (gvPopupAccounts.Rows.Count > 0)
{
UpdateGridViewAccounts(gvPopupAccounts);
}
}
private void BindAccounts(ExchangeAccount[] newAccounts, bool preserveExisting)
{
// get the accounts already bound to the grid
List<ExchangeAccount> accounts = new List<ExchangeAccount>();
if(preserveExisting)
accounts.AddRange(GetGridViewAccounts(gvAccounts, SelectedState.All));
// add new accounts
if (newAccounts != null)
{
foreach (ExchangeAccount newAccount in newAccounts)
{
// check if exists
bool exists = false;
foreach (ExchangeAccount account in accounts)
{
if (String.Compare(newAccount.AccountName, account.AccountName, true) == 0)
{
exists = true;
break;
}
}
if (exists)
continue;
accounts.Add(newAccount);
}
}
gvAccounts.DataSource = accounts;
gvAccounts.DataBind();
if (gvAccounts.Rows.Count > 0)
{
UpdateGridViewAccounts(gvAccounts);
}
btnDelete.Visible = gvAccounts.Rows.Count > 0;
}
private List<ExchangeAccount> GetGridViewAccounts(GridView gv, SelectedState state)
{
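// Rebuild ExchangeAccount objects from each row's data key and literal controls; the
// SelectedState filter decides whether all rows, only checked rows, or only unchecked rows
// are returned.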
List<ExchangeAccount> accounts = new List<ExchangeAccount>();
for (int i = 0; i < gv.Rows.Count; i++)
{
GridViewRow row = gv.Rows[i];
CheckBox chkSelect = (CheckBox)row.FindControl("chkSelect");
if (chkSelect == null)
continue;
ExchangeAccount account = new ExchangeAccount();
account.AccountType = (ExchangeAccountType)Enum.Parse(typeof(ExchangeAccountType), ((Literal)row.FindControl("litAccountType")).Text);
account.AccountName = (string)gv.DataKeys[i][0];
account.DisplayName = ((Literal)row.FindControl("litDisplayName")).Text;
account.PrimaryEmailAddress = ((Literal)row.FindControl("litPrimaryEmailAddress")).Text;
if(state == SelectedState.All ||
(state == SelectedState.Selected && chkSelect.Checked) ||
(state == SelectedState.Unselected && !chkSelect.Checked))
accounts.Add(account);
}
return accounts;
}
private void UpdateGridViewAccounts(GridView gv)
{
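// Rows for default security groups cannot be removed, so their checkboxes are disabled.
// If every row is a default security group, the header "select all" checkbox is disabled too
// (chkSelectAll is nulled out as soon as any other account type is seen).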
CheckBox chkSelectAll = (CheckBox)gv.HeaderRow.FindControl("chkSelectAll");
for (int i = 0; i < gv.Rows.Count; i++)
{
GridViewRow row = gv.Rows[i];
CheckBox chkSelect = (CheckBox)row.FindControl("chkSelect");
if (chkSelect == null)
{
continue;
}
ExchangeAccountType exAccountType = (ExchangeAccountType)Enum.Parse(typeof(ExchangeAccountType), ((Literal)row.FindControl("litAccountType")).Text);
if (exAccountType != ExchangeAccountType.DefaultSecurityGroup)
{
chkSelectAll = null;
chkSelect.Enabled = true;
}
else
{
chkSelect.Enabled = false;
}
}
if (chkSelectAll != null)
{
chkSelectAll.Enabled = false;
}
}
protected void chkIncludeMailboxes_CheckedChanged(object sender, EventArgs e)
{
BindPopupAccounts();
}
protected void cmdSearch_Click(object sender, ImageClickEventArgs e)
{
BindPopupAccounts();
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using System.Diagnostics;
using Internal.Runtime.Augments;
namespace System.Threading
{
/// <summary>
/// A lightweight non-recursive mutex.
///
/// Used by the wait subsystem on Unix, so this class cannot have any dependencies on the wait subsystem.
/// </summary>
internal sealed class LowLevelLock : IDisposable
{
private const int LockedMask = 1;
private const int WaiterCountIncrement = 2;
private const int MaximumPreemptingAcquireDurationMilliseconds = 200;
/// <summary>
/// Layout:
/// - Bit 0: 1 if the lock is locked, 0 otherwise
/// - Remaining bits: Number of threads waiting to acquire a lock
/// </summary>
private int _state;
#if DEBUG
private RuntimeThread _ownerThread;
#endif
/// <summary>
/// Indicates whether a thread has been signaled, but has not yet been released from the wait. See
/// <see cref="SignalWaiter"/>. Reads and writes must occur while <see cref="_monitor"/> is locked.
/// </summary>
private bool _isAnyWaitingThreadSignaled;
private FirstLevelSpinWaiter _spinWaiter;
private readonly Func<bool> _spinWaitTryAcquireCallback;
private readonly LowLevelMonitor _monitor;
public LowLevelLock()
{
#if DEBUG
_ownerThread = null;
#endif
_spinWaiter = new FirstLevelSpinWaiter();
_spinWaiter.Initialize();
_spinWaitTryAcquireCallback = SpinWaitTryAcquireCallback;
_monitor = new LowLevelMonitor();
}
~LowLevelLock()
{
Dispose();
}
public void Dispose()
{
VerifyIsNotLockedByAnyThread();
_monitor.Dispose();
GC.SuppressFinalize(this);
}
public void VerifyIsLocked()
{
#if DEBUG
Debug.Assert(_ownerThread == RuntimeThread.CurrentThread);
Debug.Assert((_state & LockedMask) != 0);
#endif
}
public void VerifyIsNotLocked()
{
#if DEBUG
Debug.Assert(_ownerThread != RuntimeThread.CurrentThread);
#endif
}
private void VerifyIsNotLockedByAnyThread()
{
#if DEBUG
Debug.Assert(_ownerThread == null);
#endif
}
private void ResetOwnerThread()
{
#if DEBUG
VerifyIsLocked();
_ownerThread = null;
#endif
}
private void SetOwnerThreadToCurrent()
{
#if DEBUG
VerifyIsNotLockedByAnyThread();
_ownerThread = RuntimeThread.CurrentThread;
#endif
}
public bool TryAcquire()
{
VerifyIsNotLocked();
// A common case is that there are no waiters, so hope for that and try to acquire the lock
int state = Interlocked.CompareExchange(ref _state, LockedMask, 0);
if (state == 0 || TryAcquire_NoFastPath(state))
{
SetOwnerThreadToCurrent();
return true;
}
return false;
}
private bool TryAcquire_NoFastPath(int state)
{
// The lock may be available, but there may be waiters. This thread could acquire the lock in that case. Acquiring
// the lock means that if this thread is repeatedly acquiring and releasing the lock, it could permanently starve
// waiters. Waiting instead in the same situation would deterministically create a lock convoy. Here, we opt for
// acquiring the lock to prevent a deterministic lock convoy in that situation, and rely on the system's
// waiting/waking implementation to mitigate starvation, even in cases where there are enough logical processors to
// accommodate all threads.
return (state & LockedMask) == 0 && Interlocked.CompareExchange(ref _state, state + LockedMask, state) == state;
}
private bool SpinWaitTryAcquireCallback() => TryAcquire_NoFastPath(_state);
public void Acquire()
{
if (!TryAcquire())
{
WaitAndAcquire();
}
}
private void WaitAndAcquire()
{
VerifyIsNotLocked();
// Spin a bit to see if the lock becomes available, before forcing the thread into a wait state
if (_spinWaiter.SpinWaitForCondition(_spinWaitTryAcquireCallback))
{
Debug.Assert((_state & LockedMask) != 0);
SetOwnerThreadToCurrent();
return;
}
_monitor.Acquire();
/// Register this thread as a waiter by incrementing the waiter count. Incrementing the waiter count and waiting on
/// the monitor need to appear atomic to <see cref="SignalWaiter"/> so that its signal won't be lost.
int state = Interlocked.Add(ref _state, WaiterCountIncrement);
// Wait on the monitor until signaled, repeatedly until the lock can be acquired by this thread
while (true)
{
// The lock may have been released before the waiter count was incremented above, so try to acquire the lock
// with the new state before waiting
if ((state & LockedMask) == 0 &&
Interlocked.CompareExchange(ref _state, state + (LockedMask - WaiterCountIncrement), state) == state)
{
break;
}
_monitor.Wait();
/// Indicate to <see cref="SignalWaiter"/> that the signaled thread has woken up
Debug.Assert(_isAnyWaitingThreadSignaled);
_isAnyWaitingThreadSignaled = false;
state = _state;
Debug.Assert((uint)state >= WaiterCountIncrement);
}
_monitor.Release();
Debug.Assert((_state & LockedMask) != 0);
SetOwnerThreadToCurrent();
}
public void Release()
{
Debug.Assert((_state & LockedMask) != 0);
ResetOwnerThread();
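// Decrementing clears the locked bit (LockedMask == 1); a nonzero result means the waiter
// count stored in the remaining bits is nonzero, so wake one waiter.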
if (Interlocked.Decrement(ref _state) != 0)
{
SignalWaiter();
}
}
private void SignalWaiter()
{
// Since the lock was already released by the caller, there are no guarantees on the state at this point. For
// instance, if there was only one thread waiting before the lock was released, then after the lock was released,
// another thread may have acquired and released the lock, and signaled the waiter, before the first thread arrives
// here. The monitor's lock is used to synchronize changes to the waiter count, so acquire the monitor and recheck
// the waiter count before signaling.
_monitor.Acquire();
/// Keep track of whether a thread has been signaled but has not yet been released from the wait.
/// <see cref="_isAnyWaitingThreadSignaled"/> is set to false when a signaled thread wakes up. Since threads can
/// preempt waiting threads and acquire the lock (see <see cref="TryAcquire"/>), it allows for example, one thread
/// to acquire and release the lock multiple times while there are multiple waiting threads. In such a case, we
/// don't want that thread to signal a waiter every time it releases the lock, as that will cause unnecessary
/// context switches with more and more signaled threads waking up, finding that the lock is still locked, and going
/// right back into a wait state. So, signal only one waiting thread at a time.
if ((uint)_state >= WaiterCountIncrement && !_isAnyWaitingThreadSignaled)
{
_isAnyWaitingThreadSignaled = true;
_monitor.Signal_Release();
return;
}
_monitor.Release();
}
}
}
| |
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
// See the LICENSE file in the project root for more information.
using Microsoft.Win32.SafeHandles;
using System.Diagnostics;
using System.Diagnostics.CodeAnalysis;
using System.Runtime.InteropServices;
using System.Security;
using System.Threading;
using System.Threading.Tasks;
namespace System.IO.Pipes
{
public abstract partial class PipeStream : Stream
{
// The Windows implementation of PipeStream sets the stream's handle during
// creation, and as such should always have a handle, but the Unix implementation
// sometimes sets the handle not during creation but later during connection.
// As such, validation during member access needs to verify a valid handle on
// Windows, but can't assume a valid handle on Unix.
internal const bool CheckOperationsRequiresSetHandle = false;
internal static string GetPipePath(string serverName, string pipeName)
{
if (serverName != "." && serverName != Interop.Sys.GetHostName())
{
// Cross-machine pipes are not supported.
throw new PlatformNotSupportedException();
}
if (pipeName.IndexOfAny(Path.GetInvalidFileNameChars()) >= 0)
{
// Since pipes are stored as files in the file system, we don't support
// pipe names that are actually paths or that otherwise have invalid
// filename characters in them.
throw new PlatformNotSupportedException();
}
// Return the pipe path
return Path.Combine(EnsurePipeDirectoryPath(), pipeName);
}
/// <summary>Throws an exception if the supplied handle does not represent a valid pipe.</summary>
/// <param name="safePipeHandle">The handle to validate.</param>
internal void ValidateHandleIsPipe(SafePipeHandle safePipeHandle)
{
Interop.Sys.FileStatus status;
int result = CheckPipeCall(Interop.Sys.FStat(safePipeHandle, out status));
if (result == 0)
{
if ((status.Mode & Interop.Sys.FileTypes.S_IFMT) != Interop.Sys.FileTypes.S_IFIFO)
{
throw new IOException(SR.IO_InvalidPipeHandle);
}
}
}
/// <summary>Initializes the handle to be used asynchronously.</summary>
/// <param name="handle">The handle.</param>
[SecurityCritical]
private void InitializeAsyncHandle(SafePipeHandle handle)
{
// nop
}
private void UninitializeAsyncHandle()
{
// nop
}
private int ReadCore(byte[] buffer, int offset, int count)
{
return ReadCoreNoCancellation(buffer, offset, count);
}
private void WriteCore(byte[] buffer, int offset, int count)
{
WriteCoreNoCancellation(buffer, offset, count);
}
private async Task<int> ReadAsyncCore(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
SemaphoreSlim activeAsync = EnsureAsyncActiveSemaphoreInitialized();
await activeAsync.WaitAsync(cancellationToken).ForceAsync();
try
{
return cancellationToken.CanBeCanceled ?
ReadCoreWithCancellation(buffer, offset, count, cancellationToken) :
ReadCoreNoCancellation(buffer, offset, count);
}
finally
{
activeAsync.Release();
}
}
private async Task WriteAsyncCore(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
SemaphoreSlim activeAsync = EnsureAsyncActiveSemaphoreInitialized();
await activeAsync.WaitAsync(cancellationToken).ForceAsync();
try
{
if (cancellationToken.CanBeCanceled)
WriteCoreWithCancellation(buffer, offset, count, cancellationToken);
else
WriteCoreNoCancellation(buffer, offset, count);
}
finally
{
activeAsync.Release();
}
}
// Blocks until the other end of the pipe has read all of the written data.
[SecurityCritical]
public void WaitForPipeDrain()
{
CheckWriteOperations();
if (!CanWrite)
{
throw Error.GetWriteNotSupported();
}
throw new PlatformNotSupportedException(); // no mechanism for this on Unix
}
// Gets the transmission mode for the pipe. This is virtual so that subclassing types can
// override this in cases where only one mode is legal (such as anonymous pipes)
public virtual PipeTransmissionMode TransmissionMode
{
[SecurityCritical]
[SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Security model of pipes: demand at creation but no subsequent demands")]
get
{
CheckPipePropertyOperations();
return PipeTransmissionMode.Byte; // Unix pipes are only byte-based, not message-based
}
}
// Gets the buffer size in the inbound direction for the pipe. This checks that the pipe has read
// access. If that passes, the call to GetNamedPipeInfo will succeed.
public virtual int InBufferSize
{
[SecurityCritical]
[SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands")]
get
{
CheckPipePropertyOperations();
if (!CanRead)
{
throw new NotSupportedException(SR.NotSupported_UnreadableStream);
}
return GetPipeBufferSize();
}
}
// Gets the buffer size in the outbound direction for the pipe. This uses the cached version
// if it's an outbound-only pipe, because GetNamedPipeInfo requires read access to the pipe.
// However, returning the cached value is a good fallback, especially if the user specified a
// value in the ctor.
public virtual int OutBufferSize
{
[SecurityCritical]
[SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Security model of pipes: demand at creation but no subsequent demands")]
get
{
CheckPipePropertyOperations();
if (!CanWrite)
{
throw new NotSupportedException(SR.NotSupported_UnwritableStream);
}
return GetPipeBufferSize();
}
}
public virtual PipeTransmissionMode ReadMode
{
[SecurityCritical]
get
{
CheckPipePropertyOperations();
return PipeTransmissionMode.Byte; // Unix pipes are only byte-based, not message-based
}
[SecurityCritical]
[SuppressMessage("Microsoft.Security", "CA2122:DoNotIndirectlyExposeMethodsWithLinkDemands", Justification = "Security model of pipes: demand at creation but no subsequent demands")]
set
{
CheckPipePropertyOperations();
if (value < PipeTransmissionMode.Byte || value > PipeTransmissionMode.Message)
{
throw new ArgumentOutOfRangeException(nameof(value), SR.ArgumentOutOfRange_TransmissionModeByteOrMsg);
}
if (value != PipeTransmissionMode.Byte) // Unix pipes are only byte-based, not message-based
{
throw new PlatformNotSupportedException();
}
// nop, since it's already the only valid value
}
}
// -----------------------------
// ---- PAL layer ends here ----
// -----------------------------
private static string s_pipeDirectoryPath;
/// <summary>
/// We want to ensure that only one asynchronous operation is actually in flight
/// at a time. The base Stream class ensures this by serializing execution via a
/// semaphore. Since we don't delegate to the base stream for Read/WriteAsync due
/// to having specialized support for cancellation, we do the same serialization here.
/// </summary>
private SemaphoreSlim _asyncActiveSemaphore;
private SemaphoreSlim EnsureAsyncActiveSemaphoreInitialized()
{
return LazyInitializer.EnsureInitialized(ref _asyncActiveSemaphore, () => new SemaphoreSlim(1, 1));
}
private static string EnsurePipeDirectoryPath()
{
const string PipesFeatureName = "pipes";
// Ideally this would simply use PersistedFiles.GetTempFeatureDirectory(PipesFeatureName) and then
// Directory.CreateDirectory to ensure it exists. But this assembly doesn't reference System.IO.FileSystem.
// As such, we'd be calling GetTempFeatureDirectory, only to then need to parse it in order
// to create each of the individual directories as part of the path. We instead access the named portions
// of the path directly and do the building of the path and directory structure manually.
// First ensure we know what the full path should be, e.g. /tmp/.dotnet/corefx/pipes/
string fullPath = s_pipeDirectoryPath;
string tempPath = null;
if (fullPath == null)
{
tempPath = Path.GetTempPath();
fullPath = Path.Combine(tempPath, PersistedFiles.TopLevelHiddenDirectory, PersistedFiles.SecondLevelDirectory, PipesFeatureName);
s_pipeDirectoryPath = fullPath;
}
// Then create the directory if it doesn't already exist. If we get any error back from stat,
// just proceed to build up the directory, failing in the CreateDirectory calls if there's some
// problem. Similarly, it's possible stat succeeds but the path is a file rather than directory; we'll
// call that success for now and let this fail later when the code tries to create a file in this "directory"
// (we don't want to overwrite/delete whatever that unknown file may be, and this is similar to other cases
// we can't control where the file system is manipulated concurrently with and separately from this code).
Interop.Sys.FileStatus ignored;
bool pathExists = Interop.Sys.Stat(fullPath, out ignored) == 0;
if (!pathExists)
{
// We need to build up the directory manually. Ensure we have the temp directory in which
// we'll create the structure, e.g. /tmp/
if (tempPath == null)
{
tempPath = Path.GetTempPath();
}
Debug.Assert(Interop.Sys.Stat(tempPath, out ignored) == 0, "Path.GetTempPath() directory could not be accessed");
// Create /tmp/.dotnet/ if it doesn't exist.
string partialPath = Path.Combine(tempPath, PersistedFiles.TopLevelHiddenDirectory);
CreateDirectory(partialPath);
// Create /tmp/.dotnet/corefx/ if it doesn't exist
partialPath = Path.Combine(partialPath, PersistedFiles.SecondLevelDirectory);
CreateDirectory(partialPath);
// Create /tmp/.dotnet/corefx/pipes/ if it doesn't exist
CreateDirectory(fullPath);
}
return fullPath;
}
private static void CreateDirectory(string directoryPath)
{
int result = Interop.Sys.MkDir(directoryPath, (int)Interop.Sys.Permissions.S_IRWXU);
// If successfully created, we're done.
if (result >= 0)
return;
// If the directory already exists, consider it a success.
Interop.ErrorInfo errorInfo = Interop.Sys.GetLastErrorInfo();
if (errorInfo.Error == Interop.Error.EEXIST)
return;
// Otherwise, fail.
throw Interop.GetExceptionForIoErrno(errorInfo, directoryPath, isDirectory: true);
}
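/// <summary>
/// Illustrative sketch (editor-added, not part of the original source): the helper above is
/// idempotent thanks to the EEXIST check, so calling it repeatedly for the same path is safe.
/// The directory name used here is hypothetical.
/// </summary>
private static void EnsureDirectoryExample()
{
    string dir = Path.Combine(Path.GetTempPath(), "corefx-pipes-example");
    CreateDirectory(dir); // creates the directory
    CreateDirectory(dir); // second call hits EEXIST and returns quietly
}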
internal static Interop.Sys.OpenFlags TranslateFlags(PipeDirection direction, PipeOptions options, HandleInheritability inheritability)
{
// Translate direction
Interop.Sys.OpenFlags flags =
direction == PipeDirection.InOut ? Interop.Sys.OpenFlags.O_RDWR :
direction == PipeDirection.Out ? Interop.Sys.OpenFlags.O_WRONLY :
Interop.Sys.OpenFlags.O_RDONLY;
// Translate options
if ((options & PipeOptions.WriteThrough) != 0)
{
flags |= Interop.Sys.OpenFlags.O_SYNC;
}
// Translate inheritability.
if ((inheritability & HandleInheritability.Inheritable) == 0)
{
flags |= Interop.Sys.OpenFlags.O_CLOEXEC;
}
// PipeOptions.Asynchronous is ignored, at least for now. Asynchronous processing
// is handled simply by queueing a work item to do the work synchronously on a pool thread.
return flags;
}
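/// <summary>
/// Illustrative sketch (editor-added, not part of the original source): the expected flag
/// translation for a write-only, write-through, non-inheritable endpoint, following the rules
/// above. The asserted bits assume the usual POSIX values for these flags.
/// </summary>
private static void TranslateFlagsExample()
{
    Interop.Sys.OpenFlags flags = TranslateFlags(
        PipeDirection.Out, PipeOptions.WriteThrough, HandleInheritability.None);
    Debug.Assert((flags & Interop.Sys.OpenFlags.O_WRONLY) != 0);  // direction
    Debug.Assert((flags & Interop.Sys.OpenFlags.O_SYNC) != 0);    // WriteThrough
    Debug.Assert((flags & Interop.Sys.OpenFlags.O_CLOEXEC) != 0); // not inheritable
}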
private unsafe int ReadCoreNoCancellation(byte[] buffer, int offset, int count)
{
DebugAssertReadWriteArgs(buffer, offset, count, _handle);
fixed (byte* bufPtr = buffer)
{
int result = CheckPipeCall(Interop.Sys.Read(_handle, bufPtr + offset, count));
Debug.Assert(result <= count);
return result;
}
}
private unsafe int ReadCoreWithCancellation(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
DebugAssertReadWriteArgs(buffer, offset, count, _handle);
Debug.Assert(cancellationToken.CanBeCanceled, "ReadCoreNoCancellation should be used if cancellation can't happen");
// Register for a cancellation request. This will throw if cancellation has already been requested,
// and otherwise will write to the cancellation pipe if/when cancellation has been requested.
using (DescriptorCancellationRegistration cancellation = RegisterForCancellation(cancellationToken))
{
bool gotRef = false;
try
{
cancellation.Poll.DangerousAddRef(ref gotRef);
fixed (byte* bufPtr = buffer)
{
// We want to wait for data to be available on either the pipe we want to read from
// or on the cancellation pipe, which would signal a cancellation request.
Interop.Sys.PollEvent* events = stackalloc Interop.Sys.PollEvent[2];
events[0] = new Interop.Sys.PollEvent
{
FileDescriptor = (int)_handle.DangerousGetHandle(),
Events = Interop.Sys.PollEvents.POLLIN
};
events[1] = new Interop.Sys.PollEvent
{
FileDescriptor = (int)cancellation.Poll.DangerousGetHandle(),
Events = Interop.Sys.PollEvents.POLLIN
};
// Some systems (at least OS X) appear to have a race condition in poll with FIFOs where the poll can
// end up not noticing writes of greater than the internal buffer size. Restarting the poll causes it
// to notice. To deal with that, we loop around poll, first starting with a small timeout and backing off
// to a larger one. This ensures we'll at least eventually notice such changes in these corner
// cases, while not adding too much overhead on systems that don't suffer from the problem.
const int InitialMsTimeout = 30, MaxMsTimeout = 2000;
for (int timeout = InitialMsTimeout; ; timeout = Math.Min(timeout * 2, MaxMsTimeout))
{
// Do the poll.
uint signaledFdCount;
Interop.CheckIo(Interop.Sys.Poll(events, 2, timeout, &signaledFdCount));
cancellationToken.ThrowIfCancellationRequested();
if (signaledFdCount != 0)
{
// Our pipe is ready. Break out of the loop to read from it. The fd may have been signaled due to
// POLLIN (data available), POLLHUP (hang-up), POLLERR (some error on the stream), etc... any such
// data will be propagated to us when we do the actual read.
break;
}
}
// Read it.
int result = CheckPipeCall(Interop.Sys.Read(_handle, bufPtr + offset, count));
Debug.Assert(result >= 0 && result <= count, "Expected 0 <= result <= count bytes, got " + result);
// return what we read.
return result;
}
}
finally
{
if (gotRef)
cancellation.Poll.DangerousRelease();
}
}
}
private unsafe void WriteCoreNoCancellation(byte[] buffer, int offset, int count)
{
DebugAssertReadWriteArgs(buffer, offset, count, _handle);
fixed (byte* bufPtr = buffer)
{
while (count > 0)
{
int bytesWritten = CheckPipeCall(Interop.Sys.Write(_handle, bufPtr + offset, count));
Debug.Assert(bytesWritten <= count);
count -= bytesWritten;
offset += bytesWritten;
}
}
}
private void WriteCoreWithCancellation(byte[] buffer, int offset, int count, CancellationToken cancellationToken)
{
DebugAssertReadWriteArgs(buffer, offset, count, _handle);
// NOTE:
// We currently ignore cancellationToken. Unlike on Windows, writes to pipes on Unix are likely to succeed
// immediately, even if no reader is currently listening, as long as there's room in the kernel buffer.
// However it's still possible for write to block if the buffer is full. We could try using a poll mechanism
// like we do for read, but checking for POLLOUT is only going to tell us that there's room to write at least
// one byte to the pipe, not enough room to write enough that we won't block. The only way to guarantee
// that would seem to be writing one byte at a time, which has huge overheads when writing large chunks of data.
// Given all this, at least for now we just do an initial check for cancellation and then call the
// non-cancelable version.
cancellationToken.ThrowIfCancellationRequested();
WriteCoreNoCancellation(buffer, offset, count);
}
/// <summary>Creates an anonymous pipe.</summary>
/// <param name="inheritability">The inheritability to try to use. This may not always be honored, depending on platform.</param>
/// <param name="reader">The resulting reader end of the pipe.</param>
/// <param name="writer">The resulting writer end of the pipe.</param>
internal static unsafe void CreateAnonymousPipe(
HandleInheritability inheritability, out SafePipeHandle reader, out SafePipeHandle writer)
{
// Allocate the safe handle objects prior to calling pipe/pipe2, in order to help slightly in low-mem situations
reader = new SafePipeHandle();
writer = new SafePipeHandle();
// Create the OS pipe
int* fds = stackalloc int[2];
CreateAnonymousPipe(inheritability, fds);
// Store the file descriptors into our safe handles
reader.SetHandle(fds[Interop.Sys.ReadEndOfPipe]);
writer.SetHandle(fds[Interop.Sys.WriteEndOfPipe]);
}
/// <summary>
/// Creates a cancellation registration. This creates a pipe that'll have one end written to
/// when cancellation is requested. The other end can be poll'd to see when cancellation has occurred.
/// </summary>
private static unsafe DescriptorCancellationRegistration RegisterForCancellation(CancellationToken cancellationToken)
{
Debug.Assert(cancellationToken.CanBeCanceled);
// Fast path: before doing any real work, throw if cancellation has already been requested
cancellationToken.ThrowIfCancellationRequested();
// Create a pipe we can use to send a signal to the reader/writer
// to wake it up if blocked.
SafePipeHandle poll, send;
CreateAnonymousPipe(HandleInheritability.None, out poll, out send);
// Register a cancellation callback to send a byte to the cancellation pipe
CancellationTokenRegistration reg = cancellationToken.Register(s =>
{
SafePipeHandle sendRef = (SafePipeHandle)s;
byte b = 1;
Interop.CheckIo(Interop.Sys.Write(sendRef, &b, 1));
}, send);
// Return a disposable struct that will unregister the cancellation registration
// and dispose of the pipe.
return new DescriptorCancellationRegistration(reg, poll, send);
}
/// <summary>Disposable struct that'll clean up the results of a RegisterForCancellation operation.</summary>
private struct DescriptorCancellationRegistration : IDisposable
{
private CancellationTokenRegistration _registration;
private readonly SafePipeHandle _poll;
private readonly SafePipeHandle _send;
internal DescriptorCancellationRegistration(
CancellationTokenRegistration registration,
SafePipeHandle poll, SafePipeHandle send)
{
Debug.Assert(poll != null);
Debug.Assert(send != null);
_registration = registration;
_poll = poll;
_send = send;
}
internal SafePipeHandle Poll { get { return _poll; } }
public void Dispose()
{
// Dispose the registration prior to disposing of the pipe handles.
// Otherwise a concurrent cancellation request could try to use
// the already disposed pipe.
_registration.Dispose();
if (_send != null)
_send.Dispose();
if (_poll != null)
_poll.Dispose();
}
}
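/// <summary>
/// Illustrative sketch (editor-added, not part of the original source): the "self-pipe trick" in
/// miniature. The registration's callback writes one byte to the send end when cancellation is
/// requested, so a poll on the receive end wakes up. Uses only members defined above; the
/// one-second timeout is an arbitrary choice and the token must be cancelable.
/// </summary>
private static unsafe bool PollForCancellationExample(CancellationToken cancellationToken)
{
    using (DescriptorCancellationRegistration cancellation = RegisterForCancellation(cancellationToken))
    {
        bool gotRef = false;
        try
        {
            cancellation.Poll.DangerousAddRef(ref gotRef);
            Interop.Sys.PollEvent evt = new Interop.Sys.PollEvent
            {
                FileDescriptor = (int)cancellation.Poll.DangerousGetHandle(),
                Events = Interop.Sys.PollEvents.POLLIN
            };
            uint signaledFdCount;
            Interop.CheckIo(Interop.Sys.Poll(&evt, 1, 1000, &signaledFdCount));
            return signaledFdCount != 0; // a byte arrived, i.e. cancellation was requested
        }
        finally
        {
            if (gotRef)
                cancellation.Poll.DangerousRelease();
        }
    }
}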
private int CheckPipeCall(int result)
{
if (result == -1)
{
Interop.ErrorInfo errorInfo = Interop.Sys.GetLastErrorInfo();
if (errorInfo.Error == Interop.Error.EPIPE)
State = PipeState.Broken;
throw Interop.GetExceptionForIoErrno(errorInfo);
}
return result;
}
internal void InitializeBufferSize(SafePipeHandle handle, int bufferSize)
{
// bufferSize is just advisory and ignored if platform does not support setting pipe capacity via fcntl.
if (bufferSize > 0 && Interop.Sys.Fcntl.CanGetSetPipeSz)
{
CheckPipeCall(Interop.Sys.Fcntl.SetPipeSz(handle, bufferSize));
}
}
private int GetPipeBufferSize()
{
if (!Interop.Sys.Fcntl.CanGetSetPipeSz)
{
throw new PlatformNotSupportedException();
}
// If we have a handle, get the capacity of the pipe (there's no distinction between in/out direction).
// If we don't, the pipe has been created but not yet connected (in the case of named pipes),
// so just return the buffer size that was passed to the constructor.
return _handle != null ?
CheckPipeCall(Interop.Sys.Fcntl.GetPipeSz(_handle)) :
_outBufferSize;
}
internal static unsafe void CreateAnonymousPipe(HandleInheritability inheritability, int* fdsptr)
{
var flags = (inheritability & HandleInheritability.Inheritable) == 0 ?
Interop.Sys.PipeFlags.O_CLOEXEC : 0;
Interop.CheckIo(Interop.Sys.Pipe(fdsptr, flags));
}
}
}
| |
using System;
using System.ComponentModel;
using System.Linq;
using NetGore.Audio;
using SFML.Graphics;
namespace NetGore.World
{
/// <summary>
/// The base class of all entities, which are physical objects that reside in the virtual world, and can interact
/// with other entities.
/// </summary>
public abstract class Entity : ISpatial, IAudioEmitter, IDisposable
{
const string _entityCategoryString = "Entity";
#if !TOPDOWN
static readonly Vector2 _defaultGravity;
#endif
static readonly Vector2 _maxVelocity;
bool _isDisposed;
Vector2 _position;
Vector2 _size;
#if !TOPDOWN
Entity _standingOn;
#endif
Vector2 _velocity;
float _weight = 1.0f;
/// <summary>
/// Notifies listeners that the Entity is being disposed. This event is raised before any actual
/// disposing takes place, but the Entity's IsDisposed property will be true. This event
/// is guaranteed to only be raised once.
/// </summary>
[Browsable(false)]
public event TypedEventHandler<Entity> Disposed;
/// <summary>
/// Notifies listeners when this <see cref="ISpatial"/> has moved.
/// </summary>
[Browsable(false)]
public event TypedEventHandler<ISpatial, EventArgs<Vector2>> Moved;
/// <summary>
/// When overridden in the derived class, allows for handling for when the <see cref="ISpatial.Moved"/> event occurs.
/// This is the same as listening to the event directly, but with less overhead.
/// </summary>
/// <param name="oldPos">The old position.</param>
protected virtual void OnMoved(Vector2 oldPos)
{
}
/// <summary>
/// Notifies listeners when this <see cref="ISpatial"/> has been resized.
/// </summary>
[Browsable(false)]
public event TypedEventHandler<ISpatial, EventArgs<Vector2>> Resized;
/// <summary>
/// When overridden in the derived class, allows for handling for when the <see cref="ISpatial.Resized"/> event occurs.
/// This is the same as listening to the event directly, but with less overhead.
/// </summary>
/// <param name="oldSize">The old size.</param>
protected virtual void OnResized(Vector2 oldSize)
{
}
/// <summary>
/// Tries to move the <see cref="ISpatial"/>.
/// </summary>
/// <param name="newPos">The new position.</param>
/// <returns>True if the <see cref="ISpatial"/> was moved to the <paramref name="newPos"/>; otherwise false.</returns>
bool ISpatial.TryMove(Vector2 newPos)
{
Position = newPos;
return true;
}
/// <summary>
/// Gets if this <see cref="ISpatial"/> can ever be moved with <see cref="ISpatial.TryMove"/>.
/// </summary>
bool ISpatial.SupportsMove
{
get { return true; }
}
/// <summary>
/// Gets if this <see cref="ISpatial"/> can ever be resized with <see cref="ISpatial.TryResize"/>.
/// </summary>
bool ISpatial.SupportsResize
{
get { return true; }
}
/// <summary>
/// Initializes the <see cref="Entity"/> class.
/// </summary>
static Entity()
{
var settings = EngineSettings.Instance;
// Cache the settings we care about
#if !TOPDOWN
_defaultGravity = settings.Gravity;
#endif
_maxVelocity = settings.MaxVelocity;
}
/// <summary>
/// Initializes a new instance of the <see cref="Entity"/> class.
/// </summary>
protected Entity() : this(Vector2.Zero, Vector2.One)
{
}
/// <summary>
/// Releases unmanaged resources and performs other cleanup operations before the
/// <see cref="Entity"/> is reclaimed by garbage collection.
/// </summary>
~Entity()
{
if (IsDisposed)
return;
_isDisposed = true;
if (Disposed != null)
Disposed.Raise(this, EventArgs.Empty);
HandleDispose(false);
}
/// <summary>
/// Initializes a new instance of the <see cref="Entity"/> class.
/// </summary>
/// <param name="position">The initial world position.</param>
/// <param name="size">The initial size.</param>
protected Entity(Vector2 position, Vector2 size)
{
_position = position;
_size = size;
}
/// <summary>
/// Tries to resize the <see cref="ISpatial"/>.
/// </summary>
/// <param name="newSize">The new size.</param>
/// <returns>True if the <see cref="ISpatial"/> was resized to the <paramref name="newSize"/>; otherwise false.</returns>
bool ISpatial.TryResize(Vector2 newSize)
{
Size = newSize;
return true;
}
/// <summary>
/// Gets the position of the center of the Entity.
/// </summary>
[Browsable(false)]
public Vector2 Center
{
get { return Position + (Size / 2); }
}
/// <summary>
/// When overridden in the derived class, gets if this <see cref="Entity"/> will collide against
/// walls. If false, this <see cref="Entity"/> will pass through walls and completely ignore them.
/// </summary>
[Browsable(false)]
public abstract bool CollidesAgainstWalls { get; }
/// <summary>
/// Gets if this Entity has been disposed, or is in the process of being disposed.
/// </summary>
[Browsable(false)]
public bool IsDisposed
{
get { return _isDisposed; }
}
/// <summary>
/// Gets if this <see cref="Entity"/> is currently on the ground. For top-down, this will always return true.
/// </summary>
[Browsable(false)]
public bool IsOnGround
{
get
{
#if TOPDOWN
return true;
#else
return StandingOn != null;
#endif
}
}
/// <summary>
/// Gets the <see cref="WallEntityBase"/> that the <see cref="Entity"/> is standing on, or null if they are
/// not standing on anything (are not on the ground). If using a top-down perspective, this value is always null.
/// </summary>
[Browsable(false)]
public Entity StandingOn
{
get
{
#if TOPDOWN
return null;
#else
return _standingOn;
#endif
}
internal set
{
#if !TOPDOWN
if (StandingOn == value)
return;
_standingOn = value;
if (StandingOn != null)
{
_velocity.Y = 0;
// Move them so that they really are standing right on top of the entity
var newPos = new Vector2(Position.X, StandingOn.Position.Y - Size.Y);
SetPositionRaw(newPos);
}
#endif
}
}
/// <summary>
/// Gets a <see cref="Rectangle"/> that represents the world area that this <see cref="ISpatial"/> occupies.
/// </summary>
/// <returns>A <see cref="Rectangle"/> that represents the world area that this <see cref="ISpatial"/>
/// occupies.</returns>
public Rectangle ToRectangle()
{
return SpatialHelper.ToRectangle(this);
}
/// <summary>
/// Gets or sets the position of the top-left corner of the entity.
/// </summary>
[Category(_entityCategoryString)]
[DisplayName("Position")]
[Description("Location of the top-left corner of the Entity on the map.")]
[Browsable(true)]
public Vector2 Position
{
get { return _position; }
set { Teleport(value); }
}
Vector2 _lastPosition;
/// <summary>
/// Gets the position that this <see cref="Entity"/> was at on their last update. If they teleport, this
/// value will be set to the current position.
/// </summary>
[Browsable(false)]
public Vector2 LastPosition
{
get { return _lastPosition; }
}
/// <summary>
/// Gets or sets the size of the Entity.
/// </summary>
[Category(_entityCategoryString)]
[DisplayName("Size")]
[Description("Size of the Entity.")]
[Browsable(true)]
public Vector2 Size
{
get { return _size; }
set { Resize(value); }
}
/// <summary>
/// Gets the world coordinates of the bottom-right corner of this <see cref="ISpatial"/>.
/// </summary>
[Browsable(false)]
public Vector2 Max
{
get { return Position + Size; }
}
/// <summary>
/// Gets the velocity of the Entity.
/// </summary>
[Browsable(false)]
public Vector2 Velocity
{
get { return _velocity; }
}
/// <summary>
/// Gets or sets the weight of the Entity (used in gravity calculations).
/// </summary>
[Category(_entityCategoryString)]
[DisplayName("Weight")]
[Description("The weight of the Entity. Higher the weight, the greater the effects of the gravity, where 0 is unaffected by gravity.")]
[DefaultValue(0.0f)]
[Browsable(true)]
public virtual float Weight
{
get { return _weight; }
set { _weight = value; }
}
/// <summary>
/// Handles when another Entity collides into us. Not synonymous with CollideInto, since the
/// <paramref name="collider"/> Entity is the one who collided into us. For example, if the
/// two entities in question were a moving Character and a stationary wall, this Entity would be
/// the Wall and <paramref name="collider"/> would be the Character.
/// </summary>
/// <param name="collider">Entity that collided into us.</param>
/// <param name="displacement">Displacement between the two Entities.</param>
public virtual void CollideFrom(Entity collider, Vector2 displacement)
{
}
/// <summary>
/// Handles when the Entity collides into another entity. Not synonymous with CollideFrom, since we
/// were the ones who collided into the <paramref name="collideWith"/> Entity. For example, if the
/// two Entities in question were a moving Character and a stationary Wall, this Entity would be
/// the Character and <paramref name="collideWith"/> would be the Wall.
/// </summary>
/// <param name="collideWith">Entity that this Entity collided with.</param>
/// <param name="displacement">Displacement between the two Entities.</param>
public virtual void CollideInto(Entity collideWith, Vector2 displacement)
{
}
/// <summary>
/// Performs the actual disposing of the Entity. This is called by the base Entity class when
/// a request has been made to dispose of the Entity. This is guaranteed to only be called once.
/// All classes that override this method should be sure to call base.HandleDispose() after
/// handling what they need to dispose.
/// </summary>
/// <param name="disposeManaged">When true, <see cref="IDisposable.Dispose"/> was explicitly called and managed resources need to be
/// disposed. When false, managed resources do not need to be disposed since this object was garbage-collected.</param>
protected virtual void HandleDispose(bool disposeManaged)
{
}
/// <summary>
/// Handles updating this <see cref="Entity"/>.
/// </summary>
/// <param name="imap">The map the <see cref="Entity"/> is on.</param>
/// <param name="deltaTime">The amount of time (in milliseconds) that has elapsed since the last update.</param>
protected virtual void HandleUpdate(IMap imap, int deltaTime)
{
// If moving, perform collision detection
if (Velocity != Vector2.Zero)
imap.CheckCollisions(this);
#if !TOPDOWN
// If the entity is standing on a wall, make sure they are still standing on it. If they aren't, check if they
// are standing on top of something else.
if (StandingOn != null)
{
if (!WallEntityBase.IsEntityStandingOn(StandingOn, this))
StandingOn = imap.FindStandingOn(this);
}
#endif
}
/// <summary>
/// Translates the entity from its current position.
/// </summary>
/// <param name="adjustment">Amount to move.</param>
public virtual void Move(Vector2 adjustment)
{
if (adjustment == Vector2.Zero)
return;
var newPos = Position + adjustment;
SetPositionRaw(newPos);
}
/// <summary>
/// Resizes the <see cref="Entity"/>.
/// </summary>
/// <param name="newSize">The new size of this <see cref="Entity"/>.</param>
protected virtual void Resize(Vector2 newSize)
{
SetSizeRaw(newSize);
}
/// <summary>
/// Sets the <see cref="Entity"/>'s <see cref="Position"/> directly without any chance to be overridden.
/// This should only be used for synchronization.
/// </summary>
/// <param name="newPosition">The new <see cref="Position"/> value.</param>
protected internal void SetPositionRaw(Vector2 newPosition)
{
if (newPosition == Position)
return;
var oldPos = Position;
_position = newPosition;
// Notify listeners
OnMoved(oldPos);
if (Moved != null)
Moved.Raise(this, EventArgsHelper.Create(oldPos));
}
/// <summary>
/// Sets the <see cref="Entity"/>'s <see cref="Size"/> directly without any chance to be overridden.
/// This should only be used for synchronization.
/// </summary>
/// <param name="newSize">The new <see cref="Size"/> value.</param>
protected internal void SetSizeRaw(Vector2 newSize)
{
if (newSize == Size)
return;
var oldSize = Size;
_size = newSize;
// Notify listeners
OnResized(oldSize);
if (Resized != null)
Resized.Raise(this, EventArgsHelper.Create(oldSize));
}
/// <summary>
/// Sets the velocity of the <see cref="Entity"/>.
/// </summary>
/// <param name="newVelocity">The new velocity.</param>
public virtual void SetVelocity(Vector2 newVelocity)
{
_velocity = newVelocity;
}
/// <summary>
/// Sets the <see cref="Entity"/>'s <see cref="Velocity"/> directly without any chance to be overridden.
/// This should only be used for synchronization.
/// </summary>
/// <param name="newVelocity">The new <see cref="Velocity"/> value.</param>
protected internal void SetVelocityRaw(Vector2 newVelocity)
{
_velocity = newVelocity;
}
/// <summary>
/// Sets the <see cref="Entity"/>'s <see cref="Weight"/> directly without any chance to be overridden.
/// This should only be used for synchronization.
/// </summary>
/// <param name="newWeight">The new <see cref="Weight"/> value.</param>
protected internal void SetWeightRaw(float newWeight)
{
_weight = newWeight;
}
/// <summary>
/// Moves the <see cref="Entity"/> to a new location instantly.
/// </summary>
/// <param name="newPosition">New position for the <see cref="Entity"/>.</param>
protected virtual void Teleport(Vector2 newPosition)
{
// Do not update if we're already at the specified position
if (newPosition == Position)
return;
// Assume they are not on the ground after teleporting
StandingOn = null;
// Move the entity
SetPositionRaw(newPosition);
}
/// <summary>
/// Perform pre-collision velocity and position updating.
/// </summary>
/// <param name="map">The map.</param>
/// <param name="deltaTime">The amount of that that has elapsed time since last update.</param>
public virtual void UpdateVelocity(IMap map, int deltaTime)
{
_lastPosition = Position;
// Only perform movement if moving
if (IsOnGround && Velocity == Vector2.Zero)
return;
#if !TOPDOWN
Vector2 gravity;
if (map == null)
gravity = _defaultGravity;
else
gravity = map.Gravity;
if (StandingOn != null)
{
if (!WallEntityBase.IsEntityStandingOn(StandingOn, this))
StandingOn = null;
}
if (StandingOn == null)
{
// Increase the velocity by the gravity
var displacement = gravity * (Weight * deltaTime);
Vector2.Add(ref _velocity, ref displacement, out _velocity);
}
#endif
// Check for surpassing the maximum velocity
if (_velocity.X > _maxVelocity.X)
_velocity.X = _maxVelocity.X;
else if (_velocity.X < -_maxVelocity.X)
_velocity.X = -_maxVelocity.X;
if (_velocity.Y > _maxVelocity.Y)
_velocity.Y = _maxVelocity.Y;
else if (_velocity.Y < -_maxVelocity.Y)
_velocity.Y = -_maxVelocity.Y;
// Move according to the velocity
Move(_velocity * deltaTime);
}
#region IDisposable Members
/// <summary>
/// Disposes of the <see cref="Entity"/>.
/// </summary>
public void Dispose()
{
// Check if the Entity has already been disposed
if (IsDisposed)
return;
GC.SuppressFinalize(this);
_isDisposed = true;
// Notify listeners that the Entity is being disposed
if (Disposed != null)
Disposed.Raise(this, EventArgs.Empty);
// Handle the disposing
HandleDispose(true);
}
#endregion
}
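/// <summary>
/// Illustrative sketch (editor-added, not part of the original source): a minimal concrete
/// <see cref="Entity"/> showing the one abstract member a derived type must supply and how the
/// update methods are typically driven. The Update method and its name are hypothetical; the
/// actual per-frame driver depends on the host game loop.
/// </summary>
public class ExampleEntity : Entity
{
    /// <summary>
    /// Initializes a new instance of the <see cref="ExampleEntity"/> class.
    /// </summary>
    /// <param name="position">The initial world position.</param>
    /// <param name="size">The initial size.</param>
    public ExampleEntity(Vector2 position, Vector2 size) : base(position, size)
    {
    }
    /// <summary>
    /// This example entity passes through walls and ignores them completely.
    /// </summary>
    public override bool CollidesAgainstWalls
    {
        get { return false; }
    }
    /// <summary>
    /// Hypothetical per-frame driver: apply gravity and clamp velocity, then run collision checks.
    /// </summary>
    /// <param name="map">The map the entity is on.</param>
    /// <param name="deltaTime">The amount of time (in milliseconds) that has elapsed since the last update.</param>
    public void Update(IMap map, int deltaTime)
    {
        UpdateVelocity(map, deltaTime);
        HandleUpdate(map, deltaTime);
    }
}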
}
| |
// Copyright (c) Microsoft. All Rights Reserved. Licensed under the Apache License, Version 2.0. See License.txt in the project root for license information.
using System.Collections.Generic;
using System.Linq;
using System.Threading;
using System.Windows.Threading;
using Microsoft.CodeAnalysis.Editor.Shared.Options;
using Microsoft.CodeAnalysis.Editor.Shared.Tagging;
using Microsoft.CodeAnalysis.Internal.Log;
using Microsoft.CodeAnalysis.Options;
using Microsoft.CodeAnalysis.Shared.TestHooks;
using Microsoft.VisualStudio.Text;
using Microsoft.VisualStudio.Text.Editor;
using Microsoft.VisualStudio.Text.Projection;
using Microsoft.VisualStudio.Text.Tagging;
using Roslyn.Utilities;
namespace Microsoft.CodeAnalysis.Editor.Implementation.LineSeparators
{
/// <summary>
/// UI manager for graphic overlay tags. These tags will simply paint something related to the text.
/// </summary>
internal class AdornmentManager<T> where T : GraphicsTag
{
private readonly object _invalidatedSpansLock = new object();
/// <summary>View that created us.</summary>
private readonly IWpfTextView _textView;
/// <summary>Layer where we draw adornments.</summary>
private readonly IAdornmentLayer _adornmentLayer;
/// <summary>Aggregator that tells us where to draw.</summary>
private readonly ITagAggregator<T> _tagAggregator;
/// <summary>Notification system about operations we do</summary>
private readonly IAsynchronousOperationListener _asyncListener;
/// <summary>Spans that are invalidated and need to be removed from the layer.</summary>
private List<IMappingSpan> _invalidatedSpans;
public static AdornmentManager<T> Create(
IWpfTextView textView,
IViewTagAggregatorFactoryService aggregatorService,
IAsynchronousOperationListener asyncListener,
string adornmentLayerName)
{
Contract.ThrowIfNull(textView);
Contract.ThrowIfNull(aggregatorService);
Contract.ThrowIfNull(adornmentLayerName);
Contract.ThrowIfNull(asyncListener);
return new AdornmentManager<T>(textView, aggregatorService, asyncListener, adornmentLayerName);
}
internal AdornmentManager(
IWpfTextView textView,
IViewTagAggregatorFactoryService tagAggregatorFactoryService,
IAsynchronousOperationListener asyncListener,
string adornmentLayerName)
{
Contract.ThrowIfNull(textView);
Contract.ThrowIfNull(tagAggregatorFactoryService);
Contract.ThrowIfNull(adornmentLayerName);
Contract.ThrowIfNull(asyncListener);
_textView = textView;
_adornmentLayer = textView.GetAdornmentLayer(adornmentLayerName);
textView.LayoutChanged += OnLayoutChanged;
_asyncListener = asyncListener;
// If we are not on the UI thread, we are racing with Close, but we should be on the UI thread
Contract.ThrowIfFalse(textView.VisualElement.Dispatcher.CheckAccess());
textView.Closed += OnTextViewClosed;
_tagAggregator = tagAggregatorFactoryService.CreateTagAggregator<T>(textView);
_tagAggregator.TagsChanged += OnTagsChanged;
}
private void OnTextViewClosed(object sender, System.EventArgs e)
{
// release the aggregator
_tagAggregator.TagsChanged -= OnTagsChanged;
_tagAggregator.Dispose();
// unhook from view
_textView.Closed -= OnTextViewClosed;
_textView.LayoutChanged -= OnLayoutChanged;
// At this point, this object should be available for garbage collection.
}
/// <summary>
/// This handler gets called whenever there is a visual change in the view.
/// Example: edit or a scroll.
/// </summary>
private void OnLayoutChanged(object sender, TextViewLayoutChangedEventArgs e)
{
using (Logger.LogBlock(FunctionId.Tagger_AdornmentManager_OnLayoutChanged, CancellationToken.None))
using (_asyncListener.BeginAsyncOperation(GetType() + ".OnLayoutChanged"))
{
// Make sure we're on the UI thread.
Contract.ThrowIfFalse(_textView.VisualElement.Dispatcher.CheckAccess());
var reformattedSpans = e.NewOrReformattedSpans;
var viewSnapshot = _textView.TextSnapshot;
// No need to remove tags as these spans are reformatted anyways.
UpdateSpans_CallOnlyOnUIThread(reformattedSpans, removeOldTags: false);
// Compute any spans that had been invalidated but were not affected by layout.
List<IMappingSpan> invalidated;
lock (_invalidatedSpansLock)
{
invalidated = _invalidatedSpans;
_invalidatedSpans = null;
}
if (invalidated != null)
{
var invalidatedAndNormalized = TranslateAndNormalize(invalidated, viewSnapshot);
var invalidatedButNotReformatted = NormalizedSnapshotSpanCollection.Difference(
invalidatedAndNormalized,
e.NewOrReformattedSpans);
UpdateSpans_CallOnlyOnUIThread(invalidatedButNotReformatted, removeOldTags: true);
}
}
}
private static NormalizedSnapshotSpanCollection TranslateAndNormalize(
IEnumerable<IMappingSpan> spans,
ITextSnapshot targetSnapshot)
{
Contract.ThrowIfNull(spans);
var translated = spans.SelectMany(span => span.GetSpans(targetSnapshot));
return new NormalizedSnapshotSpanCollection(translated);
}
/// <summary>
/// This handler is called when tag aggregator notifies us about tag changes.
/// </summary>
private void OnTagsChanged(object sender, TagsChangedEventArgs e)
{
using (_asyncListener.BeginAsyncOperation(GetType().Name + ".OnTagsChanged.1"))
{
var changedSpan = e.Span;
if (changedSpan == null)
{
return; // nothing changed
}
var needToScheduleUpdate = false;
lock (_invalidatedSpansLock)
{
if (_invalidatedSpans == null)
{
// set invalidated spans
var newInvalidatedSpans = new List<IMappingSpan>();
newInvalidatedSpans.Add(changedSpan);
_invalidatedSpans = newInvalidatedSpans;
needToScheduleUpdate = true;
}
else
{
// add to existing invalidated spans
_invalidatedSpans.Add(changedSpan);
}
}
if (needToScheduleUpdate)
{
// schedule an update
var asyncToken = _asyncListener.BeginAsyncOperation(GetType() + ".OnTagsChanged.2");
_textView.VisualElement.Dispatcher.BeginInvoke(
new System.Action(() =>
{
try
{
UpdateInvalidSpans();
}
finally
{
asyncToken.Dispose();
}
}), DispatcherPriority.Render);
}
}
}
/// <summary>
/// MUST BE CALLED ON UI THREAD!!!! This method touches WPF.
///
/// This function is used to update invalidated spans.
/// </summary>
private void UpdateInvalidSpans()
{
using (_asyncListener.BeginAsyncOperation(GetType().Name + ".UpdateInvalidSpans.1"))
using (Logger.LogBlock(FunctionId.Tagger_AdornmentManager_UpdateInvalidSpans, CancellationToken.None))
{
// this method should only run on UI thread as we do WPF here.
Contract.ThrowIfFalse(_textView.VisualElement.Dispatcher.CheckAccess());
List<IMappingSpan> invalidated;
lock (_invalidatedSpansLock)
{
invalidated = _invalidatedSpans;
_invalidatedSpans = null;
}
if (_textView.IsClosed)
{
return; // already closed
}
if (invalidated != null)
{
var viewSnapshot = _textView.TextSnapshot;
var invalidatedNormalized = TranslateAndNormalize(invalidated, viewSnapshot);
UpdateSpans_CallOnlyOnUIThread(invalidatedNormalized, removeOldTags: true);
}
}
}
/// <summary>
/// MUST BE CALLED ON UI THREAD!!!! This method touches WPF.
///
/// This is where we apply visuals to the text.
///
/// It happens when another region of the view becomes visible or there is a change in tags.
/// For us the end result is the same - get tags from tagger and update visuals correspondingly.
/// </summary>
private void UpdateSpans_CallOnlyOnUIThread(NormalizedSnapshotSpanCollection changedSpanCollection, bool removeOldTags)
{
Contract.ThrowIfNull(changedSpanCollection);
// this method should only run on UI thread as we do WPF here.
Contract.ThrowIfFalse(_textView.VisualElement.Dispatcher.CheckAccess());
var viewSnapshot = _textView.TextSnapshot;
var visualSnapshot = _textView.VisualSnapshot;
var viewLines = _textView.TextViewLines;
if (viewLines == null || viewLines.Count == 0)
{
return; // nothing to draw on
}
// removing is a separate pass from adding so that new stuff is not removed.
if (removeOldTags)
{
foreach (var changedSpan in changedSpanCollection)
{
// is there any effect on the view?
if (viewLines.IntersectsBufferSpan(changedSpan))
{
_adornmentLayer.RemoveAdornmentsByVisualSpan(changedSpan);
}
}
}
foreach (var changedSpan in changedSpanCollection)
{
// is there any effect on the view?
if (!viewLines.IntersectsBufferSpan(changedSpan))
{
continue;
}
var tagSpans = _tagAggregator.GetTags(changedSpan);
foreach (var tagMappingSpan in tagSpans)
{
// We don't want to draw line separators if they would intersect a collapsed outlining
// region. So we test if we can map the start of the line separator up to our visual
// snapshot. If we can't, then we just skip it.
var point = tagMappingSpan.Span.Start.GetPoint(changedSpan.Snapshot, PositionAffinity.Predecessor);
if (point == null)
{
continue;
}
var mappedPoint = _textView.BufferGraph.MapUpToSnapshot(
point.Value, PointTrackingMode.Negative, PositionAffinity.Predecessor, _textView.VisualSnapshot);
if (mappedPoint == null)
{
continue;
}
SnapshotSpan span;
if (!TryMapToSingleSnapshotSpan(tagMappingSpan.Span, viewSnapshot, out span))
{
continue;
}
if (!viewLines.IntersectsBufferSpan(span))
{
// span is outside of the view so we will not get geometry for it, but may
// spend a lot of time trying.
continue;
}
// add the visual to the adornment layer.
var geometry = viewLines.GetMarkerGeometry(span);
if (geometry != null)
{
var tag = tagMappingSpan.Tag;
var graphicsResult = tag.GetGraphics(_textView, geometry);
_adornmentLayer.AddAdornment(
behavior: AdornmentPositioningBehavior.TextRelative,
visualSpan: span,
tag: tag,
adornment: graphicsResult.VisualElement,
removedCallback: delegate { graphicsResult.Dispose(); });
}
}
}
}
// Map the mapping span to the visual snapshot. Note that, as a result of projection
// topology, an originally single span may be mapped into several spans. Visual adornments
// do not make much sense on disjoint spans, so we will not decorate spans that could not
// be mapped in one piece.
private bool TryMapToSingleSnapshotSpan(IMappingSpan mappingSpan, ITextSnapshot viewSnapshot, out SnapshotSpan span)
{
// IMappingSpan.GetSpans is a surprisingly expensive function that allocates multiple
// lists and collections. If the view buffer is the same as the anchor buffer, we could just
// map the anchor to the viewSnapshot; however, since the _anchor is not available, we have
// to map the start and end separately. TODO: verify that the affinity is correct. If it
// does not matter, we should use the cheapest one.
if (viewSnapshot != null && mappingSpan.AnchorBuffer == viewSnapshot.TextBuffer)
{
var mappedStart = mappingSpan.Start.GetPoint(viewSnapshot, PositionAffinity.Predecessor).Value;
var mappedEnd = mappingSpan.End.GetPoint(viewSnapshot, PositionAffinity.Successor).Value;
span = new SnapshotSpan(mappedStart, mappedEnd);
return true;
}
// TODO: actually adornments do not make much sense on "cropped" spans either - Consider line separator on "nd Su"
// is it possible to cheaply detect cropping?
var spans = mappingSpan.GetSpans(viewSnapshot);
if (spans.Count != 1)
{
span = default(SnapshotSpan);
return false; // span is unmapped or disjoint.
}
span = spans[0];
return true;
}
}
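/// <summary>
/// Illustrative sketch (editor-added, not part of the original source): how a view-creation
/// listener would typically attach the manager. The tag type argument and the adornment layer
/// name are assumptions; any <see cref="GraphicsTag"/>-derived tag and any adornment layer
/// defined by the host editor work the same way.
/// </summary>
internal static class AdornmentManagerUsageExample
{
    public static void Attach(
        IWpfTextView textView,
        IViewTagAggregatorFactoryService aggregatorService,
        IAsynchronousOperationListener asyncListener)
    {
        // The manager subscribes to LayoutChanged and TagsChanged itself and unhooks when the view closes.
        AdornmentManager<GraphicsTag>.Create(textView, aggregatorService, asyncListener, "ExampleAdornmentLayer");
    }
}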
}
| |
// ==++==
//
// Copyright (c) Microsoft Corporation. All rights reserved.
//
// ==--==
/*============================================================
**
** Class: File
**
** <OWNER>[....]</OWNER>
**
**
** Purpose: Long paths
**
===========================================================*/
using System;
using System.Security.Permissions;
using PermissionSet = System.Security.PermissionSet;
using Win32Native = Microsoft.Win32.Win32Native;
using System.Runtime.InteropServices;
using System.Security;
#if FEATURE_MACL
using System.Security.AccessControl;
#endif
using System.Text;
using Microsoft.Win32.SafeHandles;
using System.Collections.Generic;
using System.Globalization;
using System.Runtime.Versioning;
using System.Diagnostics.Contracts;
namespace System.IO {
[ComVisible(false)]
static class LongPath
{
[System.Security.SecurityCritical]
[ResourceExposure(ResourceScope.Machine)]
[ResourceConsumption(ResourceScope.Machine)]
internal unsafe static String NormalizePath(String path)
{
Contract.Requires(path != null);
return NormalizePath(path, true);
}
[System.Security.SecurityCritical]
[ResourceExposure(ResourceScope.Machine)]
[ResourceConsumption(ResourceScope.Machine)]
internal unsafe static String NormalizePath(String path, bool fullCheck)
{
Contract.Requires(path != null);
return Path.NormalizePath(path, fullCheck, Path.MaxLongPath);
}
internal static String InternalCombine(String path1, String path2)
{
Contract.Requires(path1 != null);
Contract.Requires(path2 != null);
Contract.Requires(path2.Length != 0);
Contract.Requires(!IsPathRooted(path2));
bool removedPrefix;
String tempPath1 = TryRemoveLongPathPrefix(path1, out removedPrefix);
String tempResult = Path.InternalCombine(tempPath1, path2);
if (removedPrefix)
{
tempResult = Path.AddLongPathPrefix(tempResult);
}
return tempResult;
}
internal static int GetRootLength(String path)
{
bool removedPrefix;
String tempPath = TryRemoveLongPathPrefix(path, out removedPrefix);
int root = Path.GetRootLength(tempPath);
if (removedPrefix)
{
root += 4;
}
return root;
}
// Tests if the given path contains a root. A path is considered rooted
// if it starts with a backslash ("\") or a drive letter and a colon (":").
//
[Pure]
internal static bool IsPathRooted(String path)
{
Contract.Requires(path != null);
String tempPath = Path.RemoveLongPathPrefix(path);
return Path.IsPathRooted(tempPath);
}
// Returns the root portion of the given path. The resulting string
// consists of those rightmost characters of the path that constitute the
// root of the path. Possible patterns for the resulting string are: An
// empty string (a relative path on the current drive), "\" (an absolute
// path on the current drive), "X:" (a relative path on a given drive,
// where X is the drive letter), "X:\" (an absolute path on a given drive),
// and "\\server\share" (a UNC path for a given server and share name).
// The resulting string is null if path is null.
//
[System.Security.SecurityCritical]
[ResourceExposure(ResourceScope.Machine)]
[ResourceConsumption(ResourceScope.Machine)]
internal static String GetPathRoot(String path)
{
if (path == null) return null;
bool removedPrefix;
String tempPath = TryRemoveLongPathPrefix(path, out removedPrefix);
tempPath = NormalizePath(tempPath, false);
String result = path.Substring(0, GetRootLength(tempPath));
if (removedPrefix)
{
result = Path.AddLongPathPrefix(result);
}
return result;
}
// Returns the directory path of a file path. This method effectively
// removes the last element of the given file path, i.e. it returns a
// string consisting of all characters up to but not including the last
// backslash ("\") in the file path. The returned value is null if the file
// path is null or if the file path denotes a root (such as "\", "C:", or
// "\\server\share").
[System.Security.SecurityCritical] // auto-generated
[ResourceExposure(ResourceScope.None)]
[ResourceConsumption(ResourceScope.Machine, ResourceScope.Machine)]
internal static String GetDirectoryName(String path)
{
if (path != null)
{
bool removedPrefix;
String tempPath = TryRemoveLongPathPrefix(path, out removedPrefix);
Path.CheckInvalidPathChars(tempPath);
path = NormalizePath(tempPath, false);
int root = GetRootLength(tempPath);
int i = tempPath.Length;
if (i > root)
{
i = tempPath.Length;
if (i == root) return null;
while (i > root && tempPath[--i] != Path.DirectorySeparatorChar && tempPath[i] != Path.AltDirectorySeparatorChar);
String result = tempPath.Substring(0, i);
if (removedPrefix)
{
result = Path.AddLongPathPrefix(result);
}
return result;
}
}
return null;
}
internal static String TryRemoveLongPathPrefix(String path, out bool removed)
{
Contract.Requires(path != null);
removed = Path.HasLongPathPrefix(path);
if (!removed)
return path;
return Path.RemoveLongPathPrefix(path);
}
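// Illustrative sketch (editor-added, not part of the original source): the "strip the
// long-path prefix, run the ordinary Path helper, re-apply the prefix" pattern shared by
// the helpers above. The concrete path is hypothetical.
internal static String GetDirectoryNameExample()
{
    String withPrefix = @"\\?\C:\some\very\long\path\file.txt";
    bool removedPrefix;
    String tempPath = TryRemoveLongPathPrefix(withPrefix, out removedPrefix);
    String result = Path.GetDirectoryName(tempPath);
    if (removedPrefix)
    {
        result = Path.AddLongPathPrefix(result);
    }
    return result;
}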
}
[ComVisible(false)]
static class LongPathFile
{
// Copies an existing file to a new file. If overwrite is
// false, then an IOException is thrown if the destination file
// already exists. If overwrite is true, the file is
// overwritten.
//
// The caller must have certain FileIOPermissions. The caller must have
// Read permission to sourceFileName
// and Write permissions to destFileName.
//
[System.Security.SecurityCritical]
[ResourceExposure(ResourceScope.Machine)]
[ResourceConsumption(ResourceScope.Machine)]
internal static void Copy(String sourceFileName, String destFileName, bool overwrite) {
Contract.Requires(sourceFileName != null);
Contract.Requires(destFileName != null);
Contract.Requires(sourceFileName.Length > 0);
Contract.Requires(destFileName.Length > 0);
String fullSourceFileName = LongPath.NormalizePath(sourceFileName);
new FileIOPermission(FileIOPermissionAccess.Read, new String[] { fullSourceFileName }, false, false).Demand();
String fullDestFileName = LongPath.NormalizePath(destFileName);
new FileIOPermission(FileIOPermissionAccess.Write, new String[] { fullDestFileName }, false, false).Demand();
InternalCopy(fullSourceFileName, fullDestFileName, sourceFileName, destFileName, overwrite);
}
[System.Security.SecurityCritical]
[ResourceExposure(ResourceScope.Machine)]
[ResourceConsumption(ResourceScope.Machine)]
private static String InternalCopy(String fullSourceFileName, String fullDestFileName, String sourceFileName, String destFileName, bool overwrite) {
Contract.Requires(fullSourceFileName != null);
Contract.Requires(fullDestFileName != null);
Contract.Requires(fullSourceFileName.Length > 0);
Contract.Requires(fullDestFileName.Length > 0);
fullSourceFileName = Path.AddLongPathPrefix(fullSourceFileName);
fullDestFileName = Path.AddLongPathPrefix(fullDestFileName);
bool r = Win32Native.CopyFile(fullSourceFileName, fullDestFileName, !overwrite);
if (!r) {
// Save Win32 error because subsequent checks will overwrite this HRESULT.
int errorCode = Marshal.GetLastWin32Error();
String fileName = destFileName;
if (errorCode != Win32Native.ERROR_FILE_EXISTS) {
// For a number of error codes (sharing violation, path
// not found, etc) we don't know if the problem was with
// the source or dest file. Try reading the source file.
using(SafeFileHandle handle = Win32Native.UnsafeCreateFile(fullSourceFileName, FileStream.GENERIC_READ, FileShare.Read, null, FileMode.Open, 0, IntPtr.Zero)) {
if (handle.IsInvalid)
fileName = sourceFileName;
}
if (errorCode == Win32Native.ERROR_ACCESS_DENIED) {
if (LongPathDirectory.InternalExists(fullDestFileName))
throw new IOException(Environment.GetResourceString("Arg_FileIsDirectory_Name", destFileName), Win32Native.ERROR_ACCESS_DENIED, fullDestFileName);
}
}
__Error.WinIOError(errorCode, fileName);
}
return fullDestFileName;
}
// Deletes a file. The file specified by the designated path is deleted.
// If the file does not exist, Delete succeeds without throwing
// an exception.
//
// On NT, Delete will fail for a file that is open for normal I/O
// or a file that is memory mapped.
//
// Your application must have Delete permission to the target file.
//
[System.Security.SecurityCritical]
[ResourceExposure(ResourceScope.Machine)]
[ResourceConsumption(ResourceScope.Machine)]
internal static void Delete(String path) {
Contract.Requires(path != null);
String fullPath = LongPath.NormalizePath(path);
// For security check, path should be resolved to an absolute path.
new FileIOPermission(FileIOPermissionAccess.Write, new String[] { fullPath }, false, false ).Demand();
String tempPath = Path.AddLongPathPrefix(fullPath);
bool r = Win32Native.DeleteFile(tempPath);
if (!r) {
int hr = Marshal.GetLastWin32Error();
if (hr==Win32Native.ERROR_FILE_NOT_FOUND)
return;
else
__Error.WinIOError(hr, fullPath);
}
}
// Tests if a file exists. The result is true if the file
// given by the specified path exists; otherwise, the result is
// false. Note that if path describes a directory,
// Exists will return false.
//
// Your application must have Read permission for the target directory.
//
[System.Security.SecurityCritical]
[ResourceExposure(ResourceScope.Machine)]
[ResourceConsumption(ResourceScope.Machine)]
internal static bool Exists(String path) {
try
{
if (path==null)
return false;
if (path.Length==0)
return false;
path = LongPath.NormalizePath(path);
// After normalizing, check whether path ends in directory separator.
// Otherwise, FillAttributeInfo removes it and we may return a false positive.
// GetFullPathInternal should never return null
Contract.Assert(path != null, "File.Exists: GetFullPathInternal returned null");
if (path.Length > 0 && Path.IsDirectorySeparator(path[path.Length - 1])) {
return false;
}
new FileIOPermission(FileIOPermissionAccess.Read, new String[] { path }, false, false ).Demand();
return InternalExists(path);
}
catch(ArgumentException) {}
catch(NotSupportedException) {} // Security can throw this on ":"
catch(SecurityException) {}
catch(IOException) {}
catch(UnauthorizedAccessException) {}
return false;
}
[System.Security.SecurityCritical]
internal static bool InternalExists(String path) {
Contract.Requires(path != null);
String tempPath = Path.AddLongPathPrefix(path);
return File.InternalExists(tempPath);
}
[System.Security.SecurityCritical]
[ResourceExposure(ResourceScope.Machine)]
[ResourceConsumption(ResourceScope.Machine)]
internal static DateTimeOffset GetCreationTime(String path)
{
Contract.Requires(path != null);
String fullPath = LongPath.NormalizePath(path);
new FileIOPermission(FileIOPermissionAccess.Read, new String[] { fullPath }, false, false ).Demand();
String tempPath = Path.AddLongPathPrefix(fullPath);
Win32Native.WIN32_FILE_ATTRIBUTE_DATA data = new Win32Native.WIN32_FILE_ATTRIBUTE_DATA();
int dataInitialised = File.FillAttributeInfo(tempPath, ref data, false, false);
if (dataInitialised != 0)
__Error.WinIOError(dataInitialised, fullPath);
long dt = ((long)(data.ftCreationTimeHigh) << 32) | ((long)data.ftCreationTimeLow);
DateTime dtLocal = DateTime.FromFileTimeUtc(dt).ToLocalTime();
return new DateTimeOffset(dtLocal).ToLocalTime();
}
[System.Security.SecurityCritical]
[ResourceExposure(ResourceScope.Machine)]
[ResourceConsumption(ResourceScope.Machine)]
internal static DateTimeOffset GetLastAccessTime(String path)
{
Contract.Requires(path != null);
String fullPath = LongPath.NormalizePath(path);
new FileIOPermission(FileIOPermissionAccess.Read, new String[] { fullPath }, false, false ).Demand();
String tempPath = Path.AddLongPathPrefix(fullPath);
Win32Native.WIN32_FILE_ATTRIBUTE_DATA data = new Win32Native.WIN32_FILE_ATTRIBUTE_DATA();
int dataInitialised = File.FillAttributeInfo(tempPath, ref data, false, false);
if (dataInitialised != 0)
__Error.WinIOError(dataInitialised, fullPath);
long dt = ((long)(data.ftLastAccessTimeHigh) << 32) | ((long)data.ftLastAccessTimeLow);
DateTime dtLocal = DateTime.FromFileTimeUtc(dt).ToLocalTime();
return new DateTimeOffset(dtLocal).ToLocalTime();
}
[System.Security.SecurityCritical]
[ResourceExposure(ResourceScope.Machine)]
[ResourceConsumption(ResourceScope.Machine)]
internal static DateTimeOffset GetLastWriteTime(String path)
{
Contract.Requires(path != null);
String fullPath = LongPath.NormalizePath(path);
new FileIOPermission(FileIOPermissionAccess.Read, new String[] { fullPath }, false, false ).Demand();
String tempPath = Path.AddLongPathPrefix(fullPath);
Win32Native.WIN32_FILE_ATTRIBUTE_DATA data = new Win32Native.WIN32_FILE_ATTRIBUTE_DATA();
int dataInitialised = File.FillAttributeInfo(tempPath, ref data, false, false);
if (dataInitialised != 0)
__Error.WinIOError(dataInitialised, fullPath);
long dt = ((long)data.ftLastWriteTimeHigh << 32) | ((long)data.ftLastWriteTimeLow);
DateTime dtLocal = DateTime.FromFileTimeUtc(dt).ToLocalTime();
return new DateTimeOffset(dtLocal).ToLocalTime();
}
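// Illustrative sketch (editor-added, not part of the original source): the FILETIME
// high/low words returned by FillAttributeInfo combine into a single 64-bit value that
// DateTime.FromFileTimeUtc interprets, exactly as in the three getters above. The uint
// parameter types here are the editor's assumption.
private static DateTimeOffset CombineFileTimeExample(uint high, uint low)
{
    long fileTime = ((long)high << 32) | ((long)low);
    DateTime dtLocal = DateTime.FromFileTimeUtc(fileTime).ToLocalTime();
    return new DateTimeOffset(dtLocal).ToLocalTime();
}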
// Moves a specified file to a new location and potentially a new file name.
// This method does work across volumes.
//
// The caller must have certain FileIOPermissions. The caller must
// have Read and Write permission to
// sourceFileName and Write
// permissions to destFileName.
//
[System.Security.SecurityCritical]
[ResourceExposure(ResourceScope.Machine)]
[ResourceConsumption(ResourceScope.Machine)]
internal static void Move(String sourceFileName, String destFileName) {
Contract.Requires(sourceFileName != null);
Contract.Requires(destFileName != null);
Contract.Requires(sourceFileName.Length > 0);
Contract.Requires(destFileName.Length > 0);
String fullSourceFileName = LongPath.NormalizePath(sourceFileName);
new FileIOPermission(FileIOPermissionAccess.Write | FileIOPermissionAccess.Read, new String[] { fullSourceFileName }, false, false).Demand();
String fullDestFileName = LongPath.NormalizePath(destFileName);
new FileIOPermission(FileIOPermissionAccess.Write, new String[] { fullDestFileName }, false, false).Demand();
if (!LongPathFile.InternalExists(fullSourceFileName))
__Error.WinIOError(Win32Native.ERROR_FILE_NOT_FOUND, fullSourceFileName);
String tempSourceFileName = Path.AddLongPathPrefix(fullSourceFileName);
String tempDestFileName = Path.AddLongPathPrefix(fullDestFileName);
if (!Win32Native.MoveFile(tempSourceFileName, tempDestFileName))
{
__Error.WinIOError();
}
}
// throws FileNotFoundException if not found
[System.Security.SecurityCritical]
internal static long GetLength(String path)
{
Contract.Requires(path != null);
String fullPath = LongPath.NormalizePath(path);
new FileIOPermission(FileIOPermissionAccess.Read, new String[] { fullPath }, false, false ).Demand();
String tempPath = Path.AddLongPathPrefix(fullPath);
Win32Native.WIN32_FILE_ATTRIBUTE_DATA data = new Win32Native.WIN32_FILE_ATTRIBUTE_DATA();
int dataInitialised = File.FillAttributeInfo(tempPath, ref data, false, true); // return error
if (dataInitialised != 0)
__Error.WinIOError(dataInitialised, path); // from FileInfo.
if ((data.fileAttributes & Win32Native.FILE_ATTRIBUTE_DIRECTORY) != 0)
__Error.WinIOError(Win32Native.ERROR_FILE_NOT_FOUND, path);
return ((long)data.fileSizeHigh) << 32 | ((long)data.fileSizeLow & 0xFFFFFFFFL);
}
// Defined in WinError.h
private const int ERROR_ACCESS_DENIED = 0x5;
}
[ComVisible(false)]
static class LongPathDirectory
{
[System.Security.SecurityCritical] // auto-generated
[ResourceExposure(ResourceScope.Machine)]
[ResourceConsumption(ResourceScope.Machine)]
internal static void CreateDirectory(String path)
{
Contract.Requires(path != null);
Contract.Requires(path.Length > 0);
String fullPath = LongPath.NormalizePath(path);
// You need read access to the directory to be returned back and write access to all the directories
// that you need to create. If we fail any security checks we will not create any directories at all.
// We attempt to create directories only after all the security checks have passed. This is to avoid doing
// a demand at every level.
String demandDir = GetDemandDir(fullPath, true);
new FileIOPermission(FileIOPermissionAccess.Read, new String[] { demandDir }, false, false).Demand();
InternalCreateDirectory(fullPath, path, null);
}
[System.Security.SecurityCritical] // auto-generated
[ResourceExposure(ResourceScope.Machine)]
[ResourceConsumption(ResourceScope.Machine)]
private unsafe static void InternalCreateDirectory(String fullPath, String path, Object dirSecurityObj)
{
#if FEATURE_MACL
DirectorySecurity dirSecurity = (DirectorySecurity)dirSecurityObj;
#endif // FEATURE_MACL
int length = fullPath.Length;
// We need to trim the trailing slash or the code will try to create 2 directories of the same name.
if (length >= 2 && Path.IsDirectorySeparator(fullPath[length - 1]))
length--;
int lengthRoot = LongPath.GetRootLength(fullPath);
// For UNC paths that are only // or ///
if (length == 2 && Path.IsDirectorySeparator(fullPath[1]))
throw new IOException(Environment.GetResourceString("IO.IO_CannotCreateDirectory", path));
List<string> stackDir = new List<string>();
// Attempt to figure out which directories don't exist, and only
// create the ones we need. Note that InternalExists may fail due
// to Win32 ACL's preventing us from seeing a directory, and this
// isn't threadsafe.
bool somepathexists = false;
if (length > lengthRoot)
{ // Special case root (fullpath = X:\\)
int i = length - 1;
while (i >= lengthRoot && !somepathexists)
{
String dir = fullPath.Substring(0, i + 1);
if (!InternalExists(dir)) // Create only the ones missing
stackDir.Add(dir);
else
somepathexists = true;
while (i > lengthRoot && fullPath[i] != Path.DirectorySeparatorChar && fullPath[i] != Path.AltDirectorySeparatorChar) i--;
i--;
}
}
int count = stackDir.Count;
if (stackDir.Count != 0)
{
String[] securityList = new String[stackDir.Count];
stackDir.CopyTo(securityList, 0);
for (int j = 0; j < securityList.Length; j++)
securityList[j] += "\\."; // leaf will never have a slash at the end
// Security check for all directories not present only.
#if !FEATURE_PAL && FEATURE_MACL
AccessControlActions control = (dirSecurity == null) ? AccessControlActions.None : AccessControlActions.Change;
new FileIOPermission(FileIOPermissionAccess.Write, control, securityList, false, false ).Demand();
#else
new FileIOPermission(FileIOPermissionAccess.Write, securityList, false, false).Demand();
#endif
}
// If we were passed a DirectorySecurity, convert it to a security
// descriptor and set it in the call to CreateDirectory.
Win32Native.SECURITY_ATTRIBUTES secAttrs = null;
#if FEATURE_MACL
if (dirSecurity != null) {
secAttrs = new Win32Native.SECURITY_ATTRIBUTES();
secAttrs.nLength = (int)Marshal.SizeOf(secAttrs);
// For ACL's, get the security descriptor from the FileSecurity.
byte[] sd = dirSecurity.GetSecurityDescriptorBinaryForm();
byte * bytesOnStack = stackalloc byte[sd.Length];
Buffer.Memcpy(bytesOnStack, 0, sd, 0, sd.Length);
secAttrs.pSecurityDescriptor = bytesOnStack;
}
#endif
bool r = true;
int firstError = 0;
String errorString = path;
// If all the security checks succeeded create all the directories
while (stackDir.Count > 0)
{
String name = stackDir[stackDir.Count - 1];
stackDir.RemoveAt(stackDir.Count - 1);
if (name.Length >= Path.MaxLongPath)
throw new PathTooLongException(Environment.GetResourceString("IO.PathTooLong"));
r = Win32Native.CreateDirectory(Path.AddLongPathPrefix(name), secAttrs);
if (!r && (firstError == 0))
{
int currentError = Marshal.GetLastWin32Error();
// While we tried to avoid creating directories that don't
// exist above, there are at least two cases that will
// cause us to see ERROR_ALREADY_EXISTS here. InternalExists
// can fail because we didn't have permission to the
// directory. Secondly, another thread or process could
// create the directory between the time we check and the
// time we try using the directory. Thirdly, it could
// fail because the target does exist, but is a file.
if (currentError != Win32Native.ERROR_ALREADY_EXISTS)
firstError = currentError;
else
{
// If there's a file in this directory's place, or if we have ERROR_ACCESS_DENIED when checking if the directory already exists throw.
if (LongPathFile.InternalExists(name) || (!InternalExists(name, out currentError) && currentError == Win32Native.ERROR_ACCESS_DENIED))
{
firstError = currentError;
// Give the user a nice error message, but don't leak path information.
try
{
new FileIOPermission(FileIOPermissionAccess.PathDiscovery, new String[] { GetDemandDir(name, true) }, false, false).Demand();
errorString = name;
}
catch (SecurityException) { }
}
}
}
}
// We need this check to mask OS differences
// Handle CreateDirectory("X:\\foo") when X: doesn't exist. Similarly for n/w paths.
if ((count == 0) && !somepathexists)
{
String root = InternalGetDirectoryRoot(fullPath);
if (!InternalExists(root))
{
// Extract the root from the passed in path again for security.
__Error.WinIOError(Win32Native.ERROR_PATH_NOT_FOUND, InternalGetDirectoryRoot(path));
}
return;
}
// Only throw an exception if creating the exact directory we
// wanted failed to work correctly.
if (!r && (firstError != 0))
{
__Error.WinIOError(firstError, errorString);
}
}
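// Illustrative sketch (editor-added, not part of the original source): the core shape of the
// algorithm above with the security demands and error bookkeeping stripped away - walk up
// from the leaf collecting segments that do not exist yet, then create them from shortest to
// longest. fullPath is assumed to be already normalized.
[System.Security.SecurityCritical]
private static void CreateMissingSegmentsExample(String fullPath)
{
    List<string> missing = new List<string>();
    String current = fullPath;
    while (!String.IsNullOrEmpty(current) && !InternalExists(current))
    {
        missing.Add(current);
        current = LongPath.GetDirectoryName(current);
    }
    for (int i = missing.Count - 1; i >= 0; i--)
    {
        Win32Native.CreateDirectory(Path.AddLongPathPrefix(missing[i]), null);
    }
}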
[System.Security.SecurityCritical]
[ResourceExposure(ResourceScope.Machine)]
[ResourceConsumption(ResourceScope.Machine)]
internal static void Move(String sourceDirName, String destDirName)
{
Contract.Requires(sourceDirName != null);
Contract.Requires(destDirName != null);
Contract.Requires(sourceDirName.Length != 0);
Contract.Requires(destDirName.Length != 0);
String fullsourceDirName = LongPath.NormalizePath(sourceDirName);
String sourcePath = GetDemandDir(fullsourceDirName, false);
if (sourcePath.Length >= Path.MaxLongPath)
throw new PathTooLongException(Environment.GetResourceString("IO.PathTooLong"));
String fulldestDirName = LongPath.NormalizePath(destDirName);
String destPath = GetDemandDir(fulldestDirName, false);
if (destPath.Length >= Path.MaxLongPath)
throw new PathTooLongException(Environment.GetResourceString("IO.PathTooLong"));
new FileIOPermission(FileIOPermissionAccess.Write | FileIOPermissionAccess.Read, new String[] { sourcePath }, false, false).Demand();
new FileIOPermission(FileIOPermissionAccess.Write, new String[] { destPath }, false, false).Demand();
if (String.Compare(sourcePath, destPath, StringComparison.OrdinalIgnoreCase) == 0)
throw new IOException(Environment.GetResourceString("IO.IO_SourceDestMustBeDifferent"));
String sourceRoot = LongPath.GetPathRoot(sourcePath);
String destinationRoot = LongPath.GetPathRoot(destPath);
if (String.Compare(sourceRoot, destinationRoot, StringComparison.OrdinalIgnoreCase) != 0)
throw new IOException(Environment.GetResourceString("IO.IO_SourceDestMustHaveSameRoot"));
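// Illustrative note (added comment, not in the original source): with the root check
// above, Move(@"C:\data\old", @"C:\data\new") is allowed, while
// Move(@"C:\data\old", @"D:\archive\old") throws IOException because the roots differ.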
String tempSourceDirName = Path.AddLongPathPrefix(sourceDirName);
String tempDestDirName = Path.AddLongPathPrefix(destDirName);
if (!Win32Native.MoveFile(tempSourceDirName, tempDestDirName))
{
int hr = Marshal.GetLastWin32Error();
if (hr == Win32Native.ERROR_FILE_NOT_FOUND) // Source dir not found
{
hr = Win32Native.ERROR_PATH_NOT_FOUND;
__Error.WinIOError(hr, fullsourceDirName);
}
// This check was originally put in for Win9x (unfortunately without special casing it to be for Win9x only). We can't change the NT codepath now for backcomp reasons.
if (hr == Win32Native.ERROR_ACCESS_DENIED) // WinNT throws IOException. This check is for Win9x. We can't change it for backcomp.
throw new IOException(Environment.GetResourceString("UnauthorizedAccess_IODenied_Path", sourceDirName), Win32Native.MakeHRFromErrorCode(hr));
__Error.WinIOError(hr, String.Empty);
}
}
[System.Security.SecurityCritical]
[ResourceExposure(ResourceScope.Machine)]
[ResourceConsumption(ResourceScope.Machine)]
internal static void Delete(String path, bool recursive)
{
String fullPath = LongPath.NormalizePath(path);
InternalDelete(fullPath, path, recursive);
}
// FullPath is fully qualified, while the user path is used for feedback in exceptions
[System.Security.SecurityCritical]
[ResourceExposure(ResourceScope.Machine)]
[ResourceConsumption(ResourceScope.Machine)]
private static void InternalDelete(String fullPath, String userPath, bool recursive)
{
String demandPath;
// If not recursive, do permission check only on this directory
// else check for the whole directory structure rooted below
demandPath = GetDemandDir(fullPath, !recursive);
// Make sure we have write permission to this directory
new FileIOPermission(FileIOPermissionAccess.Write, new String[] { demandPath }, false, false).Demand();
String longPath = Path.AddLongPathPrefix(fullPath);
// Do not recursively delete through reparse points. Perhaps in a
// future version we will add a new flag to control this behavior,
// but for now we're much safer if we err on the conservative side.
// This applies to symbolic links and mount points.
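// Illustrative example (added comment, not in the original source): if fullPath is
// itself a mount point or symbolic link, only the reparse point entry is removed;
// the contents of the link target are left untouched.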
Win32Native.WIN32_FILE_ATTRIBUTE_DATA data = new Win32Native.WIN32_FILE_ATTRIBUTE_DATA();
int dataInitialised = File.FillAttributeInfo(longPath, ref data, false, true);
if (dataInitialised != 0)
{
// Ensure we throw a DirectoryNotFoundException.
if (dataInitialised == Win32Native.ERROR_FILE_NOT_FOUND)
dataInitialised = Win32Native.ERROR_PATH_NOT_FOUND;
__Error.WinIOError(dataInitialised, fullPath);
}
if (((FileAttributes)data.fileAttributes & FileAttributes.ReparsePoint) != 0)
recursive = false;
DeleteHelper(longPath, userPath, recursive, true);
}
// Note that fullPath is fully qualified, while userPath may be
// relative. Use userPath for all exception messages to avoid leaking
// fully qualified path information.
[System.Security.SecurityCritical]
[ResourceExposure(ResourceScope.Machine)]
[ResourceConsumption(ResourceScope.Machine)]
private static void DeleteHelper(String fullPath, String userPath, bool recursive, bool throwOnTopLevelDirectoryNotFound)
{
bool r;
int hr;
Exception ex = null;
// Do not recursively delete through reparse points. Perhaps in a
// future version we will add a new flag to control this behavior,
// but for now we're much safer if we err on the conservative side.
// This applies to symbolic links and mount points.
// Note the logic to check whether fullPath is a reparse point is
// in Delete(String, String, bool), and will set "recursive" to false.
// Note that Win32's DeleteFile and RemoveDirectory will just delete
// the reparse point itself.
if (recursive)
{
Win32Native.WIN32_FIND_DATA data = new Win32Native.WIN32_FIND_DATA();
String searchPath = null;
if (fullPath.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.Ordinal))
{
searchPath = fullPath + "*";
}
else
{
searchPath = fullPath + Path.DirectorySeparatorChar + "*";
}
// Open a Find handle
using (SafeFindHandle hnd = Win32Native.FindFirstFile(searchPath, data))
{
if (hnd.IsInvalid)
{
hr = Marshal.GetLastWin32Error();
__Error.WinIOError(hr, userPath);
}
do
{
bool isDir = (0 != (data.dwFileAttributes & Win32Native.FILE_ATTRIBUTE_DIRECTORY));
if (isDir)
{
// Skip ".", "..".
if (data.cFileName.Equals(".") || data.cFileName.Equals(".."))
continue;
// Recurse for all directories, unless they are
// reparse points. Do not follow mount points nor
// symbolic links, but do delete the reparse point
// itself.
bool shouldRecurse = (0 == (data.dwFileAttributes & (int)FileAttributes.ReparsePoint));
if (shouldRecurse)
{
String newFullPath = LongPath.InternalCombine(fullPath, data.cFileName);
String newUserPath = LongPath.InternalCombine(userPath, data.cFileName);
try
{
DeleteHelper(newFullPath, newUserPath, recursive, false);
}
catch (Exception e)
{
if (ex == null)
{
ex = e;
}
}
}
else
{
// Check to see if this is a mount point, and
// unmount it.
if (data.dwReserved0 == Win32Native.IO_REPARSE_TAG_MOUNT_POINT)
{
// Use full path plus a trailing '\'
String mountPoint = LongPath.InternalCombine(fullPath, data.cFileName + Path.DirectorySeparatorChar);
r = Win32Native.DeleteVolumeMountPoint(mountPoint);
if (!r)
{
hr = Marshal.GetLastWin32Error();
if (hr != Win32Native.ERROR_PATH_NOT_FOUND)
{
try
{
__Error.WinIOError(hr, data.cFileName);
}
catch (Exception e)
{
if (ex == null)
{
ex = e;
}
}
}
}
}
// RemoveDirectory on a symbolic link will
// remove the link itself.
String reparsePoint = LongPath.InternalCombine(fullPath, data.cFileName);
r = Win32Native.RemoveDirectory(reparsePoint);
if (!r)
{
hr = Marshal.GetLastWin32Error();
if (hr != Win32Native.ERROR_PATH_NOT_FOUND)
{
try
{
__Error.WinIOError(hr, data.cFileName);
}
catch (Exception e)
{
if (ex == null)
{
ex = e;
}
}
}
}
}
}
else
{
String fileName = LongPath.InternalCombine(fullPath, data.cFileName);
r = Win32Native.DeleteFile(fileName);
if (!r)
{
hr = Marshal.GetLastWin32Error();
if (hr != Win32Native.ERROR_FILE_NOT_FOUND)
{
try
{
__Error.WinIOError(hr, data.cFileName);
}
catch (Exception e)
{
if (ex == null)
{
ex = e;
}
}
}
}
}
} while (Win32Native.FindNextFile(hnd, data));
// Make sure we quit with a sensible error.
hr = Marshal.GetLastWin32Error();
}
if (ex != null)
throw ex;
if (hr != 0 && hr != Win32Native.ERROR_NO_MORE_FILES)
__Error.WinIOError(hr, userPath);
}
r = Win32Native.RemoveDirectory(fullPath);
if (!r)
{
hr = Marshal.GetLastWin32Error();
if (hr == Win32Native.ERROR_FILE_NOT_FOUND) // A dubious error code.
hr = Win32Native.ERROR_PATH_NOT_FOUND;
// This check was originally put in for Win9x (unfortunately without special casing it to be for Win9x only). We can't change the NT codepath now for backcomp reasons.
if (hr == Win32Native.ERROR_ACCESS_DENIED)
throw new IOException(Environment.GetResourceString("UnauthorizedAccess_IODenied_Path", userPath));
// don't throw the DirectoryNotFoundException since this is a subdir and there could be a race
// between two Directory.Delete callers
if (hr == Win32Native.ERROR_PATH_NOT_FOUND && !throwOnTopLevelDirectoryNotFound)
return;
__Error.WinIOError(hr, userPath);
}
}
[System.Security.SecurityCritical]
[ResourceExposure(ResourceScope.Machine)]
[ResourceConsumption(ResourceScope.Machine)]
internal static bool Exists(String path)
{
try
{
if (path == null)
return false;
if (path.Length == 0)
return false;
// Get fully qualified file name ending in \* for security check
String fullPath = LongPath.NormalizePath(path);
String demandPath = GetDemandDir(fullPath, true);
new FileIOPermission(FileIOPermissionAccess.Read, new String[] { demandPath }, false, false).Demand();
return InternalExists(fullPath);
}
catch (ArgumentException) { }
catch (NotSupportedException) { } // Security can throw this on ":"
catch (SecurityException) { }
catch (IOException) { }
catch (UnauthorizedAccessException)
{
#if !FEATURE_PAL
Contract.Assert(false, "Ignore this assert and file a bug with the BCL team. This assert was for tracking purposes only.");
#endif //!FEATURE_PAL
}
return false;
}
[System.Security.SecurityCritical] // auto-generated
[ResourceExposure(ResourceScope.Machine)]
[ResourceConsumption(ResourceScope.Machine)]
internal static bool InternalExists(String path)
{
Contract.Requires(path != null);
int lastError = Win32Native.ERROR_SUCCESS;
return InternalExists(path, out lastError);
}
[System.Security.SecurityCritical] // auto-generated
[ResourceExposure(ResourceScope.Machine)]
[ResourceConsumption(ResourceScope.Machine)]
internal static bool InternalExists(String path, out int lastError) {
Contract.Requires(path != null);
String tempPath = Path.AddLongPathPrefix(path);
return Directory.InternalExists(tempPath, out lastError);
}
// Input to this method should already be fullpath. This method will ensure that we append
// the trailing slash only when appropriate and when thisDirOnly is specified append a "."
// at the end of the path to indicate that the demand is only for the fullpath and not
// everything underneath it.
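// Illustrative examples (added comment, not in the original source), assuming
// Windows-style separators:
//   GetDemandDir(@"C:\temp", thisDirOnly: true)  -> @"C:\temp\."
//   GetDemandDir(@"C:\temp", thisDirOnly: false) -> @"C:\temp\"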
[ResourceExposure(ResourceScope.None)]
[ResourceConsumption(ResourceScope.None, ResourceScope.None)]
private static String GetDemandDir(string fullPath, bool thisDirOnly)
{
String demandPath;
fullPath = Path.RemoveLongPathPrefix(fullPath);
if (thisDirOnly)
{
if (fullPath.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.Ordinal)
|| fullPath.EndsWith(Path.AltDirectorySeparatorChar.ToString(), StringComparison.Ordinal))
demandPath = fullPath + '.';
else
demandPath = fullPath + Path.DirectorySeparatorChar + '.';
}
else
{
if (!(fullPath.EndsWith(Path.DirectorySeparatorChar.ToString(), StringComparison.Ordinal)
|| fullPath.EndsWith(Path.AltDirectorySeparatorChar.ToString(), StringComparison.Ordinal)))
demandPath = fullPath + Path.DirectorySeparatorChar;
else
demandPath = fullPath;
}
return demandPath;
}
private static String InternalGetDirectoryRoot(String path)
{
if (path == null) return null;
return path.Substring(0, LongPath.GetRootLength(path));
}
}
}
| |
using System.Drawing;
using DotNet.Highcharts.Attributes;
using DotNet.Highcharts.Enums;
using DotNet.Highcharts.Helpers;
namespace DotNet.Highcharts.Options.PlotOptions
{
/// <summary>
/// A box plot is a convenient way of depicting groups of data through their five-number summaries: the smallest observation (sample minimum), lower quartile (Q1), median (Q2), upper quartile (Q3), and largest observation (sample maximum).
/// </summary>
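/// <example>
/// A minimal configuration sketch (added for illustration, not taken from the original
/// library documentation). It only sets properties declared in this class and assumes the
/// usual object-initializer style plus the implicit numeric conversions of <see cref="Number"/>:
/// <code>
/// var boxplotOptions = new PlotOptionsBoxplot
/// {
///     LineWidth = 1,
///     MedianWidth = 2,
///     WhiskerWidth = 2,
///     ShowInLegend = true
/// };
/// </code>
/// </example>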
public class PlotOptionsBoxplot
{
/// <summary>
/// Allow this series' points to be selected by clicking on the markers, bars or pie slices.
/// Default: false
/// </summary>
public bool? AllowPointSelect { get; set; }
/// <summary>
/// The main color of the series. In line type series it applies to the line and the point markers unless otherwise specified. In bar type series it applies to the bars unless a color is specified per point. The default value is pulled from the <code>options.colors</code> array.
/// </summary>
public Color? Color { get; set; }
/// <summary>
/// When using automatic point colors pulled from the <code>options.colors</code> collection, this option determines whether the chart should receive one color per series or one color per point.
/// Default: false
/// </summary>
public bool? ColorByPoint { get; set; }
/// <summary>
/// A series specific or series type specific color set to apply instead of the global <a href='#colors'>colors</a> when <a href='#plotOptions.column.colorByPoint'>colorByPoint</a> is true.
/// </summary>
public Color[] Colors { get; set; }
/// <summary>
/// You can set the cursor to 'pointer' if you have click events attached to the series, to signal to the user that the points and lines can be clicked.
/// </summary>
public Cursors? Cursor { get; set; }
/// <summary>
/// Enable or disable the mouse tracking for a specific series. This includes point tooltips and click events on graphs and points. For large datasets it improves performance.
/// Default: true
/// </summary>
public bool? EnableMouseTracking { get; set; }
public PlotOptionsBoxplotEvents Events { get; set; }
/// <summary>
/// The fill color of the box.
/// Default: #FFFFFF
/// </summary>
[JsonFormatter(addPropertyName: true, useCurlyBracketsForObject: false)]
public BackColorOrGradient FillColor { get; set; }
/// <summary>
/// Padding between each value group, in x axis units.
/// Default: 0.2
/// </summary>
public Number? GroupPadding { get; set; }
/// <summary>
/// Whether to group non-stacked columns or to let them render independent of each other. Non-grouped columns will be laid out individually and overlap each other.
/// Default: true
/// </summary>
public bool? Grouping { get; set; }
/// <summary>
/// An id for the series. This can be used after render time to get a pointer to the series object through <code>chart.get()</code>.
/// </summary>
public string Id { get; set; }
/// <summary>
/// The width of the line surrounding the box. If any of <a href='#plotOptions.boxplot.stemWidth'>stemWidth</a>, <a href='#plotOptions.boxplot.medianWidth'>medianWidth</a> or <a href='#plotOptions.boxplot.whiskerWidth'>whiskerWidth</a> are <code>null</code>, the lineWidth also applies to these lines.
/// Default: 1
/// </summary>
public Number? LineWidth { get; set; }
/// <summary>
/// The <a href='#series.id'>id</a> of another series to link to. Additionally, the value can be ':previous' to link to the previous series. When two series are linked, only the first one appears in the legend. Toggling the visibility of this also toggles the linked series.
/// </summary>
public string LinkedTo { get; set; }
/// <summary>
/// The color of the median line. If <code>null</code>, the general series color applies.
/// Default: null
/// </summary>
public Color? MedianColor { get; set; }
/// <summary>
/// The pixel width of the median line. If <code>null</code>, the <a href='#plotOptions.boxplot.lineWidth'>lineWidth</a> is used.
/// Default: 2
/// </summary>
public Number? MedianWidth { get; set; }
/// <summary>
/// The color for the parts of the graph or points that are below the <a href='#plotOptions.series.threshold'>threshold</a>.
/// Default: null
/// </summary>
public Color? NegativeColor { get; set; }
/// <summary>
/// Properties for each single point
/// </summary>
public PlotOptionsBoxplotPoint Point { get; set; }
/// <summary>
/// <p>If no x values are given for the points in a series, pointInterval defines the interval of the x values. For example, if a series contains one value every decade starting from year 0, set pointInterval to 10.</p>
/// Default: 1
/// </summary>
public Number? PointInterval { get; set; }
/// <summary>
/// Padding between each column or bar, in x axis units.
/// Default: 0.1
/// </summary>
public Number? PointPadding { get; set; }
/// <summary>
/// <p>Possible values: null, 'on', 'between'.</p><p>In a column chart, when pointPlacement is 'on', the point will not create any padding of the X axis. In a polar column chart this means that the first column points directly north. If the pointPlacement is 'between', the columns will be laid out between ticks. This is useful for example for visualising an amount between two points in time or in a certain sector of a polar chart.</p><p>Defaults to <code>null</code> in cartesian charts, <code>'between'</code> in polar charts.</p>
/// </summary>
public Placement? PointPlacement { get; set; }
/// <summary>
/// The X axis range that each point is valid for. This determines the width of the column. On a categorized axis, the range will be 1 by default (one category unit). On linear and datetime axes, the range will be computed as the distance between the two closest data points.
/// </summary>
public Number? PointRange { get; set; }
/// <summary>
/// If no x values are given for the points in a series, pointStart defines on what value to start. For example, if a series contains one yearly value starting from 1945, set pointStart to 1945.
/// Default: 0
/// </summary>
[JsonFormatter(addPropertyName: false, useCurlyBracketsForObject: false)]
public PointStart PointStart { get; set; }
/// <summary>
/// A pixel value specifying a fixed width for each column or bar. When <code>null</code>, the width is calculated from the <code>pointPadding</code> and <code>groupPadding</code>.
/// </summary>
public Number? PointWidth { get; set; }
/// <summary>
/// Whether to select the series initially. If <code>showCheckbox</code> is true, the checkbox next to the series name will be checked for a selected series.
/// Default: false
/// </summary>
public bool? Selected { get; set; }
/// <summary>
/// If true, a checkbox is displayed next to the legend item to allow selecting the series. The state of the checkbox is determined by the <code>selected</code> option.
/// Default: false
/// </summary>
public bool? ShowCheckbox { get; set; }
/// <summary>
/// Whether to display this particular series or series type in the legend.
/// Default: true
/// </summary>
public bool? ShowInLegend { get; set; }
/// <summary>
/// A wrapper object for all the series options in specific states.
/// </summary>
public PlotOptionsBoxplotStates States { get; set; }
/// <summary>
/// The color of the stem, the vertical line extending from the box to the whiskers. If <code>null</code>, the series color is used.
/// Default: null
/// </summary>
public Color? StemColor { get; set; }
/// <summary>
/// The dash style of the stem, the vertical line extending from the box to the whiskers.
/// Default: Solid
/// </summary>
public DashStyles? StemDashStyle { get; set; }
/// <summary>
/// The width of the stem, the vertical line extending from the box to the whiskers. If <code>null</code>, the width is inherited from the <a href='#plotOptions.boxplot.lineWidth'>lineWidth</a> option.
/// Default: null
/// </summary>
public Number? StemWidth { get; set; }
/// <summary>
/// Sticky tracking of mouse events. When true, the <code>mouseOut</code> event on a series isn't triggered until the mouse moves over another series, or out of the plot area. When false, the <code>mouseOut</code> event on a series is triggered when the mouse leaves the area around the series' graph or markers. This also implies the tooltip. When <code>stickyTracking</code> is false and <code>tooltip.shared</code> is false, the tooltip will be hidden when moving the mouse between series.
/// Default: true
/// </summary>
public bool? StickyTracking { get; set; }
/// <summary>
/// A configuration object for the tooltip rendering of each single series. Properties are inherited from <a href='#tooltip'>tooltip</a>, but only the following properties can be defined on a series level.
/// </summary>
public PlotOptionsBoxplotTooltip Tooltip { get; set; }
/// <summary>
/// When a series contains a data array that is longer than this, only one dimensional arrays of numbers, or two dimensional arrays with x and y values are allowed. Also, only the first point is tested, and the rest are assumed to be the same format. This saves expensive data checking and indexing in long series.
/// Default: 1000
/// </summary>
public Number? TurboThreshold { get; set; }
/// <summary>
/// Set the initial visibility of the series.
/// Default: true
/// </summary>
public bool? Visible { get; set; }
/// <summary>
/// The color of the whiskers, the horizontal lines marking low and high values. When <code>null</code>, the general series color is used.
/// Default: null
/// </summary>
public Color? WhiskerColor { get; set; }
/// <summary>
/// The length of the whiskers, the horizontal lines marking low and high values. It can be a numerical pixel value, or a percentage value of the box width. Set <code>0</code> to disable whiskers.
/// Default: 50%
/// </summary>
[JsonFormatter(addPropertyName: true, useCurlyBracketsForObject: false)]
public PercentageOrPixel WhiskerLength { get; set; }
/// <summary>
/// The line width of the whiskers, the horizontal lines marking low and high values. When <code>null</code>, the general <a href='#plotOptions.boxplot.lineWidth'>lineWidth</a> applies.
/// Default: 2
/// </summary>
public Number? WhiskerWidth { get; set; }
/// <summary>
/// Define the z index of the series.
/// </summary>
public Number? ZIndex { get; set; }
}
}
|